// A group item waiting on children is completed by a child transition and
// the engine run drains to completion.
public async Task KickGroupItem()
{
    var root = new WorkItem(1, null, 0, WorkflowType.Sequence, "root")
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren
    };
    _repository.Add(root);

    var stateMachine = Substitute.For<IStateMachine>();
    _stateMachineProvider.Resolve(null).ReturnsForAnyArgs(stateMachine);
    stateMachine.Transit(Arg.Any<WorkItem>(), Arg.Do<IEngine>(e =>
    {
        // Simulate the child's state machine completing the sequence.
        root.Status = WorkItemStatus.Completed;
        _repository.Update(root);
        e.Kick(null);
    }));

    _engine.Kick(root.Id);
    await _engine.Completion;

    Assert.Equal(WorkItemStatus.Completed, root.Status);
}
// Children of a parallel block inherit JobId/InputId from the parent and
// are ordered by their position in the block definition.
public void ParallelChildren()
{
    var first = Helpers.BuildActivity();
    var second = Helpers.BuildActivity();
    var parallel = new ParallelBlock(new[] { first, second }, 1);
    var wi = new WorkItem(Helpers.Integer(), Helpers.Integer(), 0, WorkflowType.Parallel, Helpers.String())
    {
        InputId = Helpers.Integer()
    };
    _navigator.Find(wi.WorkflowPath).Returns(parallel);

    var items = _workItemBuilder.BuildChildren(wi).ToArray();

    // FIX: assert the count before indexing so a wrong size fails with a
    // clear assertion message instead of IndexOutOfRangeException, and use
    // Array.Length rather than the LINQ Count() extension on an array.
    Assert.Equal(2, items.Length);
    var item1 = items[0];
    var item2 = items[1];
    Assert.Equal(wi.JobId, item1.JobId);
    Assert.Equal(wi.Id, item1.ParentId);
    Assert.Equal(0, item1.Order);
    Assert.Equal(wi.InputId, item1.InputId);
    Assert.Equal(WorkflowType.Activity, item1.Type);
    Assert.Equal(wi.JobId, item2.JobId);
    Assert.Equal(wi.Id, item2.ParentId);
    Assert.Equal(1, item2.Order);
    Assert.Equal(wi.InputId, item2.InputId);
    Assert.Equal(WorkflowType.Activity, item2.Type);
    _navigator.Received(1).Path(parallel.Children.First());
    _navigator.Received(1).Path(parallel.Children.Last());
}
// NOTE(review): despite "ShouldThrowException" in the name, no exception is
// asserted here — exceeding the retry limit marks the parent Failed with the
// child's exception id and asks the engine to rescue it.
public void ShouldThrowExceptionIfExceedsMaxRetryCount()
{
    var parent = new WorkItem(1, Helpers.Integer(), 0, WorkflowType.Activity, Helpers.String())
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Retry
    };
    var failedChild = new WorkItem(1, parent.Id, 2, WorkflowType.Activity, Helpers.String())
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.Failed,
        ExceptionId = Helpers.Integer()
    };
    _repository.GetLastChildByOrder(parent.Id).Returns(failedChild);
    _workflowPathNavigator.Find(parent.WorkflowPath).Returns(Helpers.BuildRetry());

    _stateMachine.Transit(parent, _engine);

    _repository.Received(1)
        .Update(Arg.Is<WorkItem>(_ => _.ExceptionId == failedChild.ExceptionId && _.Status == WorkItemStatus.Failed));
    _engine.Received(1).Rescure(parent.Id);
}
// When no children remain to run, a sequence takes the last child's output,
// completes, and kicks its parent.
public void TransitToCompleted()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Sequence,
        ParentId = Helpers.Integer()
    };
    var lastChild = new WorkItem { OutputId = Helpers.Integer() };
    _repository.CountInProgressChildren(workItem.Id).Returns(0);
    _repository.FindRunnableChildrenByOrder(workItem.Id, 1).Returns(Enumerable.Empty<WorkItem>());
    _repository.GetLastChildByOrder(workItem.Id).Returns(lastChild);

    _stateMachine.Transit(workItem, _engine);

    _repository.Received(1).Update(workItem);
    Assert.Equal(WorkItemStatus.Completed, workItem.Status);
    _engine.Received(1).Kick(workItem.ParentId);
}
// A fork builds one child work item per split input id, all pointing at the
// same child activity definition.
public void ForkChildren()
{
    var activity = Helpers.BuildActivity();
    var fork = new ForkBlock(activity, 1);
    var wi = new WorkItem(Helpers.Integer(), Helpers.Integer(), 0, WorkflowType.Fork, Helpers.String())
    {
        InputId = Helpers.Integer()
    };
    _navigator.Find(wi.WorkflowPath).Returns(fork);
    _dataStore.SplitAndGetIds(wi.InputId.Value).Returns(new[] { 1, 2 });
    wi.Id = Helpers.Integer();

    var items = _workItemBuilder.BuildChildren(wi).ToArray();

    // FIX: assert the count before indexing so a wrong size fails with a
    // clear assertion message instead of IndexOutOfRangeException, and use
    // Array.Length rather than the LINQ Count() extension on an array.
    Assert.Equal(2, items.Length);
    var item1 = items[0];
    var item2 = items[1];
    Assert.Equal(wi.JobId, item1.JobId);
    Assert.Equal(wi.Id, item1.ParentId);
    Assert.Equal(0, item1.Order);
    Assert.Equal(1, item1.InputId);
    Assert.Equal(WorkflowType.Activity, item1.Type);
    Assert.Equal(wi.JobId, item2.JobId);
    Assert.Equal(wi.Id, item2.ParentId);
    Assert.Equal(1, item2.Order);
    Assert.Equal(2, item2.InputId);
    Assert.Equal(WorkflowType.Activity, item2.Type);
    _navigator.Received(2).Path(activity);
}
/// <summary>
/// Advances a sequence work item one step: on Created it (re)builds the
/// child work items and hands control back to the engine; on
/// WaitingForChildren it either kicks off the next runnable child in order
/// or completes the sequence once every child has finished.
/// </summary>
/// <param name="workItem">The work item to transition; must have Type == Sequence.</param>
/// <param name="engine">Engine used to schedule follow-up transitions.</param>
public void Transit(WorkItem workItem, IEngine engine)
{
    if (workItem == null) throw new ArgumentNullException("workItem");
    if (workItem.Type != WorkflowType.Sequence) throw new ArgumentException("type must be sequence");
    var status = workItem.Status;
    switch (status)
    {
        case WorkItemStatus.Created:
            // Start fresh: drop any children left over from a previous run.
            _repository.DeleteChildren(workItem.Id);
            var children = _workItemBuilder.BuildChildren(workItem);
            _repository.AddAll(children);
            workItem.Status = WorkItemStatus.WaitingForChildren;
            _repository.Update(workItem);
            // Re-kick ourselves so the WaitingForChildren branch runs next.
            engine.Kick(workItem.Id);
            break;
        case WorkItemStatus.WaitingForChildren:
            // A failed child halts the sequence; failure handling happens elsewhere.
            if (_repository.HasFailedChildren(workItem.Id)) return;
            var inProgress = _repository.CountInProgressChildren(workItem.Id);
            if (inProgress == 0)
            {
                // A sequence runs its children one at a time, in order.
                var next = _repository.FindRunnableChildrenByOrder(workItem.Id, 1).SingleOrDefault();
                if (next == null) // all complete
                {
                    // The sequence's output is the last child's output.
                    var last = _repository.GetLastChildByOrder(workItem.Id);
                    workItem.OutputId = last.OutputId;
                    workItem.Status = WorkItemStatus.Completed;
                    _repository.Update(workItem);
                    engine.Kick(workItem.ParentId);
                }
                else
                {
                    if (next.Order > 0)
                    {
                        // Chain outputs: the previous child's output feeds the next child.
                        var previous = _repository.GetChildByOrder(workItem.Id, next.Order - 1);
                        next.InputId = previous.OutputId;
                        _repository.Update(next);
                    }
                    engine.Kick(next.Id);
                }
            }
            break;
    }
}
/// <summary>
/// Advances a fork work item: on Created it builds one child per split
/// input; on WaitingForChildren it kicks runnable children up to the
/// block's MaxWorkers, and once every child has finished it combines the
/// child outputs into a single reference output and completes the fork.
/// </summary>
/// <param name="workItem">The work item to transition; must have Type == Fork.</param>
/// <param name="engine">Engine used to schedule follow-up transitions.</param>
public void Transit(WorkItem workItem, IEngine engine)
{
    if (workItem == null) throw new ArgumentNullException("workItem");
    if (workItem.Type != WorkflowType.Fork) throw new ArgumentException("type must be fork");
    var status = workItem.Status;
    var definition = (ForkBlock) _workflowPathNavigator.Find(workItem.WorkflowPath);
    switch (status)
    {
        case WorkItemStatus.Created:
            // Start fresh: drop any children left over from a previous run.
            _repository.DeleteChildren(workItem.Id);
            var children = _workItemBuilder.BuildChildren(workItem);
            _repository.AddAll(children);
            workItem.Status = WorkItemStatus.WaitingForChildren;
            _repository.Update(workItem);
            // Re-kick ourselves so the WaitingForChildren branch runs next.
            engine.Kick(workItem.Id);
            break;
        case WorkItemStatus.WaitingForChildren:
            // A failed child halts the fork; failure handling happens elsewhere.
            if (_repository.HasFailedChildren(workItem.Id)) return;
            var inProgress = _repository.CountInProgressChildren(workItem.Id);
            // Spare capacity under the configured worker limit.
            var newWorkers = definition.MaxWorkers - inProgress;
            if (newWorkers > 0)
            {
                var workItems = _repository.FindRunnableChildrenByOrder(workItem.Id, newWorkers);
                if (workItems.Count > 0)
                {
                    foreach (var runnable in workItems)
                    {
                        engine.Kick(runnable.Id);
                    }
                }
                else if (inProgress == 0) // nothing runnable and nothing running: all children finished
                {
                    var ids = _repository.LoadChildOutputIds(workItem.Id);
                    workItem.OutputId = _dataStore.AddReferences(workItem.JobId, ids, definition.Child.OutputType);
                    workItem.Status = WorkItemStatus.Completed;
                    _repository.Update(workItem);
                    engine.Kick(workItem.ParentId);
                }
            }
            break;
    }
}
// The return value of a synchronous activity is stored in the data store
// under the work item's job.
public async Task MethodWithReturnValue()
{
    Func<int> producer = () => 1;
    var workItem = new WorkItem { JobId = Helpers.Integer() };
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(new ActivityBlock(producer));

    await _activityRunner.Run(workItem);

    _dataStore.Received(1).Add(workItem.JobId, 1, typeof (int));
}
// An activity with no parameters still runs when the work item carries an
// input — the input is simply ignored.
public async Task IgnoringInput()
{
    var wasInvoked = false;
    Action noArgActivity = delegate { wasInvoked = true; };
    var workItem = new WorkItem { InputId = Helpers.Integer() };
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(new ActivityBlock(noArgActivity));

    await _activityRunner.Run(workItem);

    Assert.True(wasInvoked);
}
// A successful run marks the work item Completed and persists that status.
public async Task CheckPostConditionWhenSuccess()
{
    Action noOp = delegate { };
    var workItem = new WorkItem();
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(new ActivityBlock(noOp));

    await _activityRunner.Run(workItem);

    Assert.Equal(WorkItemStatus.Completed, workItem.Status);
    _repository.Received().Update(Arg.Is<WorkItem>(_ => _.Status == WorkItemStatus.Completed));
}
// NOTE(review): despite "AndReturnValue" in the name, the delegate is an
// Action and returns nothing; the test only checks the body was invoked.
public async Task MethodWithNoArgumentAndReturnValue()
{
    var wasInvoked = false;
    Action noArgActivity = delegate { wasInvoked = true; };
    var workItem = new WorkItem();
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(new ActivityBlock(noArgActivity));

    await _activityRunner.Run(workItem);

    Assert.True(wasInvoked);
}
// Stores a defensive copy of the work item under the next free id and
// assigns that id to the caller's instance as well.
// NOTE(review): locking on the dictionary itself works here but a dedicated
// private lock object would be the usual convention; changing it would
// require a new field outside this method.
public int Add(WorkItem workItem)
{
    lock (_dataStore)
    {
        var nextId = (_dataStore.Keys.Any() ? _dataStore.Keys.Max() : 0) + 1;
        workItem.Id = nextId;
        // Clone so later mutations of the caller's object don't leak into the store.
        _dataStore[nextId] = (WorkItem) workItem.Clone();
        return nextId;
    }
}
// Builds one child work item per branch of the parallel block; every branch
// receives the parent's input. NOTE(review): "Pallaral" is a typo of
// "Parallel", but the name is referenced by BuildChildren, so it is kept.
IEnumerable<WorkItem> BuildForPallaral(WorkItem workItem)
{
    var definition = (ParallelBlock) _pathNavigator.Find(workItem.WorkflowPath);
    return definition.Children
        .Select((branch, order) =>
            new WorkItem(workItem.JobId, workItem.Id, order, branch.Type, _pathNavigator.Path(branch))
            {
                InputId = workItem.InputId
            })
        .ToArray();
}
// A retry block wraps exactly one child; build a single work item for it
// carrying the parent's input.
IEnumerable<WorkItem> BuildForRetry(WorkItem workItem)
{
    var definition = (RetryBlock) _pathNavigator.Find(workItem.WorkflowPath);
    var child = definition.Children.Single();
    var attempt = new WorkItem(workItem.JobId, workItem.Id, 0, child.Type, _pathNavigator.Path(child))
    {
        InputId = workItem.InputId
    };
    return new[] { attempt };
}
// A single-argument activity receives the stored input value and its result
// is written back to the data store.
public async Task MethodWithSingleArgument()
{
    Func<int, int> identity = i => i;
    var workItem = new WorkItem { JobId = Helpers.Integer(), InputId = Helpers.Integer() };
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(new ActivityBlock(identity));
    _dataStore.Get(workItem.InputId.Value).Returns(2);

    await _activityRunner.Run(workItem);

    _dataStore.Received(1).Add(workItem.JobId, 2, typeof (int));
}
// A sequence with any failed child must not kick off anything further.
public void ShouldNotContinueIfHasAnyFailedChild()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Sequence
    };
    _repository.HasFailedChildren(Arg.Any<int>()).Returns(true);

    _stateMachine.Transit(workItem, _engine);

    _engine.DidNotReceiveWithAnyArgs().Kick(null);
}
// Builds one child per split input id, all targeting the fork's single
// child definition.
// NOTE(review): assumes InputId is set for fork items — .Value throws
// InvalidOperationException otherwise; confirm with callers.
IEnumerable<WorkItem> BuildForFork(WorkItem workItem)
{
    var definition = (ForkBlock) _pathNavigator.Find(workItem.WorkflowPath);
    var childPath = _pathNavigator.Path(definition.Child);
    var children = new List<WorkItem>();
    var order = 0;
    foreach (var inputId in _dataStore.SplitAndGetIds(workItem.InputId.Value))
    {
        children.Add(new WorkItem(workItem.JobId, workItem.Id, order, definition.Child.Type, childPath)
        {
            InputId = inputId
        });
        order++;
    }
    return children;
}
/// <summary>
/// Builds the child work items for a composite work item, dispatching on
/// its workflow type. Activities have no children and are rejected.
/// </summary>
/// <param name="workItem">The composite work item to expand.</param>
/// <returns>The child work items, in execution order.</returns>
/// <exception cref="ArgumentNullException">workItem is null.</exception>
/// <exception cref="ArgumentException">The type has no child-building rule.</exception>
public IEnumerable<WorkItem> BuildChildren(WorkItem workItem)
{
    // Explicit null guard for consistency with the state machines' Transit methods.
    if (workItem == null) throw new ArgumentNullException("workItem");
    // switch for consistency with StateMachineProvider.Resolve's dispatch style.
    switch (workItem.Type)
    {
        case WorkflowType.Fork:
            return BuildForFork(workItem);
        case WorkflowType.Sequence:
            return BuildForSequence(workItem);
        case WorkflowType.Parallel:
            return BuildForPallaral(workItem);
        case WorkflowType.Retry:
            return BuildForRetry(workItem);
        default:
            throw new ArgumentException("workItem.Type not supported: " + workItem.Type);
    }
}
// A freshly created activity work item is moved to Pending and posted to
// the engine for execution.
public void Test()
{
    var repository = Substitute.For<IWorkItemRepository>();
    var engine = Substitute.For<IEngine>();
    var stateMachine = new ActivityStateMachine(repository);
    var workItem = new WorkItem { Id = Helpers.Integer(), Status = WorkItemStatus.Created };

    stateMachine.Transit(workItem, engine);

    repository.Received(1).Update(Arg.Is<WorkItem>(_ => _.Status == WorkItemStatus.Pending));
    engine.Received(1).PostActivity(workItem.Id);
}
/// <summary>
/// Executes the activity behind <paramref name="workItem"/>: resolves its
/// ActivityBlock definition, runs it via RunCore, and persists the final
/// status — Completed on success, Failed on an unhandled exception, or
/// Completed with the handler's output when the block defines an
/// ExceptionHandler.
/// </summary>
/// <param name="workItem">The work item to run; must have Type == Activity.</param>
public async Task Run(WorkItem workItem)
{
    if (workItem == null) throw new ArgumentNullException("workItem");
    if (workItem.Type != WorkflowType.Activity) throw new ArgumentException("type must be Activity");
    var definition = _workflowPathNavigator.Find(workItem.WorkflowPath);
    if (definition == null) throw new Exception("definition not found with path: " + workItem.WorkflowPath); //todo: specific exception
    if (!(definition is ActivityBlock)) throw new Exception("definition is expected to be ActivityBlock but actually received: " + definition.Type); //todo: specific excpetion
    var activity = (ActivityBlock) definition;
    try
    {
        await RunCore(workItem, activity);
        workItem.Status = WorkItemStatus.Completed;
    }
    catch (Exception ex)
    {
        // todo: Here exception is thrown from dynamic invoke, the actual failure is inside InnerException, is that safe to lose context?
        // todo: log original exception
        // The activity's real failure is wrapped by DynamicInvoke; unwrap it
        // so the stored exception (or the handler) sees the actual cause.
        var actualException = ex.InnerException ?? ex;
        // todo: consider combine it with engine
        if (definition.ExceptionHandler == null)
        {
            // No handler: record the exception and fail the work item.
            workItem.ExceptionId = _dataStore.Add(workItem.JobId, actualException, actualException.GetType());
            workItem.Status = WorkItemStatus.Failed;
        }
        else
        {
            // Handler present: its return value becomes the activity's output.
            var output = definition.ExceptionHandler.DynamicInvoke(actualException);
            workItem.OutputId = _dataStore.Add(workItem.JobId, output, activity.OutputType);
            workItem.Status = WorkItemStatus.Completed;
        }
    }
    // Persist the outcome regardless of which path was taken.
    _repository.Update(workItem);
}
// A failed child with no exception handler anywhere propagates its failure
// (status and exception id) up through every ancestor.
public void BubblesUpIfHandlerHasNotDefined()
{
    var root = new WorkItem { JobId = Helpers.Integer(), WorkflowPath = "root" };
    _workItemRepo.Add(root);
    var parent = new WorkItem { ParentId = root.Id, WorkflowPath = "parent" };
    _workItemRepo.Add(parent);
    var child = new WorkItem
    {
        ExceptionId = Helpers.Integer(),
        ParentId = parent.Id,
        WorkflowPath = "child",
        Status = WorkItemStatus.Failed
    };
    _workItemRepo.Add(child);
    _navigator.Find(root.WorkflowPath).Returns(Helpers.BuildFork());
    _navigator.Find(parent.WorkflowPath).Returns(Helpers.BuildFork());

    _stateMachine.Transit(child, _engine);

    var wi = _workItemRepo.Get(parent.Id);
    Assert.Equal(WorkItemStatus.Failed, wi.Status);
    Assert.Equal(child.ExceptionId, wi.ExceptionId);
    wi = _workItemRepo.Get(root.Id);
    Assert.Equal(WorkItemStatus.Failed, wi.Status);
    Assert.Equal(child.ExceptionId, wi.ExceptionId);
    // BUG FIX: the original ended with `_engine.Kick(null);` — a direct call
    // on the substitute, which is a no-op and asserts nothing. Verify the
    // interaction instead. TODO(review): confirm the expected Kick argument.
    _engine.ReceivedWithAnyArgs().Kick(null);
}
// When the next runnable child is not the first, it is wired to the
// previous child's output before being kicked.
public void KickOffNext()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Sequence
    };
    var next = new WorkItem { Id = Helpers.Integer(), OutputId = Helpers.Integer(), Order = 1 };
    var previous = new WorkItem { OutputId = Helpers.Integer() };
    _repository.CountInProgressChildren(workItem.Id).Returns(0);
    _repository.FindRunnableChildrenByOrder(workItem.Id, 1).Returns(new[] { next });
    _repository.GetChildByOrder(workItem.Id, next.Order - 1).Returns(previous);

    _stateMachine.Transit(workItem, _engine);

    Assert.Equal(previous.OutputId, next.InputId);
    _engine.Received(1).Kick(next.Id);
}
// Picks the state machine for a work item. A Failed item always gets the
// exception machine, regardless of its workflow type; otherwise dispatch is
// by type.
public IStateMachine Resolve(WorkItem workItem)
{
    if (workItem == null) throw new ArgumentNullException("workItem");
    if (workItem.Status == WorkItemStatus.Failed)
        return new ExceptionStateMachine(_repository, _dataStore, _workflowPathNavigator);
    switch (workItem.Type)
    {
        case WorkflowType.Activity:
            return new ActivityStateMachine(_repository);
        case WorkflowType.Fork:
            return new ForkStateMachine(_repository, _workItemBuilder, _workflowPathNavigator, _dataStore);
        case WorkflowType.Parallel:
            return new ParallelStateMachine(_repository, _workItemBuilder, _workflowPathNavigator, _dataStore);
        case WorkflowType.Sequence:
            return new SequenceStateMachine(_repository, _workItemBuilder);
        case WorkflowType.Retry:
            return new RetryStateMachine(_repository, _workItemBuilder, _workflowPathNavigator);
        default:
            throw new ArgumentException("workItem type does not support: " + workItem.Type);
    }
}
// A failed child below the retry limit causes a fresh attempt to be created
// and kicked.
public void ShouldRetryIfChildFailed()
{
    var parent = new WorkItem(1, Helpers.Integer(), 0, WorkflowType.Activity, Helpers.String())
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Retry
    };
    var failedChild = new WorkItem(1, parent.Id, 0, WorkflowType.Activity, "child")
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.Failed
    };
    _repository.GetLastChildByOrder(parent.Id).Returns(failedChild);
    _workflowPathNavigator.Find(parent.WorkflowPath).Returns(Helpers.BuildRetry(2));

    _stateMachine.Transit(parent, _engine);

    _repository.Received(1).Add(Arg.Is<WorkItem>(_ => _.Status == WorkItemStatus.Created));
    _engine.ReceivedWithAnyArgs(1).Kick(null);
}
// With spare worker capacity, runnable fork children are kicked off.
public void KicksOffChildWorkItemsIfNumberOfInProgressIsLessThanMaxWorkers()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Fork
    };
    var runnableChild = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.Created,
        Type = WorkflowType.Activity
    };
    _repository.CountInProgressChildren(workItem.Id).Returns(0);
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(Helpers.BuildFork(2));
    _repository.FindRunnableChildrenByOrder(workItem.Id, 2).ReturnsForAnyArgs(new[] { runnableChild });

    _stateMachine.Transit(workItem, _engine);

    _engine.Received(1).Kick(runnableChild.Id);
}
/// <summary>
/// Runs the activity delegate itself: marks the item Running, dispatches on
/// the declared parameter count (zero, one, or many), awaits asynchronous
/// activities, and stores any non-null result as the item's output.
/// </summary>
/// <param name="workItem">The work item being executed.</param>
/// <param name="activityBlock">The activity definition whose Method is invoked.</param>
async Task RunCore(WorkItem workItem, ActivityBlock activityBlock)
{
    workItem.Status = WorkItemStatus.Running;
    _repository.Update(workItem);
    object output;
    var inputTypes = activityBlock.InputTypes.ToArray();
    if (inputTypes.Length == 0)
    {
        // No-argument activity: any stored input is ignored.
        if (activityBlock.IsAsync)
        {
            // Await the returned Task, then pull its result off via GetResult.
            var task = (Task) activityBlock.Method.DynamicInvoke();
            await task;
            output = task.GetResult();
        }
        else
        {
            output = activityBlock.Method.DynamicInvoke();
        }
    }
    else if (inputTypes.Length == 1)
    {
        // Single-argument activity: fetch the stored input value.
        // NOTE(review): assumes InputId is set here — .Value throws otherwise.
        var arg = _dataStore.Get(workItem.InputId.Value);
        if (inputTypes[0].IsTuple() && arg is object[]) // If output is array from parallel and input of continuation is Tuple
        {
            // Pack the parallel results into the tuple the continuation expects.
            var ctor = inputTypes[0].GetConstructors().Single();
            arg = ctor.Invoke((object[]) arg);
        }
        if (activityBlock.IsAsync)
        {
            var task = (Task) activityBlock.Method.DynamicInvoke(arg);
            await task;
            output = task.GetResult();
        }
        else
        {
            output = activityBlock.Method.DynamicInvoke(arg);
        }
    }
    else
    {
        // Multiple parameters: the stored input is expected to be an argument array.
        var args = (object[]) _dataStore.Get(workItem.InputId.Value);
        if (activityBlock.IsAsync)
        {
            var task = (Task) activityBlock.Method.DynamicInvoke(args);
            await task;
            output = task.GetResult();
        }
        else
        {
            output = activityBlock.Method.DynamicInvoke(args);
        }
    }
    if (output != null)
    {
        // Only persist a real result; void activities produce no output.
        workItem.OutputId = _dataStore.Add(workItem.JobId, output, activityBlock.OutputType);
    }
}
// Once every fork child has finished, their outputs are combined into one
// reference output and the fork completes, kicking its parent.
public void TransitToCompleted()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.WaitingForChildren,
        Type = WorkflowType.Fork,
        ParentId = Helpers.Integer()
    };
    _repository.GetParent(workItem).Returns(new WorkItem());
    _repository.CountInProgressChildren(workItem.Id).Returns(0);
    _workflowPathNavigator.Find(null).ReturnsForAnyArgs(Helpers.BuildFork(2));
    _repository.FindRunnableChildrenByOrder(workItem.Id, 2).ReturnsForAnyArgs(Enumerable.Empty<WorkItem>());
    var childOutputIds = new[] { 1, 2 };
    _repository.LoadChildOutputIds(workItem.Id).Returns(childOutputIds);
    var combinedOutputId = Helpers.Integer();
    _dataStore.AddReferences(workItem.JobId, childOutputIds, typeof (int)).Returns(combinedOutputId);

    _stateMachine.Transit(workItem, _engine);

    Assert.Equal(combinedOutputId, workItem.OutputId);
    Assert.Equal(WorkItemStatus.Completed, workItem.Status);
    _engine.Received(1).Kick(workItem.ParentId);
}
// A parent with an exception handler rescues a failed child: the handler's
// output becomes the parent's output and the parent completes.
// (NOTE(review): "Rescure" follows the IEngine.Rescure spelling used elsewhere.)
public void CanRescure()
{
    var parent = new WorkItem { JobId = Helpers.Integer(), WorkflowPath = "parent" };
    var parentId = _workItemRepo.Add(parent);
    var child = new WorkItem
    {
        ParentId = parentId,
        WorkflowPath = "child",
        Status = WorkItemStatus.Failed,
        ExceptionId = Helpers.Integer()
    };
    _workItemRepo.Add(child);
    var definition = Helpers.BuildFork();
    definition.ExceptionHandler = (Func<Exception, int>) (_ => 1);
    _navigator.Find(parent.WorkflowPath).Returns(definition);
    _dataStore.Add(parent.JobId, 1, typeof (int)).ReturnsForAnyArgs(1);

    _stateMachine.Transit(child, _engine);

    parent = _workItemRepo.Get(parentId);
    Assert.Equal(WorkItemStatus.Completed, parent.Status);
    Assert.Equal(1, parent.OutputId);
    // BUG FIX: the original ended with `_engine.Kick(parentId);` — a direct
    // call on the substitute, which is a no-op and asserts nothing. Verify
    // the interaction instead. TODO(review): confirm the expected argument.
    _engine.Received(1).Kick(parentId);
}
// Creating a sequence's children moves it to WaitingForChildren and
// re-kicks it so the children get scheduled.
public void TransitToWaitingForChildren()
{
    var workItem = new WorkItem
    {
        Id = Helpers.Integer(),
        Status = WorkItemStatus.Created,
        Type = WorkflowType.Sequence
    };

    _stateMachine.Transit(workItem, _engine);

    _repository.Received(1).DeleteChildren(workItem.Id);
    _repository.ReceivedWithAnyArgs(1).AddAll(null);
    Assert.Equal(WorkItemStatus.WaitingForChildren, workItem.Status);
    _engine.Received(1).Kick(workItem.Id);
}
// A parent already in a terminal state must not be re-kicked when one of
// its children fails.
public void ShouldNotContinueIfParentIsFinalState(WorkItemStatus state)
{
    var parent = new WorkItem { JobId = Helpers.Integer(), WorkflowPath = "parent", Status = state };
    var parentId = _workItemRepo.Add(parent);
    var failedChild = new WorkItem
    {
        ParentId = parentId,
        WorkflowPath = "child",
        Status = WorkItemStatus.Failed,
        ExceptionId = Helpers.Integer()
    };
    _navigator.Find(parent.WorkflowPath).Returns(Helpers.BuildFork());
    _dataStore.Add(parent.JobId, 1, typeof (int)).ReturnsForAnyArgs(1);

    _stateMachine.Transit(failedChild, _engine);

    _engine.DidNotReceiveWithAnyArgs().Kick(null);
}