public async Task TestFaultingAndCancellation()
{
    foreach (bool fault in DataflowTestHelpers.BooleanValues)
    {
        var cts = new CancellationTokenSource();
        var tb = new TransformBlock<int, int>(i => i, new ExecutionDataflowBlockOptions { CancellationToken = cts.Token });
        tb.PostRange(0, 4);
        Assert.Equal(expected: 0, actual: await tb.ReceiveAsync());
        Assert.Equal(expected: 1, actual: await tb.ReceiveAsync());

        if (fault)
        {
            Assert.Throws<ArgumentNullException>(() => ((IDataflowBlock)tb).Fault(null));
            ((IDataflowBlock)tb).Fault(new InvalidCastException());
            await Assert.ThrowsAsync<InvalidCastException>(() => tb.Completion);
        }
        else
        {
            cts.Cancel();
            await Assert.ThrowsAnyAsync<OperationCanceledException>(() => tb.Completion);
        }

        Assert.Equal(expected: 0, actual: tb.InputCount);
        Assert.Equal(expected: 0, actual: tb.OutputCount);
    }
}
public async Task TestOrdering_Sync_OrderedDisabled()
{
    // If ordering were enabled, this test would hang.
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = false };

    var mres = new ManualResetEventSlim();
    var tb = new TransformBlock<int, int>(i =>
    {
        if (i == 0)
        {
            mres.Wait();
        }
        return i;
    }, options);
    tb.Post(0);
    tb.Post(1);

    Assert.Equal(1, await tb.ReceiveAsync());
    mres.Set();
    Assert.Equal(0, await tb.ReceiveAsync());

    tb.Complete();
    await tb.Completion;
}
public async Task<SessionIdentifier> InitializeAsync(int age)
{
    _dataStoreBlock.Post(new CreateSessionRequest(age));
    var sessionId = await _spectraServiceBlock.ReceiveAsync();
    return (SessionIdentifier)sessionId;
}
public static void Run()
{
    Func<int, int> fn = n =>
    {
        Thread.Sleep(1000);
        return n * n;
    };
    var tfBlock = new TransformBlock<int, int>(fn);

    for (int i = 0; i < 10; i++)
    {
        tfBlock.Post(i);
    }

    // ReceiveAsync returns a Task
    for (int i = 0; i < 10; i++)
    {
        Task<int> resultTask = tfBlock.ReceiveAsync();
        int result = resultTask.Result; // Reading Result blocks until a value is ready
        Console.WriteLine(result);
    }

    Console.WriteLine("Done");

    // ReceiveAsync() returns a Task<T> that represents the receive operation.
    // Reading Result on the returned Task forces the program to wait until data becomes available,
    // essentially making this a synchronous operation like the previous example.
    // The console output is the same.
}
public virtual async Task<T> GetNextItemAsync()
{
    if (_getDataFunc == null)
    {
        throw new ArgumentNullException($"{nameof(GetDataFrom)} hasn't been called");
    }

    EnsureFetchTaskIsRunning();

    if (InternalBuffer == null)
    {
        throw new DataflowException("You must call StartInput first. The buffer has not been initialized");
    }

    var fetchCalled = TryFetchData();

    if (!HasItemsInQueue)
    {
        return null;
    }

    //if (fetchCalled) _dataLoadSemaphore.WaitOne();

    var item = await _propagationBlock.ReceiveAsync();

    if (ItemAvailableforProcessing != null)
    {
        await ItemAvailableforProcessing.Invoke(this, new DataSourceEventArgs<object> { Items = new[] { item } });
    }

    _signal.Set();
    return item;
}
static public void Run()
{
    Func<int, int> fn = n =>
    {
        Thread.Sleep(1000);
        return n * n;
    };
    var tfBlock = new TransformBlock<int, int>(fn);

    for (int i = 0; i < 10; i++)
    {
        tfBlock.Post(i);
    }

    // ReceiveAsync returns a Task
    for (int i = 0; i < 10; i++)
    {
        Task<int> resultTask = tfBlock.ReceiveAsync();
        int result = resultTask.Result; // Calling Result will wait until it has a value ready
        Console.WriteLine(result);
    }

    Console.WriteLine("Done");
}
public async Task TestNullTasksIgnored()
{
    foreach (int dop in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
    {
        var tb = new TransformBlock<int, int>(i =>
        {
            if ((i % 2) == 0)
            {
                return null;
            }
            return Task.Run(() => i);
        }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop });

        const int Iters = 100;
        tb.PostRange(0, Iters);
        tb.Complete();
        for (int i = 0; i < Iters; i++)
        {
            if ((i % 2) != 0)
            {
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }
        }
        await tb.Completion;
    }
}
public async Task TestOrdering_Async_OrderedDisabled()
{
    // If ordering were enabled, this test would hang.
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false };

    var tasks = new TaskCompletionSource<int>[10];
    for (int i = 0; i < tasks.Length; i++)
    {
        tasks[i] = new TaskCompletionSource<int>();
    }

    var tb = new TransformBlock<int, int>(i => tasks[i].Task, options);
    tb.PostRange(0, tasks.Length);

    for (int i = tasks.Length - 1; i >= 0; i--)
    {
        tasks[i].SetResult(i);
        Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
    }

    tb.Complete();
    await tb.Completion;
}
[InlineData(2, 1, false)] // no forced ordering, but dop == 1, so it doesn't matter
public async Task TestOrdering_Async_OrderedEnabled(int mmpt, int dop, bool? EnsureOrdered)
{
    const int iters = 1000;
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
    if (EnsureOrdered == null)
    {
        Assert.True(options.EnsureOrdered);
    }
    else
    {
        options.EnsureOrdered = EnsureOrdered.Value;
    }

    var tb = new TransformBlock<int, int>(i => Task.FromResult(i), options);
    tb.PostRange(0, iters);
    for (int i = 0; i < iters; i++)
    {
        Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
    }

    tb.Complete();
    await tb.Completion;
}
/// <summary>
/// Asynchronously update the state
/// </summary>
/// <param name="updateBlock">Builder update block</param>
/// <returns>Returns the task representing the update of the state</returns>
public Task UpdateState(Func<THeldStateBuilder, THeldStateBuilder> updateBlock)
{
    void UpdateTransaction(BehaviorSubject<THeldState> currentStateSubject)
    {
        var builder = default(THeldStateBuilder);
        builder.InitializeFrom(currentStateSubject.Value);

        try
        {
            var newState = updateBlock(builder).Build();
            currentStateSubject.OnNext(newState);
        }
        catch (Exception ex)
        {
            Console.WriteLine("ERROR: Update state transaction failed");
            Console.WriteLine(ex);
        }
    }

    var id = Guid.NewGuid();
    var tx = new KeyValuePair<Guid, TransactionFunc>(id, UpdateTransaction);
    var didSend = _transactionBufferEx.Post(tx);
    if (!didSend)
    {
        throw new ApplicationException(
            "UpdateState failed to process transaction. This probably means the BufferBlock is not initialized properly");
    }

    var completed = _transactionBufferEx.ReceiveAsync(x => x == id);
    return completed;
}
private static async Task SimpleDemoWithDelayAsync()
{
    Console.WriteLine("TransformBlockDemo has started!");

    var block = new TransformBlock<int, string>( // single-threaded by default
        async (input) =>
        {
            await Task.Delay(500).ConfigureAwait(false);
            return input.ToString();
        });

    for (int i = 0; i < 10; i++)
    {
        block.Post(i);
        Console.WriteLine($"TransformBlock input queue count: {block.InputCount}");
    }

    block.Complete(); // No more data.

    while (await block.OutputAvailableAsync().ConfigureAwait(false))
    {
        Console.WriteLine($"TransformBlock InputCount: {block.InputCount}");
        var output = await block.ReceiveAsync().ConfigureAwait(false);
        Console.WriteLine($"TransformBlock TransformOutput: {output}");
        Console.WriteLine($"TransformBlock OutputCount: {block.OutputCount}"); // will always be 0, since receiving is a blocking action and this TransformBlock is single-threaded
    }

    // wait for completion.
    await block.Completion.ConfigureAwait(false);

    Console.WriteLine("Finished!");
    Console.ReadKey();
}
public static void Run()
{
    Func<int, int> fn = n =>
    {
        Thread.Sleep(1000);
        return n * n;
    };
    var tfBlock = new TransformBlock<int, int>(fn);

    for (int i = 0; i < 10; i++)
    {
        tfBlock.Post(i);
    }

    Action<Task<int>> whenReady = task =>
    {
        int n = task.Result;
        Console.WriteLine(n);
    };

    for (int i = 0; i < 10; i++)
    {
        Task<int> resultTask = tfBlock.ReceiveAsync();
        resultTask.ContinueWith(whenReady); // When 'resultTask' is done, call 'whenReady' with the Task
    }

    Console.WriteLine("Done");
}
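// Illustrative sketch (not part of the original samples): the same ContinueWith pattern above
// can be expressed with await, which avoids touching Result inside a continuation. The method
// name RunAwaitedAsync is hypothetical; it assumes the same TransformBlock setup as Run().
public static async Task RunAwaitedAsync()
{
    Func<int, int> fn = n =>
    {
        Thread.Sleep(1000);
        return n * n;
    };
    var tfBlock = new TransformBlock<int, int>(fn);

    for (int i = 0; i < 10; i++)
    {
        tfBlock.Post(i);
    }

    for (int i = 0; i < 10; i++)
    {
        int result = await tfBlock.ReceiveAsync(); // asynchronously waits for the next output
        Console.WriteLine(result);
    }

    Console.WriteLine("Done");
}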
/// <summary>
/// Simplified method for async operations that don't need to be chained, and when the result can fit in memory
/// </summary>
public static async Task<IReadOnlyCollection<R>> BlockTransform<T, R>(this IEnumerable<T> source,
    Func<T, Task<R>> transform, int parallelism = 1, int? capacity = null,
    Action<BulkProgressInfo<R>> progressUpdate = null, TimeSpan progressPeriod = default(TimeSpan))
{
    progressPeriod = progressPeriod == default(TimeSpan) ? 10.Seconds() : progressPeriod;
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = parallelism };
    if (capacity.HasValue)
    {
        options.BoundedCapacity = capacity.Value;
    }
    var block = new TransformBlock<T, R>(transform, options);

    var totalProgress = Stopwatch.StartNew();
    var swProgress = Stopwatch.StartNew();

    // By producing asynchronously and using SendAsync we can throttle how much we take from the source
    // while consuming at the same time.
    var produce = Produce(source, block);

    var result = new List<R>();
    var newResults = new List<R>();
    while (true)
    {
        var outputAvailableTask = block.OutputAvailableAsync();
        var completedTask = await Task.WhenAny(outputAvailableTask, Task.Delay(progressPeriod));
        if (completedTask == outputAvailableTask)
        {
            var available = await outputAvailableTask;
            if (!available)
            {
                break;
            }
            var item = await block.ReceiveAsync();
            newResults.Add(item);
            result.Add(item);
        }

        var elapsed = swProgress.Elapsed;
        if (elapsed > progressPeriod)
        {
            progressUpdate?.Invoke(new BulkProgressInfo<R>(result, newResults, elapsed));
            swProgress.Restart();
            newResults.Clear();
        }
    }

    progressUpdate?.Invoke(new BulkProgressInfo<R>(result, result, totalProgress.Elapsed));
    await Task.WhenAll(produce, block.Completion);
    return result;
}
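// Illustrative sketch (not part of the original sample): the Produce helper called above is not
// shown here. A minimal hypothetical version, assuming it simply feeds the source into the block
// with SendAsync (which honors BoundedCapacity) and completes the block when the source is drained:
private static async Task Produce<T, R>(IEnumerable<T> source, TransformBlock<T, R> block)
{
    foreach (var item in source)
    {
        await block.SendAsync(item); // waits while the block's bounded capacity is full
    }
    block.Complete();
}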
private async void Receive2()
{
    while (true)
    {
        string result = await mTransBlock2.ReceiveAsync();
        if (!this.IsDisposed)
        {
            txtResult2.AppendText(result + ",");
        }
    }
}
private static async Task<List<DispatchingResult>> ConsumeProjectionsFlow(TransformBlock<List<ProjectionDescriptor>, List<DispatchingResult>> flow, int activeDescriptors, CancellationToken token)
{
    var results = new List<DispatchingResult>(activeDescriptors);
    while (await flow.OutputAvailableAsync(token).NotOnCapturedContext())
    {
        var r = await flow.ReceiveAsync(token).NotOnCapturedContext();
        results.AddRange(r);
    }
    return results;
}
private async void btnStart_Click(object sender, EventArgs e)
{
    txtResult.Clear();
    for (int i = 0; i < 100; i++)
    {
        mTransBlock.Post(i + 1);
    }

    while (!this.IsDisposed)
    {
        string r = await mTransBlock.ReceiveAsync();
        txtResult.AppendText(r + ",");
    }
}
public async Task TestProducerConsumer()
{
    foreach (TaskScheduler scheduler in new[] { TaskScheduler.Default, new ConcurrentExclusiveSchedulerPair().ConcurrentScheduler })
    foreach (int maxMessagesPerTask in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
    foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
    foreach (int dop in new[] { 1, 2 })
    foreach (bool sync in DataflowTestHelpers.BooleanValues)
    {
        const int Messages = 100;
        var options = new ExecutionDataflowBlockOptions
        {
            BoundedCapacity = boundedCapacity,
            MaxDegreeOfParallelism = dop,
            MaxMessagesPerTask = maxMessagesPerTask,
            TaskScheduler = scheduler
        };
        TransformBlock<int, int> tb = sync ?
            new TransformBlock<int, int>(i => i, options) :
            new TransformBlock<int, int>(i => TaskShim.Run(() => i), options);

        await TaskShim.WhenAll(
            TaskShim.Run(async delegate
            {
                // consumer
                int i = 0;
                while (await tb.OutputAvailableAsync())
                {
                    Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                    i++;
                }
            }),
            TaskShim.Run(async delegate
            {
                // producer
                for (int i = 0; i < Messages; i++)
                {
                    await tb.SendAsync(i);
                }
                tb.Complete();
            }));
    }
}
private async Task<List<MessageEnvelope>> ConsumeDeserializedEnvelopes(TransformBlock<MessageRaw, MessageEnvelope> deserializeBlock, CancellationToken token)
{
    var capacity = deserializeBlock.InputCount + deserializeBlock.OutputCount;
    if (capacity == 0)
    {
        capacity = BatchSize / 4;
    }

    var envelopes = new List<MessageEnvelope>(capacity);
    while (await deserializeBlock.OutputAvailableAsync(token).NotOnCapturedContext())
    {
        var e = await deserializeBlock.ReceiveAsync(token).NotOnCapturedContext();
        envelopes.Add(e);
    }
    return envelopes;
}
private async Task<List<DispatchingResult>> ConsumeProjectionDispatchersFlow(TransformBlock<DispatchingContext, DispatchingResult> flow, CancellationToken token)
{
    var capacity = flow.InputCount + flow.OutputCount;
    if (capacity == 0)
    {
        capacity = BatchSize / 4;
    }

    var results = new List<DispatchingResult>(capacity);
    while (await flow.OutputAvailableAsync(token).NotOnCapturedContext())
    {
        var e = await flow.ReceiveAsync(token).NotOnCapturedContext();
        results.Add(e);
    }
    return results;
}
public async Task<WebSocket> AcceptWebSocketAsync(CancellationToken token)
{
    try
    {
        var result = await _negotiationQueue.ReceiveAsync(token).ConfigureAwait(false);
        if (result.Error != null)
        {
            result.Error.Throw();
            return null;
        }
        else
        {
            return result.Result;
        }
    }
    catch (OperationCanceledException)
    {
        return null;
    }
}
/// <summary>
/// Executes a ForEach loop in which executions may run in parallel. Returns a set of correlated values.
/// </summary>
/// <typeparam name="TSource">The type of items in the source data.</typeparam>
/// <typeparam name="TValue">The type of items to return.</typeparam>
/// <param name="source">An enumerable data source.</param>
/// <param name="options">Options that control the loop execution.</param>
/// <param name="func">The delegate that is invoked once per iteration.</param>
/// <returns>A dictionary that maps each source item to its computed value.</returns>
public static async ValueTask<IReadOnlyDictionary<TSource, TValue>> ForEachAsync<TSource, TValue>(IEnumerable<TSource> source, ParallelOptions options, Func<TSource, Task<KeyValuePair<TSource, TValue>>> func)
{
    if (source == null)
    {
        return ImmutableDictionary<TSource, TValue>.Empty;
    }
    if (func == null)
    {
        throw new ArgumentNullException(nameof(func));
    }

    var dict = new ConcurrentDictionary<TSource, TValue>();
    var opt = Build(options);
    var block = new TransformBlock<TSource, KeyValuePair<TSource, TValue>>(func, opt);

    // Send
    var count = 0;
    foreach (var item in source)
    {
        await block.SendAsync(item, opt.CancellationToken).ConfigureAwait(false);
        count++;
    }

    // Receive
    for (var i = 0; i < count; i++)
    {
        var value = await block.ReceiveAsync(opt.CancellationToken).ConfigureAwait(false);
        dict[value.Key] = value.Value;
    }

    block.Complete();
    await block.Completion.ConfigureAwait(false);

    return dict;
}
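// Illustrative usage sketch (not part of the original sample): a hypothetical call site for the
// ForEachAsync helper above. The input strings and the simulated work are assumptions; the only
// real API used is ForEachAsync as declared above.
public static async Task ForEachAsyncUsageExample()
{
    var items = new[] { "alpha", "beta", "gamma" };
    var options = new ParallelOptions { MaxDegreeOfParallelism = 4 };

    // The delegate returns a KeyValuePair so each result stays correlated with its source item.
    IReadOnlyDictionary<string, int> lengths = await ForEachAsync<string, int>(items, options,
        async item =>
        {
            await Task.Delay(10); // simulate async work
            return new KeyValuePair<string, int>(item, item.Length);
        });

    foreach (var pair in lengths)
    {
        Console.WriteLine($"{pair.Key}: {pair.Value}");
    }
}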
public async Task TestOrdering()
{
    const int iters = 1000;
    foreach (int mmpt in new[] { DataflowBlockOptions.Unbounded, 1 })
    {
        foreach (int dop in new[] { 1, 2, DataflowBlockOptions.Unbounded })
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
            var tb = new TransformBlock<int, int>(i => i, options);
            tb.PostRange(0, iters);
            for (int i = 0; i < iters; i++)
            {
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }
            tb.Complete();
            await tb.Completion;
        }
    }
}
public async Task TestCancellationExceptionsIgnored()
{
    var t = new TransformBlock<int, int>(i =>
    {
        if ((i % 2) == 0)
        {
            throw new OperationCanceledException();
        }
        return i;
    });

    t.PostRange(0, 2);
    t.Complete();
    for (int i = 0; i < 2; i++)
    {
        if ((i % 2) != 0)
        {
            Assert.Equal(expected: i, actual: await t.ReceiveAsync());
        }
    }

    await t.Completion;
}
public async Task<IEnumerable<Task<Indexed<PlaceFileResult>>>> FetchThenPutBulkAsync(OperationContext context, IReadOnlyList<ContentHashWithPath> args, IContentSession contentSession)
{
    var putFilesBlock =
        new TransformBlock<Indexed<ContentHashWithPath>, Indexed<PlaceFileResult>>(
            async indexed =>
            {
                return new Indexed<PlaceFileResult>(await CreateTempAndPutAsync(context, indexed.Item.Hash, contentSession), indexed.Index);
            },
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = _maxParallelPlaces });

    putFilesBlock.PostAll(args.AsIndexed());

    var copyFilesLocally =
        await Task.WhenAll(
            Enumerable.Range(0, args.Count).Select(i => putFilesBlock.ReceiveAsync(context.Token)));
    putFilesBlock.Complete();

    return copyFilesLocally.AsTasks();
}
/// <summary>
/// Executes a For loop in which executions may run in parallel. Returns a set of correlated values.
/// </summary>
/// <typeparam name="TValue">The type of items to return.</typeparam>
/// <param name="fromInclusive">The start index, inclusive.</param>
/// <param name="toExclusive">The end index, exclusive.</param>
/// <param name="options">Options that control the loop execution.</param>
/// <param name="func">The delegate that is invoked once per iteration.</param>
/// <returns>A dictionary that maps each index to its computed value.</returns>
public static async Task<IReadOnlyDictionary<int, TValue>> ForAsync<TValue>(int fromInclusive, int toExclusive, ParallelOptions options, Func<int, Task<TValue>> func)
{
    if (toExclusive < fromInclusive)
    {
        throw new ArgumentOutOfRangeException(nameof(toExclusive));
    }
    if (func == null)
    {
        throw new ArgumentNullException(nameof(func));
    }

    var dict = new ConcurrentDictionary<int, TValue>();
    var opt = Build(options);
    var block = new TransformBlock<int, TValue>(func, opt);

    // Send
    for (var i = fromInclusive; i < toExclusive; i++)
    {
        await block.SendAsync(i, opt.CancellationToken).ConfigureAwait(false);
    }

    // Receive
    for (var i = fromInclusive; i < toExclusive; i++)
    {
        var value = await block.ReceiveAsync(opt.CancellationToken).ConfigureAwait(false);
        dict[i] = value;
    }

    block.Complete();
    await block.Completion.ConfigureAwait(false);

    return dict;
}
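// Illustrative usage sketch (not part of the original sample): a hypothetical call site for the
// ForAsync helper above, squaring a range of indices in parallel. The method name and values are
// assumptions for illustration only.
public static async Task ForAsyncUsageExample()
{
    var options = new ParallelOptions { MaxDegreeOfParallelism = 2 };

    // Computes i * i for i in [0, 10); results come back keyed by the loop index.
    IReadOnlyDictionary<int, int> squares = await ForAsync<int>(0, 10, options, async i =>
    {
        await Task.Delay(10); // simulate async work
        return i * i;
    });

    Console.WriteLine(squares[3]); // 9
}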
private static async Task SimpleDemoWithParallelismAsync()
{
    Console.WriteLine("TransformBlockDemo has started!");

    var block = new TransformBlock<int, string>(
        async (input) =>
        {
            await Task.Delay(500).ConfigureAwait(false);
            return input.ToString();
        },
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4 }); // the same code as the demo above, made parallel by adjusting options instead of code

    for (int i = 0; i < 10; i++)
    {
        block.Post(i);
        Console.WriteLine($"TransformBlock input queue count: {block.InputCount}");
    }

    block.Complete(); // No more data.

    while (await block.OutputAvailableAsync().ConfigureAwait(false))
    {
        Console.WriteLine($"TransformBlock InputCount: {block.InputCount}");
        var output = await block.ReceiveAsync().ConfigureAwait(false);
        Console.WriteLine($"TransformBlock TransformOutput: {output}");
        Console.WriteLine($"TransformBlock OutputCount: {block.OutputCount}");
    }

    // wait for completion.
    await block.Completion.ConfigureAwait(false);

    Console.WriteLine("Finished!");
    Console.ReadKey();
}
private static async void Example2() // NOTE: asynchronous receiving of data
{
    Func<int, int> fn = i =>
    {
        Thread.Sleep(TimeSpan.FromSeconds(1));
        return i * i;
    };

    var transformBlock = new TransformBlock<int, int>(fn);
    for (var i = 0; i < 10; i++)
    {
        transformBlock.Post(i);
    }

    for (var i = 0; i < 10; i++)
    {
        var result = await transformBlock.ReceiveAsync();
        Console.WriteLine(result);
    }

    Console.WriteLine("Done");
}
public static async Task<IReadOnlyCollection<GraphTaskResult>> Run(this TaskGraph tasks, int parallel, ILogger log, CancellationToken cancel)
{
    async Task<GraphTaskResult> RunTask(GraphTask task)
    {
        var sw = Stopwatch.StartNew();

        GraphTaskResult Result(Exception ex = null) =>
            new GraphTaskResult
            {
                Name = task.Name,
                FinalStatus = task.Status,
                Duration = sw.Elapsed,
                Exception = ex
            };

        try
        {
            if (cancel.IsCancellationRequested || tasks.DependenciesDeep(task).Any(d => d.Status.In(Cancelled, Error)))
            {
                task.Status = Cancelled;
                return Result();
            }

            task.Status = Running;
            log = log.ForContext("Task", task.Name);
            await task.Run(log, cancel);

            if (cancel.IsCancellationRequested)
            {
                task.Status = Cancelled;
            }
            else
            {
                task.Status = Success;
            }
            return Result();
        }
        catch (Exception ex)
        {
            task.Status = Error;
            log.Error(ex, "Task {Task} failed: {Message}", task.Name, ex.Message);
            return Result(ex);
        }
    }

    var block = new TransformBlock<GraphTask, GraphTaskResult>(RunTask,
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = parallel });

    var newTaskSignal = new AsyncManualResetEvent(true);

    async Task Producer()
    {
        while (!tasks.AllComplete)
        {
            if (cancel.IsCancellationRequested)
            {
                foreach (var t in tasks.All.Where(t => t.Status.IsIncomplete()))
                {
                    t.Status = Cancelled;
                }
            }

            var tasksToAdd = tasks.AvailableToRun().ToList();
            if (tasksToAdd.IsEmpty())
            {
                // If no tasks are ready to start, wait to either be signaled or log which tasks are still running.
                var logTimeTask = Task.Delay(1.Minutes(), cancel);
                await Task.WhenAny(logTimeTask, newTaskSignal.WaitAsync());
                if (newTaskSignal.IsSet)
                {
                    newTaskSignal.Reset();
                }
                if (logTimeTask.IsCompleted)
                {
                    log.Debug("Waiting for {TaskList} to complete", tasks.Running.Select(t => t.Name));
                }
            }

            foreach (var task in tasksToAdd)
            {
                task.Status = Queued;
                await block.SendAsync(task);
            }
        }
        block.Complete();
    }

    var producer = Producer();

    var taskResults = new List<GraphTaskResult>();
    while (await block.OutputAvailableAsync())
    {
        var item = await block.ReceiveAsync();
        taskResults.Add(item);
        newTaskSignal.Set();
    }

    await Task.WhenAll(producer, block.Completion);
    return taskResults;
}
private async Task<IEnumerable<Task<Indexed<PinResult>>>> UpdateDedupStoreAsync(
    Context context, IReadOnlyList<ContentHash> contentHashes, CancellationToken cts)
{
    if (!contentHashes.Any())
    {
        return (new List<Task<Indexed<PinResult>>>()).AsEnumerable();
    }

    var dedupIdentifiers = contentHashes.Select(c => ToVstsBlobIdentifier(c.ToBlobIdentifier()).ToDedupIdentifier());

    var tryReferenceBlock = new TransformBlock<Indexed<VstsDedupIdentifier>, Indexed<PinResult>>(
        async i =>
        {
            PinResult pinResult;
            if (i.Item.AlgorithmId == Hashing.ChunkDedupIdentifier.ChunkAlgorithmId)
            {
                pinResult = await TryPinChunkAsync(context, i.Item, cts);
            }
            else
            {
                pinResult = await TryPinNodeAsync(context, i.Item, cts);
            }

            if (pinResult.Succeeded)
            {
                BackingContentStoreExpiryCache.Instance.AddExpiry(new ContentHash(HashType.DedupNodeOrChunk, i.Item.Value), EndDateTime);
            }

            return pinResult.WithIndex(i.Index);
        },
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DefaultMaxParallelism });

    tryReferenceBlock.PostAll(dedupIdentifiers.AsIndexed());
    var results = await Task.WhenAll(Enumerable.Range(0, dedupIdentifiers.ToList().Count).Select(i => tryReferenceBlock.ReceiveAsync()));
    tryReferenceBlock.Complete();

    return results.AsTasks().ToList();
}
/// <summary>
/// Update all chunks if they exist. Returns success only if all chunks are found and extended.
/// </summary>
private async Task<PinResult> TryPinChunksAsync(Context context, IEnumerable<VstsDedupIdentifier> dedupIdentifiers, CancellationToken cts)
{
    if (!dedupIdentifiers.Any())
    {
        return PinResult.Success;
    }

    // TODO: Support batched TryKeepUntilReferenceChunkAsync in Artifact. (bug 1428612)
    var tryReferenceBlock = new TransformBlock<VstsDedupIdentifier, PinResult>(
        async dedupId => await TryPinChunkAsync(context, dedupId, cts),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DefaultMaxParallelism });

    tryReferenceBlock.PostAll(dedupIdentifiers);
    var pinResults = await Task.WhenAll(Enumerable.Range(0, dedupIdentifiers.ToList().Count).Select(i => tryReferenceBlock.ReceiveAsync()));
    tryReferenceBlock.Complete();

    foreach (var result in pinResults)
    {
        if (!result.Succeeded)
        {
            return result; // An error occurred while updating one of the chunks. Fail fast.
        }
    }

    return PinResult.Success;
}