// Builds a recursive directory-walk pipeline. dirToFilesBlock expands each posted
// path via GetFileSystemItems; entries that are directories are fed back into the
// same block (self-link) to recurse, while entries passing IsRequiredDocType flow
// to fileActionBlock, which runs ProcessFile on the scheduler captured from the
// current SynchronizationContext (typically the UI thread).
// The self-link is registered before the file link on purpose (see inline comment):
// link registration order determines which target is offered each message first.
// NOTE(review): ProcessingIsComplete() is awaited before Complete() is called —
// presumably it detects quiescence of the recursive walk; confirm, since the
// self-link means the block never drains on its own.
private async Task Start(string path) { var uiScheduler = TaskScheduler.FromCurrentSynchronizationContext(); dirToFilesBlock = new TransformManyBlock <string, string>((Func <string, IEnumerable <string> >)(GetFileSystemItems), new ExecutionDataflowBlockOptions() { CancellationToken = ct }); fileActionBlock = new ActionBlock <string>((Action <string>)ProcessFile, new ExecutionDataflowBlockOptions() { CancellationToken = ct, TaskScheduler = uiScheduler }); // Order of LinkTo's important here! dirToFilesBlock.LinkTo(dirToFilesBlock, new DataflowLinkOptions() { PropagateCompletion = true }, IsDirectory); dirToFilesBlock.LinkTo(fileActionBlock, new DataflowLinkOptions() { PropagateCompletion = true }, IsRequiredDocType); // Kick off the recursion. dirToFilesBlock.Post(path); await ProcessingIsComplete(); dirToFilesBlock.Complete(); await Task.WhenAll(dirToFilesBlock.Completion, fileActionBlock.Completion); }
// Two-stage face-decision pipeline. ProcessingBlock runs Mock.DetermineAccessAsync
// over incoming face streams with the requested parallelism; when dropWhenFull is
// set its input is bounded to maxDegreeOfParallelism (excess posts are rejected),
// otherwise it is unbounded. DecisionBlock counts failed decisions and raises
// DecisionAvailable with the final verdict: immediately on the first true result,
// or once more than 7 failures have accumulated — then it stops further input by
// completing ProcessingBlock.
// NOTE(review): completion flows via an explicit ContinueWith rather than
// DataflowLinkOptions.PropagateCompletion, so a fault in ProcessingBlock completes
// DecisionBlock without forwarding the exception — confirm that is intentional.
// NOTE(review): the failure threshold 7 is a magic number — consider naming it.
public FaceDecisionHelper(int maxDegreeOfParallelism, bool dropWhenFull) { ProcessingBlock = new TransformManyBlock <IFaceStream, bool>( transform: Mock.DetermineAccessAsync, dataflowBlockOptions: new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, BoundedCapacity = dropWhenFull ? maxDegreeOfParallelism : DataflowBlockOptions.Unbounded, }); DecisionBlock = new ActionBlock <bool>(decision => { bool?finalDecision = null; if (!decision) { ++_failureCount; } if (decision || _failureCount > 7) { finalDecision = decision; } if (finalDecision.HasValue) { DecisionAvailable?.Invoke(this, finalDecision.Value); ProcessingBlock.Complete(); } }); ProcessingBlock.LinkTo(DecisionBlock); ProcessingBlock.Completion.ContinueWith(delegate { DecisionBlock.Complete(); }); }
// Regression test: with DOP=2, the two transforms' iterators block on each other
// (each sets its partner's release event before waiting on its own). If the block
// iterated the yielded enumerables while holding an internal lock, the two
// iterations could not run concurrently and this test would deadlock; instead
// both must yield their 42 and the block must complete cleanly.
public async Task TestOrdering_Sync_BlockingEnumeration_NoDeadlock(bool ensureOrdered) { // If iteration of the yielded enumerables happened while holding a lock, this would deadlock. var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = ensureOrdered }; ManualResetEventSlim mres1 = new ManualResetEventSlim(), mres2 = new ManualResetEventSlim(); var tb = new TransformManyBlock <int, int>(i => i == 0 ? BlockableIterator(mres1, mres2) : BlockableIterator(mres2, mres1), options); tb.Post(0); tb.Post(1); Assert.Equal(42, await tb.ReceiveAsync()); Assert.Equal(42, await tb.ReceiveAsync()); tb.Complete(); await tb.Completion; IEnumerable <int> BlockableIterator(ManualResetEventSlim wait, ManualResetEventSlim release) { release.Set(); wait.Wait(); yield return(42); } }
public async Task TestOrdering()
{
    // Outputs must arrive in input order for every combination of
    // MaxMessagesPerTask and MaxDegreeOfParallelism.
    const int TotalOutputs = 9999;
    int[] messagesPerTaskValues = { DataflowBlockOptions.Unbounded, 1 };
    int[] parallelismValues = { 1, 2, DataflowBlockOptions.Unbounded };
    foreach (int messagesPerTask in messagesPerTaskValues)
    {
        foreach (int parallelism in parallelismValues)
        {
            var blockOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = parallelism,
                MaxMessagesPerTask = messagesPerTask
            };
            // Each input n expands to the triple { n, n+1, n+2 }, so posting
            // every third value produces the full 0..TotalOutputs-1 sequence.
            var block = new TransformManyBlock <int, int>(
                n => new[] { n, n + 1, n + 2 }, blockOptions);
            for (int input = 0; input < TotalOutputs; input += 3)
            {
                Assert.True(block.Post(input));
            }
            for (int expected = 0; expected < TotalOutputs; expected++)
            {
                Assert.Equal(expected: expected, actual: await block.ReceiveAsync());
            }
            block.Complete();
            await block.Completion;
        }
    }
}
internal static void Run()
{
    const string firstWord = "Hello";
    const string secondWord = "World";

    // Stage 1: split each incoming string into its individual characters.
    var splitter = new TransformManyBlock <string, char>(text => text.ToCharArray());

    // Stage 2: collect the characters; completion is forwarded by hand since
    // the link does not propagate it.
    var collected = new BufferBlock <char>();
    splitter.LinkTo(collected);
    splitter.Completion.ContinueWith(_ => collected.Complete());

    // Feed both words, signal end of input, and wait for the splitter to drain.
    splitter.SendAsync(firstWord);
    splitter.SendAsync(secondWord);
    splitter.Complete();
    splitter.Completion.Wait();

    // Drain the buffer; spin until a batch becomes available.
    IList <char> output;
    while (!collected.TryReceiveAll(out output))
    {
        // Needed if we don't wait for transformManyBlock.Completion
        Console.Write(".");
        Thread.SpinWait(10000);
    }
    Console.WriteLine(output == null ? "null" : string.Join(", ", output));
}
public async Task TestLinkToOptionsAsyncEnumerable()
{
    const int Messages = 1;
    foreach (bool append in DataflowTestHelpers.BooleanValues)
    {
        var source = new TransformManyBlock <int, int>(DataflowTestHelpers.ToAsyncEnumerable);
        var received = new int[Messages];
        var sinks = new ActionBlock <int> [Messages];
        for (int index = 0; index < Messages; index++)
        {
            int slot = index;
            sinks[index] = new ActionBlock <int>(item => received[slot] = item);
            // Each link accepts at most one message; Append controls whether the
            // link goes to the back or the front of the link list.
            source.LinkTo(sinks[index], new DataflowLinkOptions { MaxMessages = 1, Append = append });
        }
        source.PostRange(0, Messages);
        source.Complete();
        await source.Completion;
        // Appended links receive in registration order; prepended links reverse it.
        for (int index = 0; index < Messages; index++)
        {
            Assert.Equal(
                expected: append ? index : Messages - index - 1,
                actual: received[index]);
        }
    }
}
// Verifies a block constructed with an already-canceled token rejects all work:
// Post/SendAsync fail immediately, TryReceive/TryReceiveAll find nothing, LinkTo
// still returns a disposable link, Completion faults with an
// OperationCanceledException, and a late Complete() call is a harmless no-op.
public async Task TestPrecanceled() { var bb = new TransformManyBlock <int, int>(DataflowTestHelpers.ToEnumerable, new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(canceled: true) }); int ignoredValue; IList <int> ignoredValues; IDisposable link = bb.LinkTo(DataflowBlock.NullTarget <int>()); Assert.NotNull(link); link.Dispose(); Assert.False(bb.Post(42)); var t = bb.SendAsync(42); Assert.True(t.IsCompleted); Assert.False(t.Result); Assert.False(bb.TryReceiveAll(out ignoredValues)); Assert.False(bb.TryReceive(out ignoredValue)); Assert.NotNull(bb.Completion); await Assert.ThrowsAnyAsync <OperationCanceledException>(() => bb.Completion); bb.Complete(); // just make sure it doesn't throw }
// Starts asynchronous downloads for the given file ids and returns the block as a
// source the caller can link/consume while downloads are still in flight (the
// block is already completed for input, so it finishes once all URLs are done).
// TransformManyBlock is used (rather than TransformBlock) so DownloadFile can
// yield zero or one result per URL. When a security cookie value is supplied it
// is attached to each request via a fresh HttpClientHandler.
// NOTE(review): a new HttpClientHandler is created per URL — if DownloadFile
// wraps each in its own HttpClient this risks socket exhaustion under load;
// consider a shared handler/IHttpClientFactory (verify DownloadFile's usage).
public ISourceBlock <DownloadedFile> DownloadFiles(string[] fileIds, string securityCookieString, string securityCookieDomain) { var urls = CreateUrls(fileIds); // we have to use TransformManyBlock here, because we want to be able to return 0 or 1 items var block = new TransformManyBlock <string, DownloadedFile>( async url => { var httpClientHandler = new HttpClientHandler(); if (!string.IsNullOrEmpty(securityCookieString)) { var securityCookie = new Cookie(FormsAuthentication.FormsCookieName, securityCookieString); securityCookie.Domain = securityCookieDomain; httpClientHandler.CookieContainer.Add(securityCookie); } return(await DownloadFile(url, httpClientHandler)); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Properties.Settings.Default.maxConcurrentDownloads }); foreach (var url in urls) { block.Post(url); } block.Complete(); return(block); }
public IEnumerable <IMetaData> TransformManyBlockUsage(string stringToSplit)
{
    Console.WriteLine($"Inside {nameof(TplDataflow1ExecutionBlocksController)} - {nameof(TransformManyBlockUsage)}");

    // Stage 1: split the input string into individual tokens.
    var splitterBlock = new TransformManyBlock <string, string>(input =>
        Functions.SplitAnInputStringIntoArray(input, SplitterSeparator)
    );

    // Stage 2: turn each token into a metadata instance.
    var metadataBlock = new TransformBlock <string, IMetaData>(token =>
        Functions.CreateASingleMedatadataFromAString(token)
    );

    // Wire the two stages into a pipeline.
    splitterBlock.LinkTo(metadataBlock, DataflowOptions.LinkOptions);

    // Push the single input through and signal that no more input follows.
    splitterBlock.Post(stringToSplit);
    splitterBlock.Complete();

    // Mirrors splitterBlock.OutputCount: the number of tokens the split yields.
    var outputCount = stringToSplit?.Split(SplitterSeparator, StringSplitOptions.RemoveEmptyEntries).Length ?? 0;
    for (var index = 0; index < outputCount; index++)
    {
        yield return(metadataBlock.Receive());
    }
}
// Stress-tests a block linked to itself: each value i yields i + 1, which is fed
// straight back in, until the transform sees a value >= Iters and signals the
// TaskCompletionSource. Exercises both the synchronous and Task-returning
// constructor variants; the self-link is disposed (via using) once the chain has
// reached the target count, then the block is completed.
public async Task TestCircularLinking() { const int Iters = 200; foreach (bool sync in DataflowTestHelpers.BooleanValues) { var tcs = new TaskCompletionSource <bool>(); Func <int, IEnumerable <int> > body = i => { if (i >= Iters) { tcs.SetResult(true); } return(Enumerable.Repeat(i + 1, 1)); }; TransformManyBlock <int, int> tb = sync ? new TransformManyBlock <int, int>(body) : new TransformManyBlock <int, int>(i => Task.Run(() => body(i))); using (tb.LinkTo(tb)) { tb.Post(0); await tcs.Task; tb.Complete(); } } }
public async Task FindProjectJsonAsync()
{
    // Both stages fan out aggressively across the thread pool; blocks snapshot
    // their options at construction, so a single instance can configure both.
    var parallelOptions = new ExecutionDataflowBlockOptions()
    {
        MaxDegreeOfParallelism = Environment.ProcessorCount * 4
    };

    var searchBlock = new TransformManyBlock <GitHubRepo, SearchResult>(
        repo => SearchRepoAsync(repo), parallelOptions);
    var downloadBlock = new ActionBlock <SearchResult>(DownloadFileAsync, parallelOptions);

    searchBlock.LinkTo(downloadBlock, new DataflowLinkOptions() { PropagateCompletion = true });

    // Stop feeding new repos as soon as cancellation is requested.
    foreach (var repo in _storage.GetAllRepos())
    {
        if (_cancelToken.IsCancellationRequested)
        {
            break;
        }
        searchBlock.Post(repo);
    }

    searchBlock.Complete();
    await downloadBlock.Completion;
}
// Shows completion propagation with two sources linked to one target: once
// block1 completes, completion starts propagating into the shared target, so the
// later message from block2 is never accepted. The target only finishes after
// its buffered output ('a') has been received and drained.
// NOTE(review): this relies on block1's completion reaching the target before
// block2.Post("b") is offered — inherently timing-sensitive; AssertCompletes
// presumably bounds the waits.
public async Task TestMultiplePublishersWithPropagateCompletion() { var block1 = new TransformManyBlock <string, char>(x => x.ToCharArray()); var block2 = new TransformManyBlock <string, char>(x => x.ToCharArray()); var target = new BufferBlock <char>(); var propagate = new DataflowLinkOptions { PropagateCompletion = true }; block1.LinkTo(target, propagate); block2.LinkTo(target, propagate); block1.Post("a"); // This propagates completion through to target block1.Complete(); await AssertCompletes(block1.Completion); Assert.IsTrue(await target.OutputAvailableAsync(), "target should have message waiting"); Assert.IsTrue(!target.Completion.IsCompleted, "target won't be complete until buffer empty"); // The target won't receive this, because it's already started completing (via propagation) block2.Post("b"); block2.Complete(); // Pulling this out of the buffer allows the target to complete Assert.AreEqual('a', target.Receive()); // This sholud happen pretty quickly await AssertCompletes(target.Completion); }
// Wires a dataflow loop: input -> handler; handled messages that were processed
// go to output, unprocessed ones are re-queued into the handler block itself.
// Completion is propagated manually (PropagateCompletion = false on every link)
// because of the handler's self-link — automatic propagation would complete the
// handler while re-queued messages were still circulating.
// NOTE(review): passing an async lambda to ContinueWith yields a Task<Task> whose
// inner task is never awaited — HandleMessageBlock.Complete() runs
// fire-and-forget and any exception from HandleMessageIsComplete() is
// unobserved. Consider ContinueWith(...).Unwrap() or an async helper.
public LoopDataflow1() { InputMessageBlock = new TransformBlock <Message, Message>(async msg => await InputMessage(msg)); HandleMessageBlock = new TransformManyBlock <Message, Message>(async msg => await HandleMessage(msg)); OutputMessageBlock = new ActionBlock <Message>(msg => OutputMessage(msg)); var linkOptions = new DataflowLinkOptions() { PropagateCompletion = false }; InputMessageBlock.LinkTo(HandleMessageBlock, linkOptions); HandleMessageBlock.LinkTo(OutputMessageBlock, linkOptions, msg => msg.WasProcessed == true); HandleMessageBlock.LinkTo(HandleMessageBlock, linkOptions, msg => msg.WasProcessed == false); InputMessageBlock.Completion.ContinueWith(async tsk => { await HandleMessageIsComplete(); HandleMessageBlock.Complete(); }); HandleMessageBlock.Completion.ContinueWith(tsk => { OutputMessageBlock.Complete(); }); //DebuggingLoop(); }
// Renders the scene described by CoreValues: GetSampleRays fans the single
// kick-off value (0) out into per-pixel sample work items, which DoMergeColors
// folds into the Image buffer with EnsureOrdered = false and
// MaxDegreeOfParallelism = -1 (-1 == DataflowBlockOptions.Unbounded); completion
// propagates through the link, the method blocks until merging finishes, then
// saves the image.
// NOTE(review): Completion.Wait() blocks the calling thread — acceptable for a
// batch renderer, but would freeze a UI thread if called there.
public void Start(CoreValues cv) { CoreValues = cv; MyCamera = new Camera(cv.Origin, cv.LookAt, cv.VecUp, cv.FovY, cv.AspectRatio, cv.Aperture, cv.DistToFocus); Image = new Vec3[cv.Width * cv.Height]; World = HitableList.DefaultWorld; Count = cv.Width * cv.Height; PixelFloats = new TransformManyBlock <int, InfoStruct>(GetSampleRays); MergeColors = new ActionBlock <InfoStruct>(DoMergeColors, new ExecutionDataflowBlockOptions { EnsureOrdered = false, MaxDegreeOfParallelism = -1 }); //writeImage = new ActionBlock<InfoStruct>(DoWriteImage, new ExecutionDataflowBlockOptions { EnsureOrdered = false, MaxDegreeOfParallelism = -1 }); var linkOptions = new DataflowLinkOptions { PropagateCompletion = true }; PixelFloats.LinkTo(MergeColors, linkOptions); //MergeColors.LinkTo(writeImage, linkOptions); PixelFloats.Post(0); PixelFloats.Complete(); MergeColors.Completion.Wait(); DoSaveImage(); }
// With EnsureOrdered = false and DOP = 2, item 0 is stalled on an event, so item
// 1's output must be deliverable first; setting the event then lets item 0
// finish. trustedEnumeration toggles the transform's return between a concrete
// array and a lazy Enumerable.Repeat sequence — presumably exercising the
// block's optimized handling of known collection types versus generic
// enumerables (confirm against the block implementation).
public async Task TestOrdering_Sync_OrderedDisabled(bool trustedEnumeration) { // If ordering were enabled, this test would hang. var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = false }; var mres = new ManualResetEventSlim(); var tb = new TransformManyBlock <int, int>(i => { if (i == 0) { mres.Wait(); } return(trustedEnumeration ? new[] { i } : Enumerable.Repeat(i, 1)); }, options); tb.Post(0); tb.Post(1); Assert.Equal(1, await tb.ReceiveAsync()); mres.Set(); Assert.Equal(0, await tb.ReceiveAsync()); tb.Complete(); await tb.Completion; }
internal void start()
{
    var firstConsumer = Consumer("consumer 1");
    var secondConsumer = Consumer("\t\tconsumer 2");

    // Expands each posted count into the range [0, count).
    var producer = new TransformManyBlock <int, int>(count => Enumerable.Range(0, count));

    // Both links propagate completion; the link registered first is offered
    // each message first.
    producer.LinkTo(firstConsumer, new DataflowLinkOptions { PropagateCompletion = true, });
    producer.LinkTo(secondConsumer, new DataflowLinkOptions { PropagateCompletion = true });

    if (!producer.Post(10))
    {
        Console.WriteLine("Failed Post");
    }
    producer.Complete();

    // Report each block's final state when it completes.
    producer.Completion.ContinueWith(task => print_status(task, "producer"));
    firstConsumer.Completion.ContinueWith(task => print_status(task, "consumer 1"));
    secondConsumer.Completion.ContinueWith(task => print_status(task, "\t\tconsumer 2"));
}
public override async Task ProcessAsync()
{
    // All stages share one degree-of-parallelism setting.
    var blockSettings = new ExecutionDataflowBlockOptions()
    {
        MaxDegreeOfParallelism = 10,
    };

    // Pipeline: list files -> extract vehicles -> convert to trucks ->
    // double the doors -> batch by 10 -> persist each batch.
    var listFilesBlock = new TransformManyBlock <string, string>(_fileSystem.GetFileNames, blockSettings);
    var getVehiclesBlock = new TransformManyBlock <string, IVehicle>(GetVehicles, blockSettings);
    var toTruckBlock = new TransformBlock <IVehicle, Truck>(TransformAsync, blockSettings);
    var doubleDoorsBlock = new TransformBlock <Truck, Truck>(DoubleDoorsAsync, blockSettings);
    var batchingBlock = new BatchBlock <Truck>(10);
    var persistBlock = new ActionBlock <IEnumerable <Truck> >(SaveTrucksAsync, blockSettings);

    // Completion flows down the whole chain, so awaiting the tail covers it all.
    DataflowLinkOptions propagate = new DataflowLinkOptions() { PropagateCompletion = true };
    listFilesBlock.LinkTo(getVehiclesBlock, propagate);
    getVehiclesBlock.LinkTo(toTruckBlock, propagate);
    toTruckBlock.LinkTo(doubleDoorsBlock, propagate);
    doubleDoorsBlock.LinkTo(batchingBlock, propagate);
    batchingBlock.LinkTo(persistBlock, propagate);

    // Seed the pipeline with the root directory and wait for the tail block.
    await listFilesBlock.SendAsync(_directory);
    listFilesBlock.Complete();
    await persistBlock.Completion;
}
// An OperationCanceledException thrown from the transform (sync or async
// variant) must be swallowed by the block: even inputs, which throw, produce no
// output and do not fault the block; odd inputs pass through unchanged and the
// block's Completion still succeeds.
public async Task TestCancellationExceptionsIgnored() { foreach (bool sync in DataflowTestHelpers.BooleanValues) { Func <int, IEnumerable <int> > body = i => { if ((i % 2) == 0) { throw new OperationCanceledException(); } return(new[] { i }); }; TransformManyBlock <int, int> t = sync ? new TransformManyBlock <int, int>(body) : new TransformManyBlock <int, int>(async i => await Task.Run(() => body(i))); t.PostRange(0, 2); t.Complete(); for (int i = 0; i < 2; i++) { if ((i % 2) != 0) { Assert.Equal(expected: i, actual: await t.ReceiveAsync()); } } await t.Completion; } }
// With unbounded DOP and EnsureOrdered = false, each result becomes available as
// soon as its backing task completes; completing the TaskCompletionSources in
// reverse order verifies outputs follow task-completion order rather than input
// order. trustedEnumeration toggles array vs. lazy-sequence results.
public async Task TestOrdering_Async_OrderedDisabled(bool trustedEnumeration) { // If ordering were enabled, this test would hang. var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false }; var tasks = new TaskCompletionSource <IEnumerable <int> > [10]; for (int i = 0; i < tasks.Length; i++) { tasks[i] = new TaskCompletionSource <IEnumerable <int> >(); } var tb = new TransformManyBlock <int, int>(i => tasks[i].Task, options); tb.PostRange(0, tasks.Length); for (int i = tasks.Length - 1; i >= 0; i--) { tasks[i].SetResult(trustedEnumeration ? new[] { i } : Enumerable.Repeat(i, 1)); Assert.Equal(expected: i, actual: await tb.ReceiveAsync()); } tb.Complete(); await tb.Completion; }
// A transform that returns a null Task must have that input silently dropped
// (no output, no fault): even inputs return null, odd inputs a single-element
// task. Only odd values are expected on the output side, and Completion must
// still succeed, across several DOP settings.
public async Task TestNullTasksIgnored() { foreach (int dop in new[] { DataflowBlockOptions.Unbounded, 1, 2 }) { var tb = new TransformManyBlock <int, int>(i => { if ((i % 2) == 0) { return(null); } return(Task.Run(() => (IEnumerable <int>) new[] { i })); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop }); const int Iters = 100; tb.PostRange(0, Iters); tb.Complete(); for (int i = 0; i < Iters; i++) { if ((i % 2) != 0) { Assert.Equal(expected: i, actual: await tb.ReceiveAsync()); } } await tb.Completion; } }
// Runs the arranged conveyor: posts the input file path, completes the head
// block, and waits for the tail (writeFilesBlock) to finish, logging any
// processing faults and recording the elapsed time.
// Throws InvalidOperationException if Arrange() has not been called first.
public async Task RunAsync()
{
    if (!IsBuilt)
    {
        Logger.LogError("Error run not arranged conveyor. Use {arrangeMethod}() before {runMethod}().", nameof(Arrange), nameof(RunAsync));
        throw new InvalidOperationException($"Error run not arranged conveyor. Use {nameof(Arrange)}() before {nameof(RunAsync)}().");
    }

    // Run conveyor and time the whole pass.
    Stopwatch stopWatch = Stopwatch.StartNew();
    try
    {
        dataNamesEnumeratorBlock.Post(filePath);
        dataNamesEnumeratorBlock.Complete();
        // Wait for the tail of the pipeline to drain.
        await writeFilesBlock.Completion;
    }
    catch (Exception ex)
    {
        // BUGFIX: `await` on a faulted dataflow Completion task throws only the
        // FIRST inner exception (await unwraps AggregateException), so the
        // previous `catch (AggregateException)` never matched and faults escaped
        // this method instead of being logged. Read the full exception set off
        // the completion task itself to log every failure.
        AggregateException faults = writeFilesBlock.Completion.Exception;
        if (faults != null)
        {
            foreach (Exception inner in faults.Flatten().InnerExceptions)
            {
                Logger.LogError("Errors while processing: {err}", inner.Message);
            }
        }
        else
        {
            // Fault did not come from the pipeline's completion (e.g. Post threw).
            Logger.LogError("Errors while processing: {err}", ex.Message);
        }
    }
    stopWatch.Stop();
    elapsedTime = stopWatch.ElapsedMilliseconds;
}
public static async Task RunSampleOne(Func <int, Task <User[]> > getUsers, Func <User, Task> writeUser, int pageCount)
{
    // Fetch pages with up to three concurrent requests; each page fans out
    // into its individual users.
    var fetchBlock = new TransformManyBlock <int, User>(
        transform: async(page) => await getUsers(page),
        dataflowBlockOptions: new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 3 });

    // Write users strictly one at a time.
    var writeBlock = new ActionBlock <User>(
        action: async(user) => await writeUser(user),
        dataflowBlockOptions: new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

    fetchBlock.LinkTo(writeBlock, new DataflowLinkOptions { PropagateCompletion = true });

    // Queue every page number (1-based), then signal end of input and wait for
    // the writer — completion propagates through the link.
    for (var page = 1; page <= pageCount; page++)
    {
        fetchBlock.Post(page);
    }
    fetchBlock.Complete();
    await writeBlock.Completion;
}
// With EnsureOrdered left at its default (asserted to be true when the
// parameter is null) or explicitly set, outputs of the Task-returning transform
// must arrive in input order across the MaxMessagesPerTask/DOP combinations
// supplied by the InlineData rows.
[InlineData(2, 1, false)] // no force ordered, but dop == 1, so it doesn't matter public async Task TestOrdering_Async_OrderedEnabled(int mmpt, int dop, bool?EnsureOrdered) { const int iters = 1000; var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt }; if (EnsureOrdered == null) { Assert.True(options.EnsureOrdered); } else { options.EnsureOrdered = EnsureOrdered.Value; } var tb = new TransformManyBlock <int, int>(i => Task.FromResult(Enumerable.Repeat(i, 1)), options); tb.PostRange(0, iters); for (int i = 0; i < iters; i++) { Assert.Equal(expected: i, actual: await tb.ReceiveAsync()); } tb.Complete(); await tb.Completion; }
// Matrix stress test: a producer task SendAsyncs 50 messages while a consumer
// task concurrently drains the block via OutputAvailableAsync/ReceiveAsync,
// asserting each input value appears elementsPerItem consecutive times, across
// every combination of scheduler, MaxMessagesPerTask, BoundedCapacity, DOP,
// elements-per-item, and sync vs. Task-returning transform. The bounded-capacity
// cases also exercise backpressure on SendAsync.
public async Task TestProducerConsumer() { foreach (TaskScheduler scheduler in new[] { TaskScheduler.Default, new ConcurrentExclusiveSchedulerPair().ConcurrentScheduler }) { foreach (int maxMessagesPerTask in new[] { DataflowBlockOptions.Unbounded, 1, 2 }) { foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 1, 2 }) { foreach (int dop in new[] { 1, 2 }) { foreach (int elementsPerItem in new[] { 1, 3, 5 }) { foreach (bool sync in DataflowTestHelpers.BooleanValues) { const int Messages = 50; var options = new ExecutionDataflowBlockOptions { BoundedCapacity = boundedCapacity, MaxDegreeOfParallelism = dop, MaxMessagesPerTask = maxMessagesPerTask, TaskScheduler = scheduler }; TransformManyBlock <int, int> tb = sync ? new TransformManyBlock <int, int>(i => Enumerable.Repeat(i, elementsPerItem), options) : new TransformManyBlock <int, int>(i => TaskShim.Run(() => Enumerable.Repeat(i, elementsPerItem)), options); await TaskShim.WhenAll( TaskShim.Run(async delegate { // consumer int i = 0; int processed = 0; while (await tb.OutputAvailableAsync()) { Assert.Equal(expected: i, actual: await tb.ReceiveAsync()); processed++; if (processed % elementsPerItem == 0) { i++; } } }), TaskShim.Run(async delegate { // producer for (int i = 0; i < Messages; i++) { await tb.SendAsync(i); } tb.Complete(); })); } } } } } } }
// Deletes all blobs in the container whose ExpirationDateMetadataKey metadata
// date is already in the past. A bounded TransformManyBlock lists blobs (flat
// listing, 50 per page, metadata included) and filters expired ones; a parallel
// ActionBlock deletes them, counting deletions via Interlocked. A 404 from
// storage is treated as "container never existed" and yields no work; other
// storage errors propagate.
// NOTE(review): ListBlobsSegmentedAsync is called once without a continuation
// token — blobs beyond the first segment are presumably not purged; confirm.
// NOTE(review): DateTime.Parse uses the current culture; InvariantCulture with a
// round-trip format would be safer for metadata timestamps.
public static async Task PurgeExpiredAsync(CloudBlobContainer inboxContainer) { Requires.NotNull(inboxContainer, "inboxContainer"); var deleteBlobsExpiringBefore = DateTime.UtcNow; int purgedBlobCount = 0; var searchExpiredBlobs = new TransformManyBlock <CloudBlobContainer, ICloudBlob>( async c => { try { var results = await c.ListBlobsSegmentedAsync( string.Empty, useFlatBlobListing: true, pageSize: 50, details: BlobListingDetails.Metadata, options: new BlobRequestOptions(), operationContext: null); return(from blob in results.OfType <ICloudBlob>() let expires = DateTime.Parse(blob.Metadata[ExpirationDateMetadataKey]) where expires < deleteBlobsExpiringBefore select blob); } catch (StorageException ex) { var webException = ex.InnerException as WebException; if (webException != null) { var httpResponse = (HttpWebResponse)webException.Response; if (httpResponse.StatusCode == HttpStatusCode.NotFound) { // it's legit that some tests never created the container to begin with. return(Enumerable.Empty <ICloudBlob>()); } } throw; } }, new ExecutionDataflowBlockOptions { BoundedCapacity = 4, }); var deleteBlobBlock = new ActionBlock <ICloudBlob>( blob => { Interlocked.Increment(ref purgedBlobCount); return(blob.DeleteAsync()); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4, BoundedCapacity = 100, }); searchExpiredBlobs.LinkTo(deleteBlobBlock, new DataflowLinkOptions { PropagateCompletion = true }); searchExpiredBlobs.Post(inboxContainer); searchExpiredBlobs.Complete(); await deleteBlobBlock.Completion; }
// Straight-line TPL pipeline over all *.txt files under DataFiles: load each
// file into CustomerTextData, filter with FilterTextData(5), flatten the list
// back into individual items (via a ConcurrentQueue), then weigh and log each.
// Faults are forwarded block-to-block by hand (each ContinueWith checks
// IsFaulted and either Faults or Completes the next block), since the links use
// no PropagateCompletion; the final Wait() surfaces any failure.
// NOTE(review): `static public` — conventional modifier order is `public static`.
static public void ProcessingByTPL_StraightForwardImplementation() { const string pathToFiles = @"..\..\..\..\DataFiles"; string[] files = Directory.GetFiles(pathToFiles, "*.txt", SearchOption.AllDirectories); var loadDataFromFileBlock = new TransformBlock<string[], List<CustomerTextData>>(fileItems => { var factory = new CustomerTextDataFactory(); return new List<CustomerTextData>(Array.ConvertAll(fileItems, factory.LoadFromFile)); }); var filterBlock = new TransformBlock<List<CustomerTextData>, List<CustomerTextData>>(textDataList => { var filter = new FilterTextData(5); return textDataList.Where(filter.Run).ToList(); }); var toListBlock = new TransformManyBlock<List<CustomerTextData>, CustomerTextData>(textDataList => { var queue = new ConcurrentQueue<CustomerTextData>(); textDataList.ForEach(queue.Enqueue); return queue; }); var action = new ActionBlock<CustomerTextData>(textData => { var weight = new WeightTextData(); int result = weight.Run(textData); Trace.WriteLine(result); Console.WriteLine(result); }); loadDataFromFileBlock.LinkTo(filterBlock); filterBlock.LinkTo(toListBlock); toListBlock.LinkTo(action); loadDataFromFileBlock.Completion.ContinueWith(t => { if (t.IsFaulted) ((IDataflowBlock)filterBlock).Fault(t.Exception); else filterBlock.Complete(); }); filterBlock.Completion.ContinueWith(t => { if (t.IsFaulted) ((IDataflowBlock)toListBlock).Fault(t.Exception); else toListBlock.Complete(); }); toListBlock.Completion.ContinueWith(t => { if (t.IsFaulted) ((IDataflowBlock)action).Fault(t.Exception); else action.Complete(); }); loadDataFromFileBlock.Post(files); loadDataFromFileBlock.Complete(); action.Completion.Wait(); }
public void TestPostAsyncEnumerable()
{
    foreach (bool bounded in DataflowTestHelpers.BooleanValues)
    {
        // BoundedCapacity of -1 means unbounded.
        var options = new ExecutionDataflowBlockOptions { BoundedCapacity = bounded ? 1 : -1 };
        var block = new TransformManyBlock <int, int>(DataflowTestHelpers.ToAsyncEnumerable, options);
        // Post succeeds before completion and must fail afterwards.
        Assert.True(block.Post(0));
        block.Complete();
        Assert.False(block.Post(0));
    }
}
// Purges blobs stored under date-named directories that expire before the given
// UTC instant (default DateTime interpreted as UtcNow). Stage 1 lists the
// container's top-level directories and keeps those whose trailing URI segment
// parses to an expired date; stage 2 expands each directory into its block
// blobs; stage 3 deletes them. All stages are bounded to keep memory flat, and
// completion propagates down the chain, so awaiting the deletion block covers
// the whole pipeline.
// NOTE(review): DateTime.Parse on the directory-name segment uses the current
// culture and yields DateTimeKind.Unspecified — confirm the comparison against
// the required-UTC parameter is sound for the naming scheme in use.
/// <summary> /// Purges all blobs set to expire prior to the specified date. /// </summary> /// <param name="deleteBlobsExpiringBefore"> /// All blobs scheduled to expire prior to this date will be purged. The default value /// is interpreted as <see cref="DateTime.UtcNow"/>. /// </param> /// <returns>The task representing the asynchronous operation.</returns> public async Task PurgeBlobsExpiringBeforeAsync(DateTime deleteBlobsExpiringBefore = default(DateTime)) { if (deleteBlobsExpiringBefore == default(DateTime)) { deleteBlobsExpiringBefore = DateTime.UtcNow; } Requires.Argument(deleteBlobsExpiringBefore.Kind == DateTimeKind.Utc, "expirationUtc", "UTC required."); var searchExpiredDirectoriesBlock = new TransformManyBlock <CloudBlobContainer, CloudBlobDirectory>( async c => { var results = await c.ListBlobsSegmentedAsync(); return(from directory in results.OfType <CloudBlobDirectory>() let expires = DateTime.Parse(directory.Uri.Segments[directory.Uri.Segments.Length - 1].TrimEnd('/')) where expires < deleteBlobsExpiringBefore select directory); }, new ExecutionDataflowBlockOptions { BoundedCapacity = 4, }); var deleteDirectoryBlock = new TransformManyBlock <CloudBlobDirectory, CloudBlockBlob>( async directory => { var results = await directory.ListBlobsSegmentedAsync(); return(results.OfType <CloudBlockBlob>()); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = 4, }); var deleteBlobBlock = new ActionBlock <CloudBlockBlob>( blob => blob.DeleteAsync(), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4, BoundedCapacity = 100, }); searchExpiredDirectoriesBlock.LinkTo(deleteDirectoryBlock, new DataflowLinkOptions { PropagateCompletion = true }); deleteDirectoryBlock.LinkTo(deleteBlobBlock, new DataflowLinkOptions { PropagateCompletion = true }); searchExpiredDirectoriesBlock.Post(this.container); searchExpiredDirectoriesBlock.Complete(); await deleteBlobBlock.Completion; }
// Expands one directory into its files plus its subdirectory paths (the
// subdirectories are fed back through the browse block to recurse).
// Tracks outstanding directories so the block can complete once the walk is
// exhausted: this directory is now done (-1) while its subdirectories become
// newly pending (+subDirectories.Length); when the running count reaches zero,
// no directories remain anywhere in flight.
private IEnumerable <string> GetFilesInDirectory(string path)
{
    var dirInfo = new DirectoryInfo(path);
    var subDirectories = dirInfo.GetDirectories().Select(info => info.FullName).ToArray();

    // BUGFIX: the original `_directoriesRemaining += ...` followed by a separate
    // `--_directoriesRemaining` was two non-atomic read-modify-write updates; if
    // the enclosing block runs with MaxDegreeOfParallelism > 1, concurrent
    // invocations could race and either miss or double-fire completion.
    // Interlocked.Add applies the same net change (+Length - 1) atomically.
    if (Interlocked.Add(ref _directoriesRemaining, subDirectories.Length - 1) == 0)
    {
        _directoryBrowseBlock.Complete();
    }

    return(dirInfo.GetFiles().Select(fileInfo => fileInfo.FullName).Concat(subDirectories));
}
public static void TestSync2()
{
    tmb.LinkTo(ab);
    // Feed four messages through the pre-built transform block.
    for (int message = 0; message < 4; message++)
    {
        tmb.Post(message);
    }
    tmb.Complete();
    Console.WriteLine("Post Finished");
    // Block until the transform block has processed everything it accepted.
    tmb.Completion.Wait();
    Console.WriteLine("Process Finished");
}
// Flushes the pipeline on dispose: completes each block and synchronously waits
// for it to finish before completing the next, in upstream-to-downstream order
// (batch -> group-batch -> persist), then disposes the flush timer.
// NOTE(review): this sequential pattern assumes completion is NOT automatically
// propagated through the links (each block is completed by hand) — verify the
// link options used at construction.
// NOTE(review): blocking on Completion inside Dispose can deadlock if called
// from a context the blocks need, and a faulted block will make Dispose throw —
// confirm callers tolerate both.
public void Dispose() { _batchBlock.Complete(); _batchBlock.Completion.ConfigureAwait(false).GetAwaiter().GetResult(); _groupBatchBlock.Complete(); _groupBatchBlock.Completion.ConfigureAwait(false).GetAwaiter().GetResult(); _persistGroupBlock.Complete(); _persistGroupBlock.Completion.ConfigureAwait(false).GetAwaiter().GetResult(); _timer.Dispose(); }
// Demo pipeline: the generator fans each posted number out into strings, the
// writer transforms them with DOP = 5, and the finishing ActionBlock logs each
// result. Completion propagates generator -> writer -> finisher, and Main
// blocks until the finisher is done.
// NOTE(review): the finishing block waits on writerBlock.Completion for EVERY
// item — that wait cannot succeed until the generator completes and the writer
// drains, so items are effectively held in the finisher's (unbounded) input
// queue until the whole upstream is finished; it avoids deadlock only because
// that queue is unbounded. Confirm this stall is the intended demonstration.
private static void Main(string[] args) { var generatorBlock = new TransformManyBlock<int, string>(num => GenerateStrings(num)); var writerBlock = new TransformBlock<string, string>(str => WriteString(str), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 5 }); var finishingBlock = new ActionBlock<string>(str => { writerBlock.Completion.Wait(); Console.WriteLine(Thread.CurrentThread.ManagedThreadId + ": finished - " + str); }); generatorBlock.LinkTo(writerBlock, new DataflowLinkOptions{PropagateCompletion = true}); writerBlock.LinkTo(finishingBlock, new DataflowLinkOptions { PropagateCompletion = true }); for (var i = 1; i <= 3; i++) { Console.WriteLine("Posted " + i*10); generatorBlock.Post(i*10); } generatorBlock.Complete(); finishingBlock.Completion.Wait(); Console.WriteLine("Pipeline is finished"); Console.ReadKey(); }
public async Task TestPrecanceled()
{
    // A token canceled before construction: the block must refuse all work and
    // complete as canceled.
    var block = new TransformManyBlock<int, int>(DataflowTestHelpers.ToEnumerable,
        new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(canceled: true) });

    // Linking still works, and the returned link can be disposed.
    IDisposable link = block.LinkTo(DataflowBlock.NullTarget<int>());
    Assert.NotNull(link);
    link.Dispose();

    // Every posting path is rejected.
    Assert.False(block.Post(42));
    var send = block.SendAsync(42);
    Assert.True(send.IsCompleted);
    Assert.False(send.Result);

    // Nothing is available to receive.
    IList<int> allItems;
    int singleItem;
    Assert.False(block.TryReceiveAll(out allItems));
    Assert.False(block.TryReceive(out singleItem));

    // Completion surfaces the cancellation.
    Assert.NotNull(block.Completion);
    await Assert.ThrowsAnyAsync<OperationCanceledException>(() => block.Completion);
    block.Complete(); // just make sure it doesn't throw
}
public async Task TestMultipleYields()
{
    const int Messages = 10;
    // A single input fans out into Messages outputs; the block must not report
    // completion until every yielded value has been received.
    var block = new TransformManyBlock<int, int>(_ => Enumerable.Range(0, Messages));
    block.Post(42);
    block.Complete();
    for (int expected = 0; expected < Messages; expected++)
    {
        Assert.False(block.Completion.IsCompleted);
        Assert.Equal(expected: expected, actual: await block.ReceiveAsync());
    }
    await block.Completion;
}
[InlineData(2, 1, false)] // no force ordered, but dop == 1, so it doesn't matter
public async Task TestOrdering_Async_OrderedEnabled(int mmpt, int dop, bool? EnsureOrdered)
{
    const int iters = 1000;
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
    if (EnsureOrdered == null)
    {
        // null parameter means "leave the default", which must be true.
        Assert.True(options.EnsureOrdered);
    }
    else
    {
        options.EnsureOrdered = EnsureOrdered.Value;
    }

    // Identity transform via a completed task; outputs must come back in
    // input order under these settings.
    var block = new TransformManyBlock<int, int>(value => Task.FromResult(Enumerable.Repeat(value, 1)), options);
    block.PostRange(0, iters);
    for (int expected = 0; expected < iters; expected++)
    {
        Assert.Equal(expected: expected, actual: await block.ReceiveAsync());
    }
    block.Complete();
    await block.Completion;
}
public async Task TestNullTasksIgnored()
{
    foreach (int dop in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
    {
        // Even inputs yield a null Task (which the block must silently drop);
        // odd inputs yield a single-element sequence.
        var block = new TransformManyBlock<int, int>(
            value => value % 2 == 0
                ? null
                : Task.Run(() => (IEnumerable<int>)new[] { value }),
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop });

        const int Iters = 100;
        block.PostRange(0, Iters);
        block.Complete();

        // Only the odd values should ever appear on the output side.
        for (int value = 0; value < Iters; value++)
        {
            if (value % 2 != 0)
            {
                Assert.Equal(expected: value, actual: await block.ReceiveAsync());
            }
        }
        await block.Completion;
    }
}
public async Task TestOrdering_Sync_OrderedDisabled(bool trustedEnumeration)
{
    // With EnsureOrdered = false and two workers, the second input's result may
    // overtake the first; with ordering enabled this test would hang.
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = false };
    var gate = new ManualResetEventSlim();
    var block = new TransformManyBlock<int, int>(value =>
    {
        if (value == 0)
        {
            // Stall item 0 until item 1's output has been observed.
            gate.Wait();
        }
        return trustedEnumeration ? new[] { value } : Enumerable.Repeat(value, 1);
    }, options);
    block.Post(0);
    block.Post(1);
    // Item 1 must arrive first because item 0 is blocked on the gate.
    Assert.Equal(1, await block.ReceiveAsync());
    gate.Set();
    Assert.Equal(0, await block.ReceiveAsync());
    block.Complete();
    await block.Completion;
}
public async Task TestOrdering_Async_OrderedDisabled(bool trustedEnumeration)
{
    // With ordering disabled, each result becomes available as soon as its
    // backing task completes — here deliberately in reverse input order.
    // With ordering enabled this test would hang.
    var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false };
    var sources = new TaskCompletionSource<IEnumerable<int>>[10];
    for (int index = 0; index < sources.Length; index++)
    {
        sources[index] = new TaskCompletionSource<IEnumerable<int>>();
    }
    var block = new TransformManyBlock<int, int>(value => sources[value].Task, options);
    block.PostRange(0, sources.Length);
    // Complete the sources back-to-front; outputs must follow completion order.
    for (int index = sources.Length - 1; index >= 0; index--)
    {
        sources[index].SetResult(trustedEnumeration ? new[] { index } : Enumerable.Repeat(index, 1));
        Assert.Equal(expected: index, actual: await block.ReceiveAsync());
    }
    block.Complete();
    await block.Completion;
}
// Seeds an Animat population for the given species/area by driving the 3MB
// (C3mbs) simulator: writes the bathymetry to a temporary YXZ text file,
// configures the simulator (binary output off, durationless mode), adds
// individual animats at random in-perimeter locations via a
// TransformManyBlock in batches of up to 100, waits for the simulator to
// leave its INITIALIZING state, finishes the run, and returns the geo
// locations collected through a BufferBlock.
// NOTE(review): the transform lambda blocks on GetNearestPointAsync(...)
// .Result inside the dataflow block, and it reads the shared static Random
// from multiple threads with MaxDegreeOfParallelism = -1 (i.e. Unbounded);
// System.Random is not thread-safe — confirm Random here is a thread-safe
// wrapper, otherwise concurrent NextDouble() calls can corrupt its state.
public async static Task<Animat> SeedAsync(ScenarioSpecies species, GeoRect geoRect, Bathymetry bathymetry) { var yxzFileName = Path.Combine(Path.GetTempPath(), Path.GetFileNameWithoutExtension(Path.GetRandomFileName()) + ".txt"); bathymetry.ToYXZ(yxzFileName, -1); var mbs = new C3mbs(); mbsRESULT mbsResult; if (mbsRESULT.OK != (mbsResult = mbs.SetOutputDirectory(Path.GetTempPath()))) throw new AnimatInterfaceMMBSException("SetOutputDirectory Error:" + mbs.ResultToTc(mbsResult)); var config = mbs.GetConfiguration();
config.enabled = false; // binary output enabled/disabled
config.durationLess = true; // make sure we're in durationless mode.
mbs.SetConfiguration(config); mbsResult = mbs.LoadBathymetryFromTextFile(yxzFileName); if (mbsRESULT.OK != mbsResult) throw new AnimatInterfaceMMBSException("Bathymetry failed to load: " + mbs.ResultToTc(mbsResult)); mbsResult = mbs.AddSpecies(species.SpeciesDefinitionFilePath); if (mbsRESULT.OK != mbsResult) throw new AnimatInterfaceMMBSException(string.Format("C3mbs::AddSpecies FATAL error {0} for species {1}", mbs.ResultToTc(mbsResult), species.SpeciesDefinitionFilePath)); var bounds = new GeoArray(geoRect.NorthWest, geoRect.NorthEast, geoRect.SouthEast, geoRect.SouthWest, geoRect.NorthWest); var result = new Animat { ScenarioSpecies = species }; var area = bounds.Area;
//Debug.WriteLine("Area: {0}",area);
// Each posted int is a batch size: generate that many random locations,
// register each animat with the simulator (serialized via lock(mbs)), and
// emit a Geo per successful add.
var transformManyBlock = new TransformManyBlock<int, Geo<float>>(count => { var geos = new List<Geo<float>>(); for (var i = 0; i < count; i++) { var location = bounds.RandomLocationWithinPerimeter(); var depth = bathymetry.Samples.GetNearestPointAsync(location).Result.Data; mbsRESULT retval; lock (mbs) retval = mbs.AddIndividualAnimat(0, new mbsPosition { latitude = location.Latitude, longitude = location.Longitude, depth = 0 }); if (mbsRESULT.OK == retval) geos.Add(new Geo<float>(location.Latitude, location.Longitude, (float)(depth * Random.NextDouble()))); } return geos; }, new ExecutionDataflowBlockOptions { 
TaskScheduler = TaskScheduler.Default, BoundedCapacity = -1, MaxDegreeOfParallelism = -1, }); var bufferBlock = new BufferBlock<Geo<float>>(); transformManyBlock.LinkTo(bufferBlock); var population = (int)Math.Round(area * species.PopulationDensity); result.TotalAnimats = population; const int blockSize = 100; while (population > 0) { transformManyBlock.Post(population > blockSize ? blockSize : population); population -= blockSize; } transformManyBlock.Complete(); await transformManyBlock.Completion;
//mbsResult = mbs.InitializeRun();
//if (mbsRESULT.OK == mbsResult)
while (mbsRUNSTATE.INITIALIZING == mbs.GetRunState()) Thread.Sleep(1);
//else throw new AnimatInterfaceMMBSException("C3mbs::Initialize FATAL error " + mbs.ResultToTc(mbsResult));
mbsResult = mbs.FinishRun(); if (mbsRESULT.OK != mbsResult) throw new AnimatInterfaceMMBSException("C3mbs::FinishRun FATAL error " + mbs.ResultToTc(mbsResult)); IList<Geo<float>> animatGeos; if (bufferBlock.TryReceiveAll(out animatGeos)) result.Locations.AddRange(animatGeos); return result; }
// Deletes every blob in the container whose ExpirationDateMetadataKey
// metadata parses to a time earlier than the moment this method started.
// A TransformManyBlock lists blobs (flat listing, metadata included, page
// size 50) and yields the expired ones; a bounded ActionBlock (parallelism
// 4) deletes them. A 404 from storage is treated as "the container never
// existed — nothing to purge". Completion and faults propagate from the
// search block to the delete block, so awaiting deleteBlobBlock.Completion
// observes the whole pipeline.
// NOTE(review): only a single listing segment is consumed (there is no
// continuation-token loop), and DateTime.Parse uses the current culture —
// confirm both are acceptable for the containers this runs against.
public static async Task PurgeExpiredAsync(CloudBlobContainer inboxContainer) { Requires.NotNull(inboxContainer, "inboxContainer"); var deleteBlobsExpiringBefore = DateTime.UtcNow; int purgedBlobCount = 0; var searchExpiredBlobs = new TransformManyBlock<CloudBlobContainer, ICloudBlob>( async c => { try { var results = await c.ListBlobsSegmentedAsync( string.Empty, useFlatBlobListing: true, pageSize: 50, details: BlobListingDetails.Metadata, options: new BlobRequestOptions(), operationContext: null); return from blob in results.OfType<ICloudBlob>() let expires = DateTime.Parse(blob.Metadata[ExpirationDateMetadataKey]) where expires < deleteBlobsExpiringBefore select blob; } catch (StorageException ex) { var webException = ex.InnerException as WebException; if (webException != null) { var httpResponse = (HttpWebResponse)webException.Response; if (httpResponse.StatusCode == HttpStatusCode.NotFound) {
// it's legit that some tests never created the container to begin with.
return Enumerable.Empty<ICloudBlob>(); } } throw; } }, new ExecutionDataflowBlockOptions { BoundedCapacity = 4, }); var deleteBlobBlock = new ActionBlock<ICloudBlob>( blob => { Interlocked.Increment(ref purgedBlobCount); return blob.DeleteAsync(); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4, BoundedCapacity = 100, }); searchExpiredBlobs.LinkTo(deleteBlobBlock, new DataflowLinkOptions { PropagateCompletion = true }); searchExpiredBlobs.Post(inboxContainer); searchExpiredBlobs.Complete(); await deleteBlobBlock.Completion; }
internal static bool TransformManyEnumerableToAction()
{
    // Each posted item maps to the same one-element enumerable, so the
    // downstream action should run exactly once per posted item.
    const int ITERS = 2;
    int processed = 0;
    var payload = new[] { 1 };

    var transform = new TransformManyBlock<int, int>(_ => payload);
    var sink = new ActionBlock<int>(_ => processed++);
    transform.LinkWithCompletion(sink);

    for (int n = 0; n < ITERS; n++)
    {
        transform.Post(n);
    }
    transform.Complete();
    sink.Completion.Wait();

    return processed == ITERS;
}
// End-to-end conformance suite for TransformManyBlock, executed in two
// mirrored regions: Sync (IEnumerable-returning transforms) and Async
// (Task-returning transforms). Each region drives a FeedTarget matrix over
// feed methods and block options, then checks chained networks
// (Post/SendAsync + Receive), multi-result yields, tolerance of
// OperationCanceledException, precanceled tokens, faulting behavior,
// reuse of list/array results in a feedback loop, and OCE-throwing
// delegates/tasks. Every sub-test accumulates into 'passed'.
public void RunTransformManyBlockConformanceTests() { bool passed = true;
#region Sync
{
// Do everything twice - once through OfferMessage and Once through Post
for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++) { Func<DataflowBlockOptions, TargetProperties<int>> transformManyBlockFactory = options => { TransformManyBlock<int, int> transformManyBlock = new TransformManyBlock<int, int>(i => new[] { i }, (ExecutionDataflowBlockOptions)options); ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options); transformManyBlock.LinkTo(actionBlock); return new TargetProperties<int> { Target = transformManyBlock, Capturer = actionBlock, ErrorVerifyable = false }; }; CancellationTokenSource cancellationSource = new CancellationTokenSource(); var defaultOptions = new ExecutionDataflowBlockOptions(); var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount }; var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 10 }; var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 100, CancellationToken = cancellationSource.Token }; passed &= FeedTarget(transformManyBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, defaultOptions, 10, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, dopOptions, 1000, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, cancellationOptions, 10000, Intervention.Cancel, 
cancellationSource, feedMethod, true); }
// Test chained Post/Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => new[] { i * 2 })); for (int i = 0; i < ITERS; i++) { network.Post(i); localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16); } Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test chained SendAsync/Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => new[] { i * 2 })); for (int i = 0; i < ITERS; i++) { network.SendAsync(i); localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16); } Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test chained Post all then Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => new[] { i * 2 })); for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true; for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16; Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? 
"Success" : "Failure"); passed &= localPassed; }
// Test chained SendAsync all then Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => new[] { i * 2 })); var tasks = new Task[ITERS]; for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i); Task.WaitAll(tasks); int total = 0; for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive(); localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16)); Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test multiple yielded results
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(i => { return Enumerable.Range(0, 10); }); t.Post(42); t.Complete(); for (int i = 0; i < 10; i++) { localPassed &= t.Receive() == i; } t.Completion.Wait(); Console.WriteLine("{0}: Test multiple yielded results", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test that OperationCanceledExceptions are ignored
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(i => { if ((i % 2) == 0) throw new OperationCanceledException(); return new[] { i }; }); for (int i = 0; i < 10; i++) t.Post(i); t.Complete(); for (int i = 0; i < 10; i++) { if ((i % 2) != 0) localPassed &= t.Receive() == i; } t.Completion.Wait(); Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? 
"Success" : "Failure"); passed &= localPassed; }
// Test using a precanceled token
{ bool localPassed = true; try { var cts = new CancellationTokenSource(); cts.Cancel(); var dbo = new ExecutionDataflowBlockOptions { CancellationToken = cts.Token }; var t = new TransformManyBlock<int, int>(i => new[] { i }, dbo); int ignoredValue; IList<int> ignoredValues; localPassed &= t.LinkTo(new ActionBlock<int>(delegate { })) != null; localPassed &= t.SendAsync(42).Result == false; localPassed &= t.TryReceiveAll(out ignoredValues) == false; localPassed &= t.Post(42) == false; localPassed &= t.OutputCount == 0; localPassed &= t.TryReceive(out ignoredValue) == false; localPassed &= t.Completion != null; t.Complete(); } catch (Exception) { localPassed = false; } Console.WriteLine(" > {0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test faulting
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(new Func<int, IEnumerable<int>>(i => { throw new InvalidOperationException(); })); t.Post(42); t.Post(1); t.Post(2); t.Post(3); try { t.Completion.Wait(); } catch { } localPassed &= t.Completion.IsFaulted; localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500); localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500); localPassed &= t.Post(4) == false; Console.WriteLine(" > {0}: Faulted handled correctly", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test reuse of a list and array
{ bool localPassed = true; foreach (bool bounded in new[] { false, true }) { for (int dop = 1; dop < Environment.ProcessorCount; dop++) { var dbo = bounded ? 
new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = 2 } : new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop }; foreach (IList<int> list in new IList<int>[] { new int[1], new List<int>() { 0 }, new Collection<int>() { 0 } }) { int nextExpectedValue = 1; TransformManyBlock<int, int> tmb1 = null; tmb1 = new TransformManyBlock<int, int>(i => { if (i == 1000) { tmb1.Complete(); return (IEnumerable<int>)null; } else if (dop == 1) { list[0] = i + 1; return (IEnumerable<int>)list; } else if (list is int[]) { return new int[1] { i + 1 }; } else if (list is List<int>) { return new List<int>() { i + 1 }; } else return new Collection<int>() { i + 1 }; }, dbo); TransformBlock<int, int> tmb2 = new TransformBlock<int, int>(i => { if (i != nextExpectedValue) { localPassed = false; tmb1.Complete(); } nextExpectedValue++; return i; }); tmb1.LinkTo(tmb2); tmb2.LinkTo(tmb1); tmb1.SendAsync(0).Wait(); tmb1.Completion.Wait(); } } } Console.WriteLine(" > {0}: Reuse of a list and array", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test throwing an OCE
{ bool localPassed = true; foreach (bool bounded in new[] { true, false }) { for (int dop = 1; dop < Environment.ProcessorCount; dop++) { var dbo = bounded ? 
new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = 2 } : new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop }; foreach (int mode in new[] { 0, 1, 2 }) { const int ITERS = 50; var mres = new ManualResetEventSlim(); var tmb = new TransformManyBlock<int, int>(i => { if (i < ITERS - 1) throw new OperationCanceledException(); if (mode == 0) return new int[] { i }; else if (mode == 1) return new List<int>() { i }; else return Enumerable.Repeat(i, 1); }, dbo); var ab = new ActionBlock<int>(i => { if (i != ITERS - 1) localPassed = false; mres.Set(); }); tmb.LinkTo(ab); for (int i = 0; i < ITERS; i++) tmb.SendAsync(i).Wait(); mres.Wait(); } } } Console.WriteLine("{0}: Canceled invocation", localPassed ? "Success" : "Failure"); passed &= localPassed; } }
#endregion
#region Async
{
// Do everything twice - once through OfferMessage and Once through Post
for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++) { Func<DataflowBlockOptions, TargetProperties<int>> transformManyBlockFactory = options => { TransformManyBlock<int, int> transformManyBlock = new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i }), (ExecutionDataflowBlockOptions)options); ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options); transformManyBlock.LinkTo(actionBlock); return new TargetProperties<int> { Target = transformManyBlock, Capturer = actionBlock, ErrorVerifyable = false }; }; CancellationTokenSource cancellationSource = new CancellationTokenSource(); var defaultOptions = new ExecutionDataflowBlockOptions(); var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount }; var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 10 }; var cancellationOptions = new ExecutionDataflowBlockOptions { 
MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 100, CancellationToken = cancellationSource.Token }; passed &= FeedTarget(transformManyBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, defaultOptions, 10, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, dopOptions, 1000, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true); passed &= FeedTarget(transformManyBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true); }
// Test chained Post/Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i * 2 }))); for (int i = 0; i < ITERS; i++) { network.Post(i); localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16); } Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test chained SendAsync/Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i * 2 }))); for (int i = 0; i < ITERS; i++) { network.SendAsync(i); localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16); } Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? 
"Success" : "Failure"); passed &= localPassed; }
// Test chained Post all then Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i * 2 }))); for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true; for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16; Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test chained SendAsync all then Receive
{ bool localPassed = true; const int ITERS = 2; var network = Chain<TransformManyBlock<int, int>, int>(4, () => new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i * 2 }))); var tasks = new Task[ITERS]; for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i); Task.WaitAll(tasks); int total = 0; for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive(); localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16)); Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test multiple yielded results
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(i => Task.Factory.StartNew(() => (IEnumerable<int>)Enumerable.Range(0, 10).ToArray())); t.Post(42); t.Complete(); for (int i = 0; i < 10; i++) { localPassed &= t.Receive() == i; } t.Completion.Wait(); Console.WriteLine("{0}: Test multiple yielded results", localPassed ? 
"Success" : "Failure"); passed &= localPassed; }
// Test that OperationCanceledExceptions are ignored
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(i => { if ((i % 2) == 0) throw new OperationCanceledException(); return new[] { i }; }); for (int i = 0; i < 10; i++) t.Post(i); t.Complete(); for (int i = 0; i < 10; i++) { if ((i % 2) != 0) localPassed &= t.Receive() == i; } t.Completion.Wait(); Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test that null tasks are ignored
// NOTE(review): the console label below reads "OperationCanceledExceptions
// are ignored" — a copy/paste of the previous test's label; this pass/fail
// line is mislabeled.
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(i => { if ((i % 2) == 0) return null; return Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i }); }); for (int i = 0; i < 10; i++) t.Post(i); t.Complete(); for (int i = 0; i < 10; i++) { if ((i % 2) != 0) localPassed &= t.Receive() == i; } t.Completion.Wait(); Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test that null tasks are ignored when a reordering buffer is in place
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(new Func<int, Task<IEnumerable<int>>>(i => { if (i == 0) { Task.Delay(1000).Wait(); return null; } return Task.Factory.StartNew(() => (IEnumerable<int>)new[] { i }); }), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 }); t.Post(0); t.Post(1); try { localPassed &= t.Receive(TimeSpan.FromSeconds(4)) == 1; } catch { localPassed = false; } Console.WriteLine("{0}: null tasks are ignored with reordering buffer", localPassed ? 
"Success" : "Failure"); passed &= localPassed; }
// Test faulting from the delegate
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(new Func<int, Task<IEnumerable<int>>>(i => { throw new InvalidOperationException(); })); t.Post(42); t.Post(1); t.Post(2); t.Post(3); try { t.Completion.Wait(); } catch { } localPassed &= t.Completion.IsFaulted; localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500); localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500); localPassed &= t.Post(4) == false; Console.WriteLine(" > {0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test faulting from the task
{ bool localPassed = true; var t = new TransformManyBlock<int, int>(new Func<int, Task<IEnumerable<int>>>(i => Task<IEnumerable<int>>.Factory.StartNew(() => { throw new InvalidOperationException(); }))); t.Post(42); t.Post(1); t.Post(2); t.Post(3); try { t.Completion.Wait(); } catch { } localPassed &= t.Completion.IsFaulted; localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500); localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500); localPassed &= t.Post(4) == false; Console.WriteLine(" > {0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test reuse of a list and array
{ bool localPassed = true; foreach (bool bounded in new[] { false, true }) { for (int dop = 1; dop < Environment.ProcessorCount; dop++) { var dbo = bounded ? 
new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = 2 } : new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop }; foreach (IList<int> list in new IList<int>[] { new int[1], new List<int>() { 0 }, new Collection<int>() { 0 } }) { int nextExpectedValue = 1; TransformManyBlock<int, int> tmb1 = null; tmb1 = new TransformManyBlock<int, int>(i => { return Task.Factory.StartNew(() => { if (i == 1000) { tmb1.Complete(); return (IEnumerable<int>)null; } else if (dop == 1) { list[0] = i + 1; return (IEnumerable<int>)list; } else if (list is int[]) { return new int[1] { i + 1 }; } else if (list is List<int>) { return new List<int>() { i + 1 }; } else return new Collection<int>() { i + 1 }; }); }, dbo); TransformBlock<int, int> tmb2 = new TransformBlock<int, int>(i => { if (i != nextExpectedValue) { localPassed = false; tmb1.Complete(); } nextExpectedValue++; return i; }); tmb1.LinkTo(tmb2); tmb2.LinkTo(tmb1); tmb1.SendAsync(0).Wait(); tmb1.Completion.Wait(); } } } Console.WriteLine(" > {0}: Reuse of a list and array", localPassed ? "Success" : "Failure"); passed &= localPassed; }
// Test throwing an OCE
{ bool localPassed = true; foreach (bool bounded in new[] { true, false }) { for (int dop = 1; dop < Environment.ProcessorCount; dop++) { var dbo = bounded ? 
new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, BoundedCapacity = 2 } : new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop }; foreach (int mode in new[] { 0, 1, 2 }) { const int ITERS = 50; var mres = new ManualResetEventSlim(); var tmb = new TransformManyBlock<int, int>(i => { var cts = new CancellationTokenSource(); return Task.Factory.StartNew(() => { if (i < ITERS - 1) { cts.Cancel(); cts.Token.ThrowIfCancellationRequested(); } if (mode == 0) return new int[] { i }; else if (mode == 1) return new List<int>() { i }; else return Enumerable.Repeat(i, 1); }, cts.Token); }, dbo); var ab = new ActionBlock<int>(i => { if (i != ITERS - 1) localPassed = false; mres.Set(); }); tmb.LinkTo(ab); for (int i = 0; i < ITERS; i++) tmb.SendAsync(i).Wait(); mres.Wait(); } } } Console.WriteLine(" > {0}: Canceled invocation", localPassed ? "Success" : "Failure"); passed &= localPassed; } }
#endregion
Assert.True(passed, "Test failed."); }
public void NullResultTest ()
{
    // A transform returning a null enumerable must yield no output and
    // must not prevent the block from completing.
    bool sawOutput = false;
    var source = new TransformManyBlock<int, int> (i => (IEnumerable<int>)null);
    var sink = new ActionBlock<int> (i => sawOutput = true);
    source.LinkTo (sink);

    Assert.IsTrue (source.Post (1));
    source.Complete ();
    Assert.IsTrue (source.Completion.Wait (100));
    Assert.IsFalse (sawOutput);
}
public void AsyncNullTest ()
{
    // A null Task result is ignored: processing the item produces no
    // output and no fault, and the block only completes once Complete()
    // is called explicitly.
    var scheduler = new TestScheduler ();
    var options = new ExecutionDataflowBlockOptions { TaskScheduler = scheduler };
    var block = new TransformManyBlock<int, int> (
        i => (Task<IEnumerable<int>>)null, options);

    Assert.IsTrue (block.Post (1));
    scheduler.ExecuteAll ();

    // Still running: the null task must not have completed the block.
    Assert.IsFalse (block.Completion.Wait (100));
    block.Complete ();
    Assert.IsTrue (block.Completion.Wait (100));
}
public async Task TransformManyEnumerableToAction()
{
    // Every input maps to the same one-element array, so the linked action
    // must fire exactly once per posted input.
    int processed = 0;
    var payload = new[] { 1 };

    var transform = new TransformManyBlock<int, int>(_ => payload);
    var sink = new ActionBlock<int>(_ => processed++);
    transform.LinkTo(sink, new DataflowLinkOptions { PropagateCompletion = true });

    transform.PostRange(0, Iterations);
    transform.Complete();
    await sink.Completion;

    Assert.Equal(Iterations, processed);
}
public async Task TestOrdering()
{
    // Each input i expands to { i, i+1, i+2 }; posting 0, 3, 6, ... must
    // therefore produce the full ordered sequence 0..iters-1 regardless of
    // the parallelism and messages-per-task settings tried here.
    const int iters = 9999;
    foreach (int mmpt in new[] { DataflowBlockOptions.Unbounded, 1 })
    {
        foreach (int dop in new[] { 1, 2, DataflowBlockOptions.Unbounded })
        {
            var block = new TransformManyBlock<int, int>(
                i => new[] { i, i + 1, i + 2 },
                new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt });

            for (int seed = 0; seed < iters; seed += 3)
            {
                Assert.True(block.Post(seed));
            }
            for (int expected = 0; expected < iters; expected++)
            {
                Assert.Equal(expected, await block.ReceiveAsync());
            }

            block.Complete();
            await block.Completion;
        }
    }
}
// Seeds an Animat population without the 3MB simulator: generates random
// in-perimeter locations in parallel batches of up to 100 and keeps only
// those where the bathymetry sample is deeper than -50, assigning each a
// random fraction of the local depth. The total population is the region's
// area times the species' population density.
// NOTE(review): the transform lambda blocks on GetNearestPointAsync(...)
// .Result inside the dataflow block, and it reads the shared static Random
// concurrently with MaxDegreeOfParallelism = -1 (i.e. Unbounded);
// System.Random is not thread-safe — confirm Random here is a thread-safe
// wrapper, otherwise concurrent NextDouble() calls can corrupt its state.
public async static Task<Animat> SeedAsyncWithout3MB(ScenarioSpecies species, GeoRect geoRect, Bathymetry bathymetry) { var bounds = new GeoArray(geoRect.NorthWest, geoRect.NorthEast, geoRect.SouthEast, geoRect.SouthWest, geoRect.NorthWest); var result = new Animat { ScenarioSpecies = species }; var area = bounds.Area;
//Debug.WriteLine("Area: {0}",area);
// Each posted int is a batch size: produce up to that many qualifying geos.
var transformManyBlock = new TransformManyBlock<int, Geo<float>>(count => { var geos = new List<Geo<float>>(); for (var i = 0; i < count; i++) { var location = bounds.RandomLocationWithinPerimeter(); var depth = bathymetry.Samples.GetNearestPointAsync(location).Result.Data; if (depth < -50) geos.Add(new Geo<float>(location.Latitude, location.Longitude, (float)(depth * Random.NextDouble()))); } return geos; }, new ExecutionDataflowBlockOptions { TaskScheduler = TaskScheduler.Default, BoundedCapacity = -1, MaxDegreeOfParallelism = -1, }); var bufferBlock = new BufferBlock<Geo<float>>(); transformManyBlock.LinkTo(bufferBlock); var population = (int)Math.Round(area * species.PopulationDensity); result.TotalAnimats = population; const int blockSize = 100; while (population > 0) { transformManyBlock.Post(population > blockSize ? blockSize : population); population -= blockSize; } transformManyBlock.Complete(); await transformManyBlock.Completion; IList<Geo<float>> animatGeos; if (bufferBlock.TryReceiveAll(out animatGeos)) result.Locations.AddRange(animatGeos); return result; }
// Pipeline: list top-level directories whose name (the final URI segment)
// parses as an expiration timestamp before the cutoff → list the blobs in
// each expired directory (parallelism 2) → delete each blob (parallelism 4,
// capacity 100). Completion and faults propagate through both links, so
// awaiting deleteBlobBlock.Completion observes the whole pipeline.
// NOTE(review): each ListBlobsSegmentedAsync call here consumes a single
// segment only (no continuation-token loop), and DateTime.Parse uses the
// current culture — verify both against the naming scheme used when the
// blobs are written.
/// <summary>
/// Purges all blobs set to expire prior to the specified date.
/// </summary>
/// <param name="deleteBlobsExpiringBefore">
/// All blobs scheduled to expire prior to this date will be purged. The default value
/// is interpreted as <see cref="DateTime.UtcNow"/>.
/// </param>
/// <returns>The task representing the asynchronous operation.</returns>
public async Task PurgeBlobsExpiringBeforeAsync(DateTime deleteBlobsExpiringBefore = default(DateTime)) { if (deleteBlobsExpiringBefore == default(DateTime)) { deleteBlobsExpiringBefore = DateTime.UtcNow; } Requires.Argument(deleteBlobsExpiringBefore.Kind == DateTimeKind.Utc, "expirationUtc", "UTC required."); var searchExpiredDirectoriesBlock = new TransformManyBlock<CloudBlobContainer, CloudBlobDirectory>( async c => { var results = await c.ListBlobsSegmentedAsync(); return from directory in results.OfType<CloudBlobDirectory>() let expires = DateTime.Parse(directory.Uri.Segments[directory.Uri.Segments.Length - 1].TrimEnd('/')) where expires < deleteBlobsExpiringBefore select directory; }, new ExecutionDataflowBlockOptions { BoundedCapacity = 4, }); var deleteDirectoryBlock = new TransformManyBlock<CloudBlobDirectory, CloudBlockBlob>( async directory => { var results = await directory.ListBlobsSegmentedAsync(); return results.OfType<CloudBlockBlob>(); }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = 4, }); var deleteBlobBlock = new ActionBlock<CloudBlockBlob>( blob => blob.DeleteAsync(), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4, BoundedCapacity = 100, }); searchExpiredDirectoriesBlock.LinkTo(deleteDirectoryBlock, new DataflowLinkOptions { PropagateCompletion = true }); deleteDirectoryBlock.LinkTo(deleteBlobBlock, new DataflowLinkOptions { PropagateCompletion = true }); searchExpiredDirectoriesBlock.Post(this.container); searchExpiredDirectoriesBlock.Complete(); await deleteBlobBlock.Completion; }
// Dataflow pipeline sample: download a book, split it into words, filter
// and de-duplicate them, find words whose reversal also appears, and print
// each such pair. Completion (and faults) are propagated manually through
// ContinueWith so the whole pipeline drains once the head completes.
static void Main(string[] args)
{
    //
    // Create the members of the pipeline.
    //

    // Downloads the requested resource as a string.
    var downloadString = new TransformBlock<string, string>(uri =>
    {
        Console.WriteLine("Downloading '{0}'...", uri);
        // Fix: dispose the WebClient when done (the original leaked it).
        using (var client = new WebClient())
        {
            return client.DownloadString(uri);
        }
    });

    // Separates the specified text into an array of words.
    var createWordList = new TransformBlock<string, string[]>(text =>
    {
        Console.WriteLine("Creating word list...");

        // Remove common punctuation by replacing all non-letter characters
        // with a space character.
        char[] tokens = text.ToArray();
        for (int i = 0; i < tokens.Length; i++)
        {
            if (!char.IsLetter(tokens[i]))
                tokens[i] = ' ';
        }
        text = new string(tokens);

        // Separate the text into an array of words.
        return text.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries);
    });

    // Removes short words, orders the resulting words alphabetically,
    // and then removes duplicates.
    var filterWordList = new TransformBlock<string[], string[]>(words =>
    {
        Console.WriteLine("Filtering word list...");
        return words.Where(word => word.Length > 3).OrderBy(word => word)
            .Distinct().ToArray();
    });

    // Finds all words in the specified collection whose reverse also
    // exists in the collection.
    var findReversedWords = new TransformManyBlock<string[], string>(words =>
    {
        Console.WriteLine("Finding reversed words...");

        // Holds reversed words.
        var reversedWords = new ConcurrentQueue<string>();

        // Add each word in the original collection to the result whose
        // reversed word also exists in the collection. The previous stage
        // sorted and de-duplicated the input, so BinarySearch is valid.
        Parallel.ForEach(words, word =>
        {
            // Reverse the word.
            string reverse = new string(word.Reverse().ToArray());

            // Enqueue the word if the reversed version also exists
            // in the collection.
            if (Array.BinarySearch<string>(words, reverse) >= 0 && word != reverse)
            {
                reversedWords.Enqueue(word);
            }
        });

        return reversedWords;
    });

    // Prints the provided reversed words to the console.
    var printReversedWords = new ActionBlock<string>(reversedWord =>
    {
        Console.WriteLine("Found reversed words {0}/{1}",
            reversedWord, new string(reversedWord.Reverse().ToArray()));
    });

    //
    // Connect the dataflow blocks to form a pipeline.
    //
    downloadString.LinkTo(createWordList);
    createWordList.LinkTo(filterWordList);
    filterWordList.LinkTo(findReversedWords);
    findReversedWords.LinkTo(printReversedWords);

    //
    // For each completion task in the pipeline, create a continuation task
    // that marks the next block in the pipeline as completed.
    // A completed dataflow block processes any buffered elements, but does
    // not accept new elements.
    //
    downloadString.Completion.ContinueWith(t =>
    {
        if (t.IsFaulted) ((IDataflowBlock)createWordList).Fault(t.Exception);
        else createWordList.Complete();
    });
    createWordList.Completion.ContinueWith(t =>
    {
        if (t.IsFaulted) ((IDataflowBlock)filterWordList).Fault(t.Exception);
        else filterWordList.Complete();
    });
    filterWordList.Completion.ContinueWith(t =>
    {
        if (t.IsFaulted) ((IDataflowBlock)findReversedWords).Fault(t.Exception);
        else findReversedWords.Complete();
    });
    findReversedWords.Completion.ContinueWith(t =>
    {
        if (t.IsFaulted) ((IDataflowBlock)printReversedWords).Fault(t.Exception);
        else printReversedWords.Complete();
    });

    // Process "The Iliad of Homer" by Homer.
    downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");

    // Mark the head of the pipeline as complete. The continuation tasks
    // propagate completion through the pipeline as each part of the
    // pipeline finishes.
    downloadString.Complete();

    // Wait for the last block in the pipeline to process all messages.
    printReversedWords.Completion.Wait();

    Console.ReadLine();
}