/// <summary>
/// Runs the read → template-generate → write pipeline over all configured paths
/// and returns once every generated file has been written.
/// </summary>
public async Task Generate()
{
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    var readOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.ReadThreadCount };
    var writeOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.WriteThreadCount };
    var processOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.ProcessThreadCount };

    // BUG FIX: the original declared TransformBlock<string, Task<string>> and then
    // blocked on readSourceTask.Result (and WriteTextAsync(...).Wait()) downstream.
    // Using the async Func<TInput, Task<TOutput>> overloads lets Dataflow await the
    // tasks itself, so no pool thread is blocked.
    var readTransform = new TransformBlock<string, string>(
        readPath => config.Reader.ReadTextAsync(readPath), readOptions);
    var sourceToTestfileTransform = new TransformManyBlock<string, PathContentPair>(
        source => config.TemplateGenerator.Generate(source), processOptions);
    var writeAction = new ActionBlock<PathContentPair>(
        pathTextPair => config.Writer.WriteTextAsync(pathTextPair), writeOptions);

    readTransform.LinkTo(sourceToTestfileTransform, linkOptions);
    sourceToTestfileTransform.LinkTo(writeAction, linkOptions);

    foreach (string readPath in config.ReadPaths)
    {
        await readTransform.SendAsync(readPath);
    }

    readTransform.Complete();
    await writeAction.Completion;
}
/// <summary>
/// Resolves the given domain names in parallel (bounded by _maxParallelism),
/// batching results before appending them to the result list.
/// </summary>
public async Task <IEnumerable <ResolvedDomain> > ResolveDomains(IEnumerable <string> domains)
{
    // BUG FIX: the original called domains.Count() for the list capacity and then
    // enumerated the sequence again in the foreach, evaluating a lazy source twice.
    var domainList = domains as IReadOnlyCollection<string> ?? domains.ToList();

    var results = new List <ResolvedDomain>(domainList.Count);
    var resolveBlock = new TransformBlock <string, ResolvedDomain>(d => Resolve(d),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = _maxParallelism });
    var bufferBlock = new BatchBlock <ResolvedDomain>(_bufferSize);
    // Single ActionBlock (default DOP 1), so List.Add is safe here.
    var insertBlock = new ActionBlock <ResolvedDomain[]>(batch => results.AddRange(batch));

    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    resolveBlock.LinkTo(bufferBlock, linkOptions);
    bufferBlock.LinkTo(insertBlock, linkOptions);

    foreach (var dom in domainList)
    {
        await resolveBlock.SendAsync(dom);
    }
    resolveBlock.Complete();
    // Completion propagates resolve -> batch -> insert; the final partial batch
    // is flushed by BatchBlock on completion.
    await insertBlock.Completion;
    return results;
}
/// <summary>
/// Fetches forecasts for every location with bounded concurrency and flattens
/// the per-location collections into a single array.
/// </summary>
public async Task <IReadOnlyCollection <WeatherForecastResponse> > Execute(IReadOnlyList <string> locations)
{
    var fetchBlock = new TransformBlock <string, IReadOnlyCollection <WeatherForecastResponse> >(
        _dummyApiHttpClient.GetWeatherForecast,
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = ThrottlerBenchmark.MaxConcurrency });
    var resultBuffer = new BufferBlock <IReadOnlyCollection <WeatherForecastResponse> >();
    fetchBlock.LinkTo(resultBuffer);

    foreach (var location in locations)
    {
        await fetchBlock.SendAsync(location);
    }
    fetchBlock.Complete();
    await fetchBlock.Completion;

    if (!resultBuffer.TryReceiveAll(out var forecasts))
    {
        throw new Exception("Error when trying to receive items from Buffer");
    }
    return forecasts.SelectMany(x => x).ToArray();
}
/// <summary>
/// Starts a new run: creates a fresh buffer and propagation block, then spins up
/// a feeder task that drains the buffer into the block until cancellation.
/// </summary>
/// <exception cref="DataflowException">A previous run is still pending.</exception>
public void Start()
{
    if (_propagationBlock != null && !_propagationBlock.Completion.IsCompleted)
    {
        throw new DataflowException("You must call StopInput first, as the previous run is still pending");
    }

    InternalBuffer = CreateBuffer();
    _propagationBlock = CreatePropagationBlock();

    _feederTask = Task.Run(async () =>
    {
        // BUG FIX: the original looped forever and used Thread.Sleep(2000),
        // blocking a pool thread and ignoring _cts entirely (the token passed
        // to Task.Run is only checked before the task starts).
        while (!_cts.Token.IsCancellationRequested)
        {
            if (!InternalBuffer.TryTake(out var item))
            {
                // Idle poll; cancellation wakes us promptly.
                await Task.Delay(2000, _cts.Token);
                continue;
            }
            _semaphore.Set();
            await _propagationBlock.SendAsync(item);
        }
    }, _cts.Token);

    EnsureFetchTaskIsRunning();
}
// Demonstrates BroadcastBlock fan-out: one producer, two buffered subscribers
// (one filtered), each drained by its own printing ActionBlock.
public override void Run()
{
    TransformBlock <int, RunModel> producer = new TransformBlock <int, RunModel>((index) => new RunModel(index));
    // null cloning function: both subscribers receive the same RunModel instance.
    BroadcastBlock <RunModel> publisher = new BroadcastBlock <RunModel>(null);
    BufferBlock <RunModel> subscriber_1 = new BufferBlock <RunModel>();
    BufferBlock <RunModel> subscriber_2 = new BufferBlock <RunModel>();
    ActionBlock <RunModel> processer_1 = new ActionBlock <RunModel>(model => Helper.PrintLine($"subscriber_1 processing {model.Name}"));
    ActionBlock <RunModel> processer_2 = new ActionBlock <RunModel>(model => Helper.PrintLine($"subscriber_2 processing {model.Name}"));
    producer.LinkTo(publisher);
    // subscriber_1 only subscribes to a subset of the messages (Index < 5).
    publisher.LinkTo(subscriber_1, model => model.Index < 5);
    publisher.LinkTo(subscriber_2);
    subscriber_1.LinkTo(processer_1);
    subscriber_2.LinkTo(processer_2);
    // Propagate completion manually down both branches — the links above were
    // created without DataflowLinkOptions.PropagateCompletion.
    producer.Completion.ContinueWith((pre) => publisher.Complete());
    publisher.Completion.ContinueWith((pre) => subscriber_1.Complete());
    publisher.Completion.ContinueWith((pre) => subscriber_2.Complete());
    subscriber_1.Completion.ContinueWith((pre) => processer_1.Complete());
    subscriber_2.Completion.ContinueWith((pre) => processer_2.Complete());
    // NOTE(review): the SendAsync tasks are discarded; with unbounded capacity
    // they complete synchronously here, but they are unobserved — confirm intended.
    Enumerable.Range(0, 10).ToList().ForEach(index => producer.SendAsync(index));
    producer.Complete();
    // Wait for the tail of the pipeline to finish.
    processer_1.Completion.Wait();
    processer_2.Completion.Wait();
}
// Starts the engine pump: sends a tick (0) into _transformBlock either forever
// (isInfintite) or tactsCount times, then completes the block when the pump ends.
private void StartEngineInternal(bool isInfintite, int tactsCount, CancellationToken cancellationToken)
{
    // NOTE(review): _isRunning is read and written without synchronization and is
    // never reset to false when the pump finishes — confirm the intended lifecycle.
    if (_isRunning)
    {
        return;
    }
    Task.Run(async () =>
    {
        var counter = 0;
        while (isInfintite || counter < tactsCount)
        {
            await _transformBlock.SendAsync(0);
            // Cancellation is only observed after each send has completed.
            if (cancellationToken.IsCancellationRequested)
            {
                return;
            }
            counter++;
        }
    }, cancellationToken).ContinueWith((task) =>
    {
        // Complete the block regardless of how the pump task ended.
        _transformBlock.Complete();
    });
    _isRunning = true;
}
/// <summary>
/// Queues each transaction for processing; transactions the hub declines are
/// reported as NodeValidationFailed and logged.
/// </summary>
public async Task AddTransactionsAsync(IEnumerable <Transaction> transactions)
{
    // Nothing can be queued until the best chain is known.
    if (_bestChainHash == Hash.Empty)
    {
        return;
    }

    foreach (var transaction in transactions)
    {
        var transactionId = transaction.GetHash();
        var queuedTransaction = new QueuedTransaction
        {
            TransactionId = transactionId,
            Transaction = transaction,
            EnqueueTime = TimestampHelper.GetUtcNow()
        };

        if (await _processTransactionJobs.SendAsync(queuedTransaction))
        {
            continue;
        }

        // The hub declined the transaction — surface the failure to listeners.
        await LocalEventBus.PublishAsync(new TransactionValidationStatusChangedEvent
        {
            TransactionId = transactionId,
            TransactionResultStatus = TransactionResultStatus.NodeValidationFailed,
            Error = "Failed to enter tx hub."
        });
        Logger.LogWarning($"Process transaction:{queuedTransaction.TransactionId} failed.");
    }
}
/// <summary>
/// Runs the read → test-generation → write pipeline over fFilenames and
/// returns once every output file has been written.
/// </summary>
public async Task Start()
{
    Reader fileReader = new Reader();
    Writer fileWriter = new Writer(fFolder);

    var reader = new TransformBlock <string, string>(filename => fileReader.ReadFromFile(filename),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = fMaxDegreeOfParallelism });
    var transformer = new TransformManyBlock <string, string>(data => GeneratorTestClasses.Start(data),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = fMaxDegreeOfParallelism });
    var writer = new ActionBlock <string>(text => fileWriter.WritetoFile(text),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = fMaxDegreeOfParallelism });

    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    reader.LinkTo(transformer, linkOptions);
    transformer.LinkTo(writer, linkOptions);

    foreach (string filename in fFilenames)
    {
        await reader.SendAsync(filename);
    }
    reader.Complete();
    // BUG FIX: was writer.Completion.Wait(), which blocks a thread inside an
    // async method; await keeps the method fully asynchronous.
    await writer.Completion;
}
/// <summary>
/// Applies <paramref name="action"/> to each input with at most
/// <paramref name="concurrency"/> parallel workers and returns the outputs.
/// </summary>
public async Task <IEnumerable <TOutput> > ConcurrentAsync <TInput, TOutput>(Func <TInput, TOutput> action, IEnumerable <TInput> inputs, int concurrency, CancellationToken token = default(CancellationToken))
{
    var transformOptions = new ExecutionDataflowBlockOptions()
    {
        MaxDegreeOfParallelism = concurrency,
        CancellationToken = token
    };
    var workBlock = new TransformBlock <TInput, TOutput>(action, transformOptions);
    var collectBlock = new BufferBlock <TOutput>(new DataflowBlockOptions() { CancellationToken = token });

    IEnumerable <TOutput> output = Enumerable.Empty <TOutput>();
    // Dispose the link once we are done draining the buffer.
    using (workBlock.LinkTo(collectBlock))
    {
        foreach (var input in inputs)
        {
            await workBlock.SendAsync(input, token);
        }
        workBlock.Complete();
        await workBlock.Completion;

        if (collectBlock.TryReceiveAll(out IList <TOutput> items))
        {
            output = items;
        }
    }
    return output;
}
// Verifies that a TransformBlock built with an already-canceled token declines
// all input, produces no output, and surfaces cancellation via Completion.
public async Task TestPrecanceled()
{
    var bb = new TransformBlock <int, int>(i => i,
        new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(canceled: true) });

    int ignoredValue;
    IList <int> ignoredValues;

    // Linking still succeeds on a canceled block; dispose the link immediately.
    IDisposable link = bb.LinkTo(DataflowBlock.NullTarget <int>());
    Assert.NotNull(link);
    link.Dispose();

    // All input is declined (Post false, SendAsync completes synchronously with false).
    Assert.False(bb.Post(42));
    var t = bb.SendAsync(42);
    Assert.True(t.IsCompleted);
    Assert.False(t.Result);

    // No output was produced.
    Assert.False(bb.TryReceiveAll(out ignoredValues));
    Assert.False(bb.TryReceive(out ignoredValue));

    Assert.NotNull(bb.Completion);
    await Assert.ThrowsAnyAsync <OperationCanceledException>(() => bb.Completion);
    bb.Complete(); // just make sure it doesn't throw
}
// Downloads each URI (throttled) and deserializes the JSON payload into
// DummyData, returning whatever reached the result buffer.
public async Task <IEnumerable <DummyData> > LoadAsync(IEnumerable <Uri> uris)
{
    IList <DummyData> result;
    // NOTE(review): a new HttpClient per call risks socket exhaustion under load —
    // consider a shared instance or IHttpClientFactory.
    using (var client = new HttpClient())
    {
        // downloader block with parallelism limit
        var downloader = new TransformBlock <Uri, string>(
            async u => await client.GetStringAsync(u),
            new ExecutionDataflowBlockOptions
        {
            MaxDegreeOfParallelism = _maxParallelism,
            EnsureOrdered = false,
            SingleProducerConstrained = true
        });
        // deserializer, unbound parallelism
        // (since we throttle producer (downloader), it won't probably spin more tasks anyway)
        var deserializer = new TransformBlock <string, DummyData>(
            s => JsonConvert.DeserializeObject <DummyData>(s),
            new ExecutionDataflowBlockOptions
        {
            MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded,
            EnsureOrdered = false,
            SingleProducerConstrained = true
        });
        // buffer to access result
        var buffer = new BufferBlock <DummyData>(
            new ExecutionDataflowBlockOptions { EnsureOrdered = false });
        // link blocks together
        var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
        downloader.LinkTo(deserializer, linkOptions);
        deserializer.LinkTo(buffer, linkOptions);
        // start sending input
        foreach (Uri uri in uris)
        {
            await downloader.SendAsync(uri);
        }
        // input completed
        downloader.Complete();
        // await deserializer
        await deserializer.Completion;
        // pipeline done, receive result
        buffer.TryReceiveAll(out result);
    }
    return result;
}
/// <summary>
/// Hands the project to the scan block; a declined send indicates a
/// synchronization problem and is surfaced as an exception.
/// </summary>
public override async Task Run(Project project)
{
    var accepted = await _scanBlock.SendAsync(project).ConfigureAwait(false);
    if (!accepted)
    {
        throw new Exception("Thread synchronization error.");
    }
}
/// <summary>
/// Processes each keyword with up to 8 concurrent workers and collects the
/// results through a single-threaded sink (List.Add is not thread-safe).
/// </summary>
public static async Task <List <Keyword> > DoWork(List <string> keywords)
{
    // This is where you specify the max number of threads to use.
    var workBlock = new TransformBlock <string, Keyword>(
        async keyword => await Work(keyword),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 });

    var collected = new List <Keyword>();
    // Output only 1 item at a time, because 'result.Add()' is not threadsafe.
    var collectBlock = new ActionBlock <Keyword>(
        item => collected.Add(item),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

    workBlock.LinkTo(collectBlock, new DataflowLinkOptions { PropagateCompletion = true });

    foreach (string keyword in keywords)
    {
        await workBlock.SendAsync(keyword);
    }
    workBlock.Complete();
    await collectBlock.Completion;

    return collected;
}
/// <summary>
/// Runs the tech-log normalization process and writes the normalized events
/// to the output stream.
/// </summary>
/// <param name="connection">Open SQL connection to read events from.</param>
/// <param name="outputStream">Stream that receives the normalized events.</param>
/// <returns>Task that completes when every event has been written.</returns>
public static async Task NormalizeEventsForJoin(SqlConnection connection, StreamWriter outputStream)
{
    var blockOptions = new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount };
    var writeToOutputStream = new ActionBlock <string>(text => Common.WriteToOutputStream(text, outputStream));
    var normalizeEventForJoin = new TransformBlock <string, string>(NormalizeEventForJoin, blockOptions);
    normalizeEventForJoin.LinkTo(writeToOutputStream, new DataflowLinkOptions() { PropagateCompletion = true });

    // BUG FIX: the reader was never disposed; 'using' releases it even if the
    // loop throws.
    using (var reader = await GetEventsReader(connection))
    {
        while (await reader.ReadAsync())
        {
            var data = (string)reader.GetValue(0);
            await normalizeEventForJoin.SendAsync(data);
        }
    }

    normalizeEventForJoin.Complete();
    await writeToOutputStream.Completion;
}
/// <summary>
/// Runs the prepare → cook → package → delivery pipeline until the stock has
/// no more recipes, then waits for every stage to drain.
/// </summary>
public async Task RunAsync(Stock stock)
{
    _reportingTimer?.Start();

    var prepareStep = new TransformBlock <Recipe, Cake>(async recipe => await PrepareCakeAsync(recipe), _prepareOptions);
    var cookStep = new TransformBlock <Cake, Cake>(async cake => await CookCakeAsync(cake), _cookOptions);
    var packageStep = new TransformBlock <Cake, Cake>(async cake => await PackageCakeAsync(cake), _packageOptions);
    var deliveryStep = new ActionBlock <Cake>(async cake => await DeliveryCakeAsync(cake));

    prepareStep.LinkTo(cookStep, _linkOptions);
    cookStep.LinkTo(packageStep, _linkOptions);
    packageStep.LinkTo(deliveryStep, _linkOptions);

    while (true)
    {
        var recipe = await stock.GetNextRecipeAsync();
        if (recipe == null)
        {
            break;
        }
        await prepareStep.SendAsync(recipe);
    }
    prepareStep.Complete();

    await prepareStep.Completion;
    await cookStep.Completion;
    await packageStep.Completion;
    // BUG FIX: the original never awaited the delivery stage, so the method
    // returned (and stopped the reporting timer) while deliveries could still
    // be running.
    // NOTE(review): assumes _linkOptions sets PropagateCompletion = true — confirm.
    await deliveryStep.Completion;

    _reportingTimer?.Stop();
}
/// <summary>
/// Downloads the content of each URI with bounded parallelism and returns the
/// downloaded strings.
/// </summary>
public async Task <IEnumerable <string> > LoadAsync(IEnumerable <Uri> uris)
{
    IList <string> result;
    using (var client = new HttpClient())
    {
        // Throttled download stage feeding an unbounded collection buffer.
        var downloadBlock = new TransformBlock <Uri, string>(
            uri => client.GetStringAsync(uri),
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = _maxParallelism });
        var resultBuffer = new BufferBlock <string>();
        downloadBlock.LinkTo(resultBuffer);

        foreach (var uri in uris)
        {
            await downloadBlock.SendAsync(uri);
        }
        downloadBlock.Complete();
        await downloadBlock.Completion;

        resultBuffer.TryReceiveAll(out result);
    }
    return result;
}
/// <summary>
/// Demo pipeline: doubles each input, then subtracts 2, printing at each stage.
/// </summary>
static async Task Main(string[] args)
{
    var multiplyBlock = new TransformBlock <int, int> (item =>
    {
        item = item * 2;
        Console.WriteLine("multiplyBlock" + item);
        return (item);
    });
    var substractBlock = new TransformBlock <int, int> (item =>
    {
        item = item - 2;
        Console.WriteLine("substractBlock:" + item);
        return (item);
    });
    var options = new DataflowLinkOptions { PropagateCompletion = true };
    multiplyBlock.LinkTo(substractBlock, options);
    // Drain substractBlock's output; without a consumer its Completion task
    // would never finish once items are actually flowing.
    substractBlock.LinkTo(DataflowBlock.NullTarget <int>());

    int inputNumber = 10;
    // BUG FIX: the original called Complete() and awaited the downstream
    // Completion BEFORE posting, so every subsequent Post/SendAsync was
    // declined and silently dropped. Feed first, then complete.
    multiplyBlock.Post(inputNumber);
    multiplyBlock.Post(inputNumber);
    multiplyBlock.Post(inputNumber);
    await multiplyBlock.SendAsync(inputNumber);
    multiplyBlock.Complete();
    await substractBlock.Completion;

    // await HandleExceptionInTransformBlock ();
    await HandleExceptionTransformBlockWithPropagateCompletion();
    BlockParallelSlim(true).ContinueWith(t => Console.WriteLine(t.IsCompleted)).Wait();
    Console.WriteLine("Hello World!");
}
/// <summary>
/// read → generate-tests → write pipeline with per-stage parallelism limits
/// taken from the configuration.
/// </summary>
public async Task Generate(List <string> pathes, string destination)
{
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    var readOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.MaxReadTasksCount };
    var processOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.MaxProcessingTasksCount };
    var writeOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = config.MaxWriteTasksCount };

    var readStage = new TransformBlock <string, string>(fileName => AsyncReader.Read(fileName), readOptions);
    var generateStage = new TransformBlock <string, List <GeneratedTest> >(sourceCode => GenerateTestClasses(sourceCode), processOptions);
    var writeStage = new ActionBlock <List <GeneratedTest> >(generatedClasses => AsyncWriter.Write(destination, generatedClasses), writeOptions);

    readStage.LinkTo(generateStage, linkOptions);
    generateStage.LinkTo(writeStage, linkOptions);

    foreach (var path in pathes)
    {
        await readStage.SendAsync(path);
    }
    readStage.Complete();
    await writeStage.Completion;
}
/// <summary>
/// Creates each account via the web service and then deletes it, returning the
/// elapsed time in seconds.
/// </summary>
static async Task <double> ProcessData(CDSWebApiService svc, List <JObject> accountsToImport, ExecutionDataflowBlockOptions executionDataflowBlockOptions)
{
    var createAccounts = new TransformBlock <JObject, Uri>(
        async a => { return (await svc.PostCreateAsync("accounts", a)); },
        executionDataflowBlockOptions);
    var deleteAccounts = new ActionBlock <Uri>(
        async u => { await svc.DeleteAsync(u); },
        executionDataflowBlockOptions);

    createAccounts.LinkTo(deleteAccounts, new DataflowLinkOptions { PropagateCompletion = true });

    var start = DateTime.Now;
    // BUG FIX: the original used List.ForEach(a => createAccounts.SendAsync(a)),
    // leaving the send tasks unobserved — with a bounded block that silently
    // races Complete() and can drop inputs. Await each send instead.
    foreach (var a in accountsToImport)
    {
        await createAccounts.SendAsync(a);
    }
    createAccounts.Complete();
    await deleteAccounts.Completion;

    //Calculate the duration to complete
    return ((DateTime.Now - start).TotalSeconds);
}
/// <summary>
/// Synchronous pipeline driver: feeds batches from the mass-mail source until
/// exhausted (or stopped), then shuts the pipeline down and waits for the
/// final write stage.
/// </summary>
public void Run()
{
    try
    {
        //*** Start pipeline ***
        var data = _massMail.GetBatch();
        while (data != null)
        {
            _parseXmlDataBlock.SendAsync(data).Wait();
            if (_stopPipeline)
            {
                break;
            }
            data = _massMail.GetBatch();
        }

        //*** Shut down ***
        _parseXmlDataBlock.Complete();
        _writeResultsBlock.Completion.Wait();
        if (_stopPipeline)
        {
            Logger.Log.Info("Pipeline has been stopped by user");
        }
    }
    catch (Exception ex)
    {
        // BUG FIX: corrected the misspelled log message ("ocured" -> "occurred").
        Logger.Log.Error("Exception occurred: {0}", ex.Message);
    }
}
// Sends 10 inputs through a single-threaded TransformBlock, then verifies that
// TryReceiveAll returns every output in submission order with monotonically
// increasing start and finish timestamps.
public void SingleThreaded_TryReceiveAll()
{
    var numInputs = 10;
    var boundedCapacity = numInputs;
    var maxDegreeParallelism = 1;
    var block = new TransformBlock <BlockTestInput, BlockTestOutput>(async i =>
    {
        var startTime = DateTime.Now;
        await Task.Delay(i.TaskDuration);
        return (new BlockTestOutput(i, startTime, DateTime.Now));
    }, new ExecutionDataflowBlockOptions() { BoundedCapacity = boundedCapacity, MaxDegreeOfParallelism = maxDegreeParallelism });

    var inputList = new Dictionary <int, BlockTestInput>();
    for (int i = 0; i < numInputs; i++)
    {
        // 1 ms task duration per input.
        inputList.Add(i, new BlockTestInput(i, new TimeSpan(0, 0, 0, 0, 1)));
    }
    // NOTE(review): the SendAsync tasks are not awaited; BoundedCapacity equals
    // the input count here so the sends are accepted, but this would race with
    // a smaller capacity.
    foreach (var item in inputList)
    {
        block.SendAsync(item.Value);
    }
    block.Complete();
    block.Completion.Wait();

    var received = block.TryReceiveAll(out var results);
    Assert.IsTrue(received);
    // NOTE(review): the "} } ;" token sequence below looks like a formatting
    // artifact around the faulted-output branch — confirm against the original source.
    var outputList = results.Select(r => { if (r.Exception != null) { return new BlockTestOutput() { Exception = r.Exception } } ; return(r.Output); }).ToList();
    Assert.AreEqual(numInputs, outputList.Count);

    var lastId = -1;
    var lastStart = DateTime.MinValue;
    var lastCompletion = DateTime.MinValue;
    // Outputs must come back in input order with strictly increasing timestamps.
    foreach (var output in outputList)
    {
        Assert.AreEqual(lastId + 1, output.Input.Id);
        Assert.IsTrue(lastStart < output.TaskStart);
        Assert.IsTrue(lastCompletion < output.TaskFinished);
        lastStart = output.TaskStart;
        lastCompletion = output.TaskFinished;
        lastId++;
    }
}
/// <summary>
/// Search asynchronously for many extensions across all fixed and removable disks.
/// </summary>
/// <param name="targetExtensions">Some file extensions for use search pattern.</param>
/// <param name="CTS">Cancellation source observed by the directory search.</param>
/// <example>
/// FileExtension example:
/// {".jpg", 646546Byte, 646Byte}
/// {".pdf", 25464645546Byte, 60000Byte}
/// </example>
/// <returns>A sorted list of detected files</returns>
public static async Task<List<FileInfo>> DiskParallelProbingAsync(List<FileExtensionOption> targetExtensions, System.Threading.CancellationTokenSource CTS)
{
    return await Task.Run(() =>
    {
        searchComplete = false;
        // Reporter("DiskProbing", new ReportEventArgs("DiskProbing", ReportCodes.DiskProbingStarted, "---{Search Disks Started}---"));
        List<FileInfo> _result = new List<FileInfo>();
        //
        // Find specific folders from windows drives instead of the total drive.
        //
        FolderInfo[] SpecificsDirectory = CheckDirectoriesChanges.GetDirectoriesInformation();
        //
        // Set Data-flow: parallel per-directory search feeding a single collector.
        //
        TransformBlock<FolderInfo, List<FileInfo>> TB = new TransformBlock<FolderInfo, List<FileInfo>>(dir =>
        {
            Reporter(dir, new ReportEventArgs("DiskProbing", ReportCodes.TheSearchBeginning, "Searching {0} ...", dir.FullName));
            List<FileInfo> res = dir.GetDirectoryInfo.SearchDirectory(targetExtensions, CTS);
            Reporter(dir, new ReportEventArgs("DiskProbing", ReportCodes.TheSearchCompleted, "The Search {0} was completed!", dir.FullName));
            return res;
        }, new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount });
        ActionBlock<List<FileInfo>> AB = new ActionBlock<List<FileInfo>>(lst => _result.AddRange(lst));

        try
        {
            // BUG FIX: link with PropagateCompletion and wait on the ACTION block.
            // The original waited only on TB.Completion, racing AB's appends, so
            // _result could be returned incomplete.
            TB.LinkTo(AB, new DataflowLinkOptions { PropagateCompletion = true });

            // BUG FIX: the original fed TB via Parallel.ForEach with an async
            // lambda (fire-and-forget async void), so sends could still be in
            // flight when TB.Complete() ran. TB is unbounded, so Post suffices.
            foreach (FolderInfo dir in SpecificsDirectory)
            {
                TB.Post(dir);
            }
            TB.Complete();
            AB.Completion.Wait();
        }
        catch (Exception ex)
        {
            Reporter(SpecificsDirectory, new ReportEventArgs("SearchEngine.DiskProbing.SpecificsDirectory", ex));
        }
        searchComplete = true;
        Reporter("DiskProbing", new ReportEventArgs("DiskProbing", ReportCodes.DiskProbingFinished, "---{Search Disks Finished}---"));
        LastSearchResult = _result;
        return _result;
    });
}
/// <summary>
/// Pumps sensor A readings into the pipeline until cancellation is requested,
/// then completes the block.
/// </summary>
static async Task continuouslyReadFromSensorA(TransformBlock <SensorAData, SensorData> queue, CancellationToken cancellation)
{
    while (true)
    {
        if (cancellation.IsCancellationRequested)
        {
            break;
        }
        await queue.SendAsync(readSensorAData());
    }
    queue.Complete();
}
// Sends 5 inputs through a single-threaded async TransformBlock whose worker
// throws on input 2, and checks how the fault surfaces through TryReceive.
public void SingleTaskExceptioned_AsyncFunc()
{
    var block = new TransformBlock <int, string>(async i =>
    {
        Console.WriteLine("Starting block with input: " + i);
        await Task.Delay(100).ConfigureAwait(false);
        // Input 2 faults the block; other inputs finish after a varying delay.
        if (i == 2)
        {
            throw new ArgumentException("i == 2", nameof(i));
        }
        else
        {
            await Task.Delay(500 / (i * 2 + 1)).ConfigureAwait(false);
        }
        return ($"{i} completed");
    }, new ExecutionDataflowBlockOptions() { BoundedCapacity = 10, MaxDegreeOfParallelism = 1 });

    // NOTE(review): blocking on SendAsync(...).Wait() in a test is intentional
    // here (synchronous test method).
    for (int i = 0; i < 5; i++)
    {
        block.SendAsync(i).Wait();
    }

    int received = 0;
    var outputs = new List <string>();
    while (received < 5)
    {
        block.OutputAvailableAsync().Wait();
        if (received == 2)
        {
            // The third receive is expected to surface the worker's exception.
            Assert.ThrowsException <ArgumentException>(() => block.TryReceive(out var output));
            outputs.Add("2 Exception");
            received++;
        }
        else
        {
            if (block.TryReceive(out var output))
            {
                received++;
                if (received == 2)
                {
                    Console.WriteLine("this should've errored");
                }
                outputs.Add(output);
            }
        }
    }

    // Every non-faulted output should echo its input index.
    for (int i = 0; i < 5; i++)
    {
        if (i != 2)
        {
            Assert.IsTrue(outputs[i].Contains(i.ToString()));
        }
    }
}
/// <summary>
/// Feeds every source path into the batching stage, then waits for the final
/// stage of the pipeline to drain.
/// </summary>
public async Task ProcessAll(IEnumerable <SourcePath> sourcePaths)
{
    foreach (var sourcePath in sourcePaths)
    {
        await _getSourceFileBatch.SendAsync(sourcePath);
    }
    _getSourceFileBatch.Complete();
    await _setStatusToComplete.Completion;
}
// Submits a message to the head of the pipe unless shutdown has begun.
public void submitToPipe(int message)
{
    if (cancellationTokenSource.IsCancellationRequested)
    {
        Trace.WriteLine($"Message {message} was rejected. Pipe is shutting down.Throttling meanwhile");
        return;
    }
    // NOTE(review): the SendAsync task is discarded, so a declined or postponed
    // send (e.g. a full bounded block) is silently lost — confirm this is intended.
    b1.SendAsync(message);
}
// Loads all *.con consist files from the folder in parallel, keeping only those
// with a locomotive, and returns them as a list.
public static async Task <IEnumerable <Consist> > GetConsists(Folder folder, CancellationToken token)
{
    if (null == folder)
    {
        throw new ArgumentNullException(nameof(folder));
    }
    // Semaphore serializes List.Add across the action block's invocations.
    using (SemaphoreSlim addItem = new SemaphoreSlim(1))
    {
        List <Consist> result = new List <Consist>();
        string consistsDirectory = folder.ContentFolder.ConsistsFolder;
        if (Directory.Exists(consistsDirectory))
        {
            // Parse consist files with one worker per processor.
            TransformBlock <string, Consist> inputBlock = new TransformBlock <string, Consist>
                (consistFile =>
            {
                return (FromFile(consistFile, folder, false));
            },
                new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = System.Environment.ProcessorCount, CancellationToken = token });
            ActionBlock <Consist> actionBlock = new ActionBlock <Consist>
                (async consist =>
            {
                // Skip consists without a locomotive.
                if (null == consist.Locomotive)
                {
                    return;
                }
                try
                {
                    await addItem.WaitAsync(token).ConfigureAwait(false);
                    result.Add(consist);
                }
                finally
                {
                    addItem.Release();
                }
            });
            inputBlock.LinkTo(actionBlock, new DataflowLinkOptions { PropagateCompletion = true });
            foreach (string consistFile in Directory.EnumerateFiles(consistsDirectory, "*.con"))
            {
                await inputBlock.SendAsync(consistFile).ConfigureAwait(false);
            }
            inputBlock.Complete();
            // Completion propagates, so waiting on the action block guarantees
            // every accepted consist has been added before the semaphore is disposed.
            await actionBlock.Completion.ConfigureAwait(false);
        }
        return (result);
    }
}
/// <summary>
/// Ignores null selections; otherwise hands the study off to the series loader block.
/// </summary>
private async Task GetStudyDetails(StudyVm study)
{
    if (study == null)
    {
        return;
    }
    await _loadStudySeriesBlock.SendAsync(study);
}
/// <summary>
/// Pushes the values 0..99 into the executor, honoring its back-pressure on each send.
/// </summary>
static async Task enqueue(TransformBlock <int, int> executor)
{
    for (int index = 0; index < 100; index++)
    {
        Console.WriteLine("Queuing data " + index);
        await executor.SendAsync(index);
    }
}
private static async Task Main(string[] args)
{
    //FizzBuzz implementation using TPL-DataFlow (mutating version)
    // Each stage mutates the same FizzBuzz instance as it flows through.
    var source = new TransformBlock <int, FizzBuzz>(n => new FizzBuzz(n));
    var fizzStage = new TransformBlock <FizzBuzz, FizzBuzz>(fb =>
    {
        fb.IsFizz = fb.Number % 3 == 0;
        return (fb);
    });
    var buzzStage = new TransformBlock <FizzBuzz, FizzBuzz>(fb =>
    {
        fb.IsBuzz = fb.Number % 5 == 0;
        return (fb);
    });
    var textStage = new TransformBlock <FizzBuzz, FizzBuzz>(fb =>
    {
        if (!fb.IsFizz && !fb.IsBuzz)
        {
            fb.Text = fb.Number.ToString();
        }
        if (fb.IsFizz)
        {
            fb.Text += "data";
        }
        if (fb.IsBuzz)
        {
            fb.Text += "flow";
        }
        return (fb);
    });
    var output = new ActionBlock <FizzBuzz>(fb => Console.WriteLine(fb));

    var options = new DataflowLinkOptions() { PropagateCompletion = true };
    source.LinkTo(fizzStage, options);
    fizzStage.LinkTo(buzzStage, options);
    buzzStage.LinkTo(textStage, options);
    textStage.LinkTo(output, options);

    for (var n = 1; n <= 100; n++)
    {
        await source.SendAsync(n);
    }
    source.Complete();
    await output.Completion;

    Console.WriteLine("Done");
    Console.ReadKey();
}
// Stress-tests AsyncFileWriter: hammers a bounded TransformBlock buffer from
// many threads and verifies the linked writer drains it to a file.
static void Main(string[] args)
{
    var dir = Environment.CurrentDirectory;
    var fileName = args.FirstOrDefault() ?? "AsyncFileWriterTest.txt";
    var filePath = Path.Combine(dir, fileName);
    File.Delete(filePath); // Start from scratch. (comment out for further appends.)

    Console.WriteLine($"Writing to file: {filePath}");

    using (var writer = new AsyncFileWriter(filePath))
    {
        // Test to ensure blocks are linkable and properly pass messages.
        var buffer = new TransformBlock <string, byte[]>(
            value => Encoding.UTF8.GetBytes(value + '\n'),
            new ExecutionDataflowBlockOptions
        {
            BoundedCapacity = 200000             // Test a max pre-buffered amount here.
        });
        buffer.LinkTo(writer, new DataflowLinkOptions { PropagateCompletion = true });
        // If the writer finishes (or faults) first, complete the buffer and
        // discard whatever is still queued so producers are not stuck.
        writer.Completion.ContinueWith(t =>
        {
            buffer.Complete();
            buffer.LinkTo(DataflowBlock.NullTarget <byte[]>());                     // Empty the buffer and allow the buffer to complete.
        });

        // Post when capacity allows; otherwise fall back to a blocking send
        // unless the buffer has already completed.
        void write(int i)
        {
            var message = $"{i}) {DateTime.Now}";
            if (!buffer.Post(message) && !buffer.Completion.IsCompleted)
            {
                buffer.SendAsync(message).Wait();
            }
        }

        Parallel.For(0, 10000, write);
        Parallel.For(10000, 20000, write);

        //writer.Fault(new Exception("Stop!"));

        Task.Delay(1).Wait();
        Parallel.For(20000, 100000, write);

        Task.Delay(1000).Wait();         // Demonstrate that when nothing buffered the active stream closes.
        Parallel.For(100000, 1000000, write);

        buffer.Complete();
        buffer.Completion.Wait();

        if (writer.Completion.IsFaulted)
        {
            throw writer.Completion.Exception;
        }
    }
}
// Routes annotated paths to per-host fingerprinting blocks (created lazily, one
// per file host, 5-way parallel each), short-circuiting paths whose fingerprint
// is already cached. Duplicated paths are filtered out up front.
async Task TransformAnnotatedPathsToFileFingerprint(ISourceBlock<AnnotatedPath[]> annotatedPathSourceBlock, ITargetBlock<IFileFingerprint> fileFingerprintTargetBlock, CancellationToken cancellationToken)
{
    try
    {
        // One TransformBlock per host so each host gets its own bounded reader pool.
        var targets = new ConcurrentDictionary<string, TransformBlock<AnnotatedPath, IFileFingerprint>>(StringComparer.InvariantCultureIgnoreCase);

        var routeBlock = new ActionBlock<AnnotatedPath[]>(
            async filenames =>
            {
                foreach (var filename in filenames)
                {
                    if (null == filename)
                        continue;

                    // Cache hit: forward the cached fingerprint, skip processing.
                    var cachedBlob = GetCachedFileFingerprint(filename.FileInfo);
                    if (null != cachedBlob)
                    {
                        await fileFingerprintTargetBlock.SendAsync(cachedBlob, cancellationToken).ConfigureAwait(false);
                        continue;
                    }

                    var host = PathUtil.GetHost(filename.FileInfo.FullName);

                    // Get-or-create the per-host block; the TryAdd/TryGetValue loop
                    // handles the race where another worker creates it first.
                    TransformBlock<AnnotatedPath, IFileFingerprint> target;
                    while (!targets.TryGetValue(host, out target))
                    {
                        target = new TransformBlock<AnnotatedPath, IFileFingerprint>(annotatedPath => ProcessFileAsync(annotatedPath.FileInfo, cancellationToken),
                            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 5, CancellationToken = cancellationToken });

                        if (!targets.TryAdd(host, target))
                            continue;

                        Debug.WriteLine($"FileFingerprintManager.GenerateBlobsAsync() starting reader for host: '{host}'");

                        // Non-null fingerprints go downstream; nulls are discarded.
                        target.LinkTo(fileFingerprintTargetBlock, blob => null != blob);
                        target.LinkTo(DataflowBlock.NullTarget<IFileFingerprint>());

                        break;
                    }

                    //Debug.WriteLine($"FileFingerprintManager.GenerateFileFingerprintsAsync() Sending {annotatedPath} for host '{host}'");

                    await target.SendAsync(filename, cancellationToken).ConfigureAwait(false);
                }
            },
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 16, CancellationToken = cancellationToken });

        // Drop paths already seen (case-insensitive); runs single-threaded so the
        // HashSet needs no locking.
        var distinctPaths = new HashSet<string>(StringComparer.InvariantCultureIgnoreCase);

        var distinctBlock = new TransformBlock<AnnotatedPath[], AnnotatedPath[]>(
            annotatedPaths =>
            {
                for (var i = 0; i < annotatedPaths.Length; ++i)
                {
                    if (!distinctPaths.Add(annotatedPaths[i].FileInfo.FullName))
                        annotatedPaths[i] = null;
                }

                return annotatedPaths;
            },
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1, CancellationToken = cancellationToken });

        distinctBlock.LinkTo(routeBlock, new DataflowLinkOptions { PropagateCompletion = true });
        annotatedPathSourceBlock.LinkTo(distinctBlock, new DataflowLinkOptions { PropagateCompletion = true });

        await routeBlock.Completion.ConfigureAwait(false);

        // Only after routing is done can the per-host blocks be completed; their
        // links were created without PropagateCompletion.
        foreach (var target in targets.Values)
            target.Complete();

        await Task.WhenAll(targets.Values.Select(target => target.Completion));
    }
    catch (Exception ex)
    {
        Console.WriteLine("FileFingerprintManager.GenerateFileFingerprintsAsync() failed: " + ex.Message);
    }
    finally
    {
        Debug.WriteLine("FileFingerprintManager.GenerateFileFingerprintsAsync() is done");

        // Always complete the downstream target, even on failure.
        fileFingerprintTargetBlock.Complete();
    }
}
// Generates `count` random log entries, formats them through a parallel
// serializer stage, and writes them to basic.async.srlz.log via a
// single-threaded writer stage.
public void ProduceLogs(int count, int buffSize)
{
    var writerOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = 10, MaxDegreeOfParallelism = 1, MaxMessagesPerTask = 10 };
    var serializerOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = buffSize, MaxDegreeOfParallelism = 8, SingleProducerConstrained = true };

    LogGenerator g = new LogGenerator();

    // NOTE(review): the StreamWriter is only closed by the continuation below;
    // if this method throws before Complete(), the file handle leaks — confirm.
    var file = new StreamWriter("basic.async.srlz.log", false);

    TransformBlock<LogEntry, string> serializer = new TransformBlock<LogEntry, string>(
        e => string.Format(e.format, e.parameters),
        serializerOptions);
    ActionBlock<string> writer = new ActionBlock<string>(s => file.WriteLine(s), writerOptions);

    serializer.LinkTo(writer, new DataflowLinkOptions() { PropagateCompletion = true });

    for (int i = 0; i < count; i++)
    {
        g.Next();
        var entry = new LogEntry() { format = g.FormatStr, parameters = new object[] { g.Param1, g.Param2, g.Param3, g.Param4, g.Param5, g.Param6 } };
        // Blocking send applies back-pressure from the bounded serializer stage.
        serializer.SendAsync(entry).Wait();
    }

    serializer.Complete();
    // Close the file once the writer has drained; Completed lets callers await shutdown.
    Completed = writer.Completion.ContinueWith(t => file.Close());
}
/// <summary>
/// A block created with an already-canceled token must decline all input,
/// produce nothing, and surface cancellation through its Completion task.
/// </summary>
public async Task TestPrecanceled()
{
    var canceledToken = new CancellationToken(canceled: true);
    var block = new TransformBlock<int, int>(x => x,
        new ExecutionDataflowBlockOptions { CancellationToken = canceledToken });

    // Linking still succeeds; dispose the link right away.
    IDisposable link = block.LinkTo(DataflowBlock.NullTarget<int>());
    Assert.NotNull(link);
    link.Dispose();

    // Every form of input is declined.
    Assert.False(block.Post(42));
    var sendTask = block.SendAsync(42);
    Assert.True(sendTask.IsCompleted);
    Assert.False(sendTask.Result);

    // Nothing was produced.
    Assert.False(block.TryReceiveAll(out IList<int> ignoredValues));
    Assert.False(block.TryReceive(out int ignoredValue));

    Assert.NotNull(block.Completion);
    await Assert.ThrowsAnyAsync<OperationCanceledException>(() => block.Completion);
    block.Complete(); // just make sure it doesn't throw
}
/// <summary>
/// Filters out already-transferred files, pushes the rest onto DetectedFiles,
/// and kicks off a background save of the detected-file data.
/// </summary>
public void AddFiles(IEnumerable<FileInfo> files)
{
    var tb = new TransformBlock<FileInfo, FileInfo>(file =>
    {
        // Pass through only files not already transferred; null marks "skip".
        if (!TransferredFiles.Contains(file))
            return file;
        return null;
    });
    var ab = new ActionBlock<FileInfo>(file =>
    {
        if (file != null)
            DetectedFiles.Push(file);
    });

    // BUG FIX: link BEFORE feeding and propagate completion, then wait on the
    // ACTION block. The original fed tb via Parallel.ForEach with async lambdas
    // (fire-and-forget sends that could arrive after tb.Complete()) and waited
    // only on tb.Completion, racing ab's pushes.
    tb.LinkTo(ab, new DataflowLinkOptions { PropagateCompletion = true });

    foreach (var file in files)
    {
        tb.Post(file);
    }
    tb.Complete();
    ab.Completion.Wait();

    //
    // Save Files
    //
    // NOTE(review): DetectedFiles.ToString() likely serializes the type name,
    // not the collection contents — confirm what SecureDataSaverAsync expects.
    Task.Run(async () => await TransformPhysicalDisk.SecureDataSaverAsync(DetectedFiles.ToString(), DetectedData_Path, HashingPass));
}
//[Fact(Skip = "Outerloop")]
/// <summary>
/// Conformance suite for TransformBlock. Every scenario is run twice: first
/// with a synchronous transform (Func&lt;int,int&gt;) in the Sync region, then with
/// an asynchronous transform (Func&lt;int, Task&lt;int&gt;&gt;) in the Async region.
/// Each scenario folds its result into <c>passed</c>; a single Assert at the
/// end reports overall success.
/// </summary>
public void RunTransformBlockConformanceTests()
{
    bool passed = true;
    // SYNC
    #region Sync
    {
        // Do everything twice - once through OfferMessage and Once through Post
        // NOTE: the non-short-circuiting '&' is deliberate — the loop stops
        // trying further feed methods once any earlier round has failed.
        for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
        {
            // Factory builds a transform -> action pipeline; the ActionBlock
            // records what arrives via TrackCaptures so FeedTarget can verify it.
            Func<DataflowBlockOptions, TargetProperties<int>> transformBlockFactory = options =>
            {
                TransformBlock<int, int> transformBlock = new TransformBlock<int, int>(i => i, (ExecutionDataflowBlockOptions)options);
                ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);
                transformBlock.LinkTo(actionBlock);
                return new TargetProperties<int> { Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false };
            };
            CancellationTokenSource cancellationSource = new CancellationTokenSource();
            var defaultOptions = new ExecutionDataflowBlockOptions();
            var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
            var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2 };
            var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token };
            passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, dopOptions, 1, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.Complete, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, cancellationOptions, 1, Intervention.Cancel, cancellationSource, feedMethod, true);
        }

        // Test chained Post/Receive
        // (4 doubling blocks in series, so each input emerges multiplied by 16)
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
            for (int i = 0; i < ITERS; i++)
            {
                network.Post(i);
                localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
            }
            Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained SendAsync/Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
            for (int i = 0; i < ITERS; i++)
            {
                network.SendAsync(i);
                localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
            }
            Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained Post all then Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
            for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true;
            for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16;
            Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained SendAsync all then Receive
        // (sums the outputs: total must equal (1+..+ITERS) * 16)
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
            var tasks = new Task[ITERS];
            for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i);
            Task.WaitAll(tasks);
            int total = 0;
            for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive();
            localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
            Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test that OperationCanceledExceptions are ignored
        // (even inputs throw OCE and must be skipped, not fault the block)
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(i => { if ((i % 2) == 0) throw new OperationCanceledException(); return i; });
            for (int i = 0; i < 2; i++) t.Post(i);
            t.Complete();
            for (int i = 0; i < 2; i++)
            {
                if ((i % 2) != 0) localPassed &= t.Receive() == i;
            }
            t.Completion.Wait();
            Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test using a precanceled token
        // (block must reject all input/output but still allow linking/Complete)
        {
            bool localPassed = true;
            try
            {
                var cts = new CancellationTokenSource();
                cts.Cancel();
                var dbo = new ExecutionDataflowBlockOptions { CancellationToken = cts.Token };
                var t = new TransformBlock<int, int>(i => i, dbo);
                int ignoredValue;
                IList<int> ignoredValues;
                localPassed &= t.LinkTo(new ActionBlock<int>(delegate { })) != null;
                localPassed &= t.SendAsync(42).Result == false;
                localPassed &= t.TryReceiveAll(out ignoredValues) == false;
                localPassed &= t.Post(42) == false;
                localPassed &= t.OutputCount == 0;
                localPassed &= t.TryReceive(out ignoredValue) == false;
                localPassed &= t.Completion != null;
                t.Complete();
            }
            catch (Exception)
            {
                localPassed = false;
            }
            Console.WriteLine("    {0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test faulting
        // (a throwing delegate must fault the block, drain queues, reject input)
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(new Func<int, int>(i => { throw new InvalidOperationException(); }));
            t.Post(42);
            t.Post(1);
            t.Post(2);
            t.Post(3);
            try { t.Completion.Wait(); }
            catch { }
            localPassed &= t.Completion.IsFaulted;
            localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
            localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
            localPassed &= t.Post(4) == false;
            Console.WriteLine("    {0}: Faulted handled correctly", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }
    }
    #endregion

    #region Async
    // ASYNC (a copy of the sync tests, but with transforms returning Task<T> instead of T)
    {
        // Do everything twice - once through OfferMessage and Once through Post
        for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
        {
            // Same factory as the sync region, but the transform hands back a Task<int>.
            Func<DataflowBlockOptions, TargetProperties<int>> transformBlockFactory = options =>
            {
                TransformBlock<int, int> transformBlock = new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i), (ExecutionDataflowBlockOptions)options);
                ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);
                transformBlock.LinkTo(actionBlock);
                return new TargetProperties<int> { Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false };
            };
            CancellationTokenSource cancellationSource = new CancellationTokenSource();
            var defaultOptions = new ExecutionDataflowBlockOptions();
            var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
            var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2 };
            var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token };
            // Larger message counts here than in the sync region (up to 10000).
            passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, dopOptions, 10, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true);
            passed &= FeedTarget(transformBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true);
        }

        // Test chained Post/Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
            for (int i = 0; i < ITERS; i++)
            {
                network.Post(i);
                localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
            }
            Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained SendAsync/Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
            for (int i = 0; i < ITERS; i++)
            {
                network.SendAsync(i);
                localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
            }
            Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained Post all then Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
            for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true;
            for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16;
            Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test chained SendAsync all then Receive
        {
            bool localPassed = true;
            const int ITERS = 2;
            var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
            var tasks = new Task[ITERS];
            for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i);
            Task.WaitAll(tasks);
            int total = 0;
            for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive();
            localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
            Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test that OperationCanceledExceptions are ignored
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(i => { if ((i % 2) == 0) throw new OperationCanceledException(); return Task.Factory.StartNew(() => i); });
            for (int i = 0; i < 2; i++) t.Post(i);
            t.Complete();
            for (int i = 0; i < 2; i++)
            {
                if ((i % 2) != 0) localPassed &= t.Receive() == i;
            }
            t.Completion.Wait();
            Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test that null tasks are ignored
        // (returning a null Task<T> means "produce no output for this input")
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(i => { if ((i % 2) == 0) return null; return Task.Factory.StartNew(() => i); });
            for (int i = 0; i < 2; i++) t.Post(i);
            t.Complete();
            for (int i = 0; i < 2; i++)
            {
                if ((i % 2) != 0) localPassed &= t.Receive() == i;
            }
            t.Completion.Wait();
            Console.WriteLine("{0}: null tasks are ignored", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test that null tasks are ignored when a reordering buffer is in place
        // (DOP=2 forces the reordering buffer; the delayed null for input 0 must
        // not block delivery of input 1)
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(i =>
            {
                if (i == 0)
                {
                    Task.Delay(10).Wait();
                    return null;
                }
                return Task.Factory.StartNew(() => i);
            }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 });
            t.Post(0);
            t.Post(1);
            try
            {
                localPassed &= t.Receive(TimeSpan.FromSeconds(4)) == 1;
            }
            catch
            {
                localPassed = false;
            }
            Console.WriteLine("{0}: null tasks are ignored with reordering buffer", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test faulting from the delegate
        // (the delegate itself throws before producing a task)
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(new Func<int, Task<int>>(i => { throw new InvalidOperationException(); }));
            t.Post(42);
            t.Post(1);
            t.Post(2);
            t.Post(3);
            try { t.Completion.Wait(); }
            catch { }
            localPassed &= t.Completion.IsFaulted;
            localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
            localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
            localPassed &= t.Post(4) == false;
            Console.WriteLine("    {0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }

        // Test faulting from the task
        // (the delegate returns a task that later faults)
        {
            bool localPassed = true;
            var t = new TransformBlock<int, int>(new Func<int, Task<int>>(i => Task<int>.Factory.StartNew(() => { throw new InvalidOperationException(); })));
            t.Post(42);
            t.Post(1);
            t.Post(2);
            t.Post(3);
            try { t.Completion.Wait(); }
            catch { }
            localPassed &= t.Completion.IsFaulted;
            localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
            localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
            localPassed &= t.Post(4) == false;
            Console.WriteLine("    {0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure");
            passed &= localPassed;
        }
    }
    #endregion

    Assert.True(passed, "Test failed.");
}