        /// <summary>
        /// Asynchronously searches all fixed and removable disks for files with the given extensions.
        /// </summary>
        /// <param name="targetExtensions">The file extensions to use as search patterns.</param>
        /// <param name="CTS">The cancellation token source used to abort the search.</param>
        /// <example>
        /// FileExtensionOption example:
        ///     {".jpg", 646546 bytes, 646 bytes}
        ///     {".pdf", 25464645546 bytes, 60000 bytes}
        /// </example>
        /// <returns>A sorted list of the detected files</returns>
        public static async Task<List<FileInfo>> DiskParallelProbingAsync(List<FileExtensionOption> targetExtensions, System.Threading.CancellationTokenSource CTS)
        {
            return await Task.Run(() =>
                {
                    searchComplete = false;
                    //
                    Reporter("DiskProbing", new ReportEventArgs("DiskProbing", ReportCodes.DiskProbingStarted, "---{Search Disks Started}---"));

                    List<FileInfo> _result = new List<FileInfo>();
                    //
                    // Find specific folders on the Windows drives instead of scanning each entire drive.
                    //
                    FolderInfo[] SpecificsDirectory = CheckDirectoriesChanges.GetDirectoriesInformation();
                    //
                    // Set up the dataflow pipeline.
                    //
                    TransformBlock<FolderInfo, List<FileInfo>> TB = new TransformBlock<FolderInfo, List<FileInfo>>(dir =>
                    {
                        Reporter(dir, new ReportEventArgs("DiskProbing",
                            ReportCodes.TheSearchBeginning,
                            "Searching  {0} ...", dir.FullName));

                        List<FileInfo> res = dir.GetDirectoryInfo.SearchDirectory(targetExtensions, CTS);

                        Reporter(dir, new ReportEventArgs("DiskProbing",
                            ReportCodes.TheSearchCompleted,
                            "The Search  {0} was completed!", dir.FullName));

                        return res;
                    }, new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount });

                    ActionBlock<List<FileInfo>> AB = new ActionBlock<List<FileInfo>>(lst => _result.AddRange(lst));

                    //
                    // Search the specific folders from the Windows drives instead of the entire drives.
                    //
                    try
                    {
                        // Propagate completion so that waiting on AB below guarantees every
                        // result list has been drained into _result.
                        TB.LinkTo(AB, new DataflowLinkOptions() { PropagateCompletion = true });

                        // An async lambda inside Parallel.ForEach is fire-and-forget, so send the
                        // directories explicitly and wait for all of the posts to be accepted.
                        Task.WaitAll(Array.ConvertAll(SpecificsDirectory, dir => TB.SendAsync(dir)), CTS.Token);

                        TB.Complete();
                        AB.Completion.Wait();
                    }
                    }
                    catch (Exception ex) { Reporter(SpecificsDirectory, new ReportEventArgs("SearchEngine.DiskProbing.SpecificsDirectory", ex)); }



                    searchComplete = true;
                    Reporter("DiskProbing", new ReportEventArgs("DiskProbing",
                        ReportCodes.DiskProbingFinished,
                        "---{Search Disks Finished}---"));

                    LastSearchResult = _result;
                    return _result;
                });
        }
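A minimal usage sketch for the method above, offered as an assumption for illustration rather than part of the original API: FileExtensionOption is assumed to expose an Extension property, and the call is assumed to be made from the same class that declares DiskParallelProbingAsync.

        // Usage sketch (assumptions noted above).
        private static async Task PrintDetectedFilesAsync()
        {
            var extensions = new List<FileExtensionOption>
            {
                new FileExtensionOption { Extension = ".jpg" },  // hypothetical initializer
                new FileExtensionOption { Extension = ".pdf" }
            };

            using (var cts = new System.Threading.CancellationTokenSource())
            {
                List<FileInfo> found = await DiskParallelProbingAsync(extensions, cts);
                Console.WriteLine("Detected {0} files.", found.Count);
            }
        }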
Example #2
        static public void ProcessingByTPL_StraightForwardImplementation()
        {
            const string pathToFiles = @"..\..\..\..\DataFiles";
            string[] files = Directory.GetFiles(pathToFiles, "*.txt", SearchOption.AllDirectories);

            var loadDataFromFileBlock = new TransformBlock<string[], List<CustomerTextData>>(fileItems =>
            {
                var factory = new CustomerTextDataFactory();
                return new List<CustomerTextData>(Array.ConvertAll(fileItems, factory.LoadFromFile));
            });
            var filterBlock = new TransformBlock<List<CustomerTextData>, List<CustomerTextData>>(textDataList =>
            {
                var filter = new FilterTextData(5);
                return textDataList.Where(filter.Run).ToList();
            });
            var toListBlock = new TransformManyBlock<List<CustomerTextData>, CustomerTextData>(textDataList =>
            {
                var queue = new ConcurrentQueue<CustomerTextData>();
                textDataList.ForEach(queue.Enqueue);
                return queue;
            });
            var action = new ActionBlock<CustomerTextData>(textData =>
            {
                var weight = new WeightTextData();
                int result = weight.Run(textData);
                Trace.WriteLine(result);
                Console.WriteLine(result);
            });

            loadDataFromFileBlock.LinkTo(filterBlock);
            filterBlock.LinkTo(toListBlock);
            toListBlock.LinkTo(action);

            loadDataFromFileBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)filterBlock).Fault(t.Exception);
                else filterBlock.Complete();
            });
            filterBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)toListBlock).Fault(t.Exception);
                else toListBlock.Complete();
            });
            toListBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)action).Fault(t.Exception);
                else action.Complete();
            });

            loadDataFromFileBlock.Post(files);
            loadDataFromFileBlock.Complete();
            action.Completion.Wait();
        }
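Note: the ContinueWith chain above propagates completion and faults by hand. Linking with DataflowLinkOptions { PropagateCompletion = true }, as several of the later examples do, achieves the same effect with less code. A minimal sketch, assuming the same helper types (CustomerTextDataFactory, FilterTextData, WeightTextData) as the example above:

        static public void ProcessingByTPL_WithPropagateCompletion()
        {
            const string pathToFiles = @"..\..\..\..\DataFiles";
            string[] files = Directory.GetFiles(pathToFiles, "*.txt", SearchOption.AllDirectories);

            var loadDataFromFileBlock = new TransformBlock<string[], List<CustomerTextData>>(fileItems =>
            {
                var factory = new CustomerTextDataFactory();
                return new List<CustomerTextData>(Array.ConvertAll(fileItems, factory.LoadFromFile));
            });
            var filterBlock = new TransformBlock<List<CustomerTextData>, List<CustomerTextData>>(textDataList =>
            {
                var filter = new FilterTextData(5);
                return textDataList.Where(filter.Run).ToList();
            });
            var toListBlock = new TransformManyBlock<List<CustomerTextData>, CustomerTextData>(textDataList => textDataList);
            var action = new ActionBlock<CustomerTextData>(textData => Console.WriteLine(new WeightTextData().Run(textData)));

            // PropagateCompletion forwards Complete() and faults downstream,
            // replacing the three ContinueWith continuations above.
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
            loadDataFromFileBlock.LinkTo(filterBlock, linkOptions);
            filterBlock.LinkTo(toListBlock, linkOptions);
            toListBlock.LinkTo(action, linkOptions);

            loadDataFromFileBlock.Post(files);
            loadDataFromFileBlock.Complete();
            action.Completion.Wait();
        }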
Example #3
        static void Main(string[] args)
        {
            string s =
                "http://cn.bing.com/search?q=MD5CryptoServiceProvider+slow&qs=n&pq=md5cryptoserviceprovider+slow&sc=0-25&sp=-1&sk=&cvid=67d40cbd8c424d55a3db83e6e9868267&first=51&FORM=PERE4";
            using (MD5CryptoServiceProvider md5 = new MD5CryptoServiceProvider())
            {
                byte[] inBytes = Encoding.UTF8.GetBytes(s);
                var bytes = md5.ComputeHash(inBytes);
                Console.WriteLine(bytes.Length);
            }


            var splitter = new TransformBlock<string, KeyValuePair<string, int>>(
                input =>
                    {
                        var splitted = input.Split('=');
                        return new KeyValuePair<string, int>(splitted[0], int.Parse(splitted[1]));
                    });

            var dict = new Dictionary<string, int>();
            var aggregater = new ActionBlock<KeyValuePair<string, int>>(
                pair =>
                    {
                        int oldValue;
                        dict[pair.Key] = dict.TryGetValue(pair.Key, out oldValue) ? oldValue + pair.Value : pair.Value;
                    });

            splitter.LinkTo(aggregater, new DataflowLinkOptions() { PropagateCompletion = true});

            splitter.Post("a=1");
            splitter.Post("b=2");
            splitter.Post("a=5");

            splitter.Complete();
            aggregater.Completion.Wait();
            Console.WriteLine("sum(a) = {0}", dict["a"]); //prints sum(a) = 6


            //CalcAsync().Wait();
            //SlowFlowAsync().Wait();
            //FailDemoAsync().Wait();
            //TransformAndLinkDemo().Wait();
            //LinkLeftToDemo().Wait();
            //CircularFlowAutoComplete().Wait();
            //RecorderDemo().Wait();
            BulkInserterDemo().Wait();
            //BulkInserterDemo2().Wait();
            //BroadcasterDemo().Wait();
            //MyLoggerDemo().Wait();
            //ETLLookupDemo().Wait();
        }
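Note: the aggregater above can safely update a plain Dictionary because ActionBlock runs its delegate with MaxDegreeOfParallelism = 1 by default. If the block were made parallel, the shared state would need to be thread-safe. A minimal sketch of that variant, using ConcurrentDictionary (System.Collections.Concurrent):

        // Sketch: a parallel-safe aggregation step; only needed if MaxDegreeOfParallelism
        // is raised above the ActionBlock default of 1.
        static void ParallelAggregationSketch()
        {
            var dict = new ConcurrentDictionary<string, int>();
            var aggregater = new ActionBlock<KeyValuePair<string, int>>(
                pair => dict.AddOrUpdate(pair.Key, pair.Value, (_, old) => old + pair.Value),
                new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount });

            aggregater.Post(new KeyValuePair<string, int>("a", 1));
            aggregater.Post(new KeyValuePair<string, int>("a", 5));

            aggregater.Complete();
            aggregater.Completion.Wait();

            Console.WriteLine("sum(a) = {0}", dict["a"]); // prints sum(a) = 6
        }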
        public async Task TransformThroughFilterToAction()
        {
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);
            t.LinkTo(c, new DataflowLinkOptions { PropagateCompletion = true }, i => true);

            t.PostRange(0, Iterations);
            t.Complete();

            await c.Completion;
            Assert.Equal(expected: Iterations, actual: completedCount);
        }
Example #5
        internal static bool TransformThroughFilterToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, i => true);
            t.Completion.ContinueWith(_ => c.Complete());

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();

            return completedCount == ITERS;
        }
Example #6
        public void Run()
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 };

            var tb = new TransformBlock<int, int>(i => i * 2);
            var ab = new ActionBlock<int>(i => this.Compute(i), options);

            // Propagate completion so that waiting on the ActionBlock below guarantees
            // every transformed item has been processed (no Thread.Sleep needed).
            tb.LinkTo(ab, new DataflowLinkOptions { PropagateCompletion = true });

            for (var i = 0; i < 10; i++)
            {
                tb.Post(i);
            }

            tb.Complete();
            ab.Completion.Wait();
        }
        public async Task TransformToAction()
        {
            var t = new TransformBlock<int, int>(i => i * 2);
            int completedCount = 0;
            int prev = -2;
            var c = new ActionBlock<int>(i =>
            {
                completedCount++;
                Assert.Equal(expected: i, actual: prev + 2);
                prev = i;
            });
            t.LinkTo(c, new DataflowLinkOptions { PropagateCompletion = true });

            t.PostRange(0, Iterations);
            t.Complete();

            await c.Completion;
            Assert.True(completedCount == Iterations);
        }
Example #8
        public async Task TenTransformsToAction()
        {
            var first = new TransformBlock<int, int>(item => item);

            TransformBlock<int, int> t = first;
            for (int i = 0; i < 9; i++)
            {
                var next = new TransformBlock<int, int>(item => item);
                t.LinkTo(next, new DataflowLinkOptions { PropagateCompletion = true });
                t = next;
            }
            int completedCount = 0;
            var last = new ActionBlock<int>(i => completedCount++);
            t.LinkTo(last, new DataflowLinkOptions { PropagateCompletion = true });

            first.PostRange(0, Iterations);
            first.Complete();

            await last.Completion;
            Assert.Equal(expected: Iterations, actual: completedCount);
        }
Example #9
        public static async Task TData()
        {
            var multiplyBlock = new TransformBlock<int, int>(item =>
            {
                var res = item * 2;
                Console.WriteLine("{0} * 2 = {1}", item, res);
                return res;
            });

            var divideBlock = new TransformBlock<int, int>(item =>
            {
                var res = item / 2;
                Console.WriteLine("{0} / 2 = {1}", item, res);
                return res;
            });

            multiplyBlock.LinkTo(divideBlock);

            multiplyBlock.Post(2);

            multiplyBlock.Complete();
            await divideBlock.Completion;
        }
Example #10
        internal static bool TransformToAction()
        {
            bool passed = true;
            const int ITERS = 2;

            var t = new TransformBlock<int, int>(i => i * 2);
            int completedCount = 0;
            int prev = -2;
            var c = new ActionBlock<int>(i =>
            {
                completedCount++;
                if (i != prev + 2) passed &= false;
                prev = i;
            });
            t.LinkWithCompletion(c);

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();
            Assert.True(completedCount == ITERS);

            return passed;
        }
Example #11
        static void Main(string[] args)
        {
            var multiplyBlock = new TransformBlock<int, int>(value => value * 2);
            var subtractBlock = new TransformBlock<int, int>(value => value - 2);
            var displayBlock = new ActionBlock<int>(value => Console.WriteLine(value));

            // multiplyBlock ==> subtractBlock ==> displayBlock
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
            multiplyBlock.LinkTo(subtractBlock, linkOptions);
            subtractBlock.LinkTo(displayBlock, linkOptions);

            // Put data in the first block (multiplyBlock)
            foreach (var i in Enumerable.Range(0, 10))
                multiplyBlock.Post(i);

            // Mark it as complete. Completion will propagate because of the link options.
            multiplyBlock.Complete();

            // Wait for the last block (displayBlock) to complete.
            displayBlock.Completion.Wait();

            Console.ReadKey();
        }
Example #12
 public void Reset()
 {
     _session?.Dispose();
     _builder?.Complete();
     _builder = null;
 }
Example #13
        private void WorkerTimerElapsed(CancellationToken cancellationToken)
        {
            if (!_storageInfoProvider.HasSpaceAvailableForExport)
            {
                _logger.Log(LogLevel.Warning, $"Export service paused due to insufficient storage space.  Available storage space: {_storageInfoProvider.AvailableFreeSpace:D}.");
                return;
            }

            var downloadActionBlock = new TransformBlock <string, IList <TaskResponse> >(
                async(agent) => await DownloadActionCallback(agent, cancellationToken));

            var dataConvertTransformBlock = new TransformManyBlock <IList <TaskResponse>, OutputJob>(
                (tasks) =>
            {
                if (tasks.IsNullOrEmpty())
                {
                    return(null);
                }

                return(ConvertDataBlockCallback(tasks, cancellationToken));
            });

            var downloadPayloadTransformBlock = new TransformBlock <OutputJob, OutputJob>(
                async(outputJob) => await DownloadPayloadBlockCallback(outputJob, cancellationToken));

            var exportActionBlock = new TransformBlock <OutputJob, OutputJob>(
                async(outputJob) => await ExportDataBlockCallback(outputJob, cancellationToken),
                new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Concurrentcy,
                MaxMessagesPerTask     = 1,
                CancellationToken      = cancellationToken
            });

            var reportingActionBlock = new ActionBlock <OutputJob>(
                async(outputJob) => await ReportingActionBlock(outputJob, cancellationToken));

            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            downloadActionBlock.LinkTo(dataConvertTransformBlock, linkOptions);
            dataConvertTransformBlock.LinkTo(downloadPayloadTransformBlock, linkOptions);
            downloadPayloadTransformBlock.LinkTo(exportActionBlock, linkOptions);
            exportActionBlock.LinkTo(reportingActionBlock, linkOptions);

            try
            {
                downloadActionBlock.Post(Agent);
                downloadActionBlock.Complete();
                reportingActionBlock.Completion.Wait();
                _logger.Log(LogLevel.Trace, "Export Service completed timer routine.");
            }
            catch (AggregateException ex)
            {
                foreach (var iex in ex.InnerExceptions)
                {
                    _logger.Log(LogLevel.Error, iex, "Error occurred while exporting.");
                }
            }
            catch (Exception ex)
            {
                _logger.Log(LogLevel.Error, ex, "Error processing export task.");
            }
            finally
            {
                _workerTimer?.Start();
            }
        }
Example #14
        private static async Task DownloadAsync(string tfsUrl, string destinationFolder, string scope, Func <TfvcItem, bool> filter)
        {
            int maxConcurrency = 8;

            ServicePointManager.DefaultConnectionLimit = maxConcurrency;
            var baseUrl = new Uri(tfsUrl);
            VssClientCredentials vssClientCredentials = new VssClientCredentials();

            vssClientCredentials.Storage = new VssClientCredentialStorage();

            var vssHttpRequestSettings = new VssHttpRequestSettings();

            vssHttpRequestSettings.SendTimeout = TimeSpan.FromMilliseconds(-1);
            var client = new TfvcHttpClient(baseUrl, vssClientCredentials, vssHttpRequestSettings);

            try
            {
                var items = await client.GetItemsAsync(scopePath : scope, recursionLevel : VersionControlRecursionType.Full, includeLinks : false).ConfigureAwait(false);

                var files = items.Where(filter).OrderBy(_ => _.Path).ToList();

                var transformBlock = new TransformBlock <TfvcItem, TfvcItem>(async item =>
                {
                    if (item.IsFolder)
                    {
                        var fullPath = GetFullPath(destinationFolder, item.Path);
                        if (!Directory.Exists(fullPath))
                        {
                            Directory.CreateDirectory(fullPath);
                        }
                    }
                    else
                    {
                        var fullPath   = GetFullPath(destinationFolder, item.Path);
                        var folderPath = Path.GetDirectoryName(fullPath);
                        if (folderPath != null && !Directory.Exists(folderPath))
                        {
                            Directory.CreateDirectory(folderPath);
                        }

                        using (var stream = await client.GetItemContentAsync(item.Path))
                            using (var fs = File.Create(fullPath))
                            {
                                await stream.CopyToAsync(fs).ConfigureAwait(false);
                            }
                    }

                    return(item);
                }, new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = maxConcurrency
                });

                var writePathBlock = new ActionBlock <TfvcItem>(c =>
                {
                    var index = files.IndexOf(c);
                    Console.WriteLine($"{index}/{files.Count}: {c.Path}");
                });
                transformBlock.LinkTo(writePathBlock, new DataflowLinkOptions {
                    PropagateCompletion = true
                });

                foreach (var item in files)
                {
                    transformBlock.Post(item);
                }

                transformBlock.Complete();
                // Completion is propagated, so wait on the final block to ensure
                // every progress line has been written.
                await writePathBlock.Completion.ConfigureAwait(false);
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex);
            }
        }
Example #15
        public async Task Get([FromQuery] string text)
        {
            // Make sure that the length is limited to 1000 characters
            text = text ?? string.Empty;
            if (text.Length > 1000)
            {
                text = text.Substring(0, 1000);
            }

            var getResultBlock = new TransformBlock <MarkdownEntry, JObject>(async implem =>
            {
                var clock = Stopwatch.StartNew();
                JObject jobject;
                try
                {
                    if (implem.Url == "js:commonmark.js")
                    {
                        jobject = await GetCommonMarkJs(text);
                    }
                    else if (implem.Url == "js:markdown-it")
                    {
                        jobject = await GetMarkdownIt(text);
                    }
                    else if (implem.POST)
                    {
                        var content = new ByteArrayContent(Encoding.UTF8.GetBytes(text));
                        content.Headers.Add("Content-Type", "text/plain");
                        var responseMessage =
                            await _httpClient.PostAsync(implem.Url, content);
                        jobject = new JObject
                        {
                            ["html"] = await responseMessage.Content.ReadAsStringAsync()
                        };

                        if (!string.IsNullOrWhiteSpace(implem.VersionHeader))
                        {
                            jobject["version"] = responseMessage.Headers.GetValues(implem.VersionHeader).FirstOrDefault();
                        }
                    }
                    else
                    {
                        var jsonText =
                            await _httpClient.GetStringAsync(implem.Url + "text=" + Uri.EscapeDataString(text));
                        jobject = JObject.Parse(jsonText);
                    }
                    var html = jobject["html"]?.ToString() ?? string.Empty;

                    if (string.IsNullOrWhiteSpace(html))
                    {
                        html = string.Empty;
                        jobject["html_clean"] = html;
                        jobject["html_safe"]  = html;
                    }
                    else
                    {
                        // Generates also a clean html in order to compare implems
                        var settings                 = HtmlSettings.Pretty();
                        settings.IsFragmentOnly      = true;
                        settings.MinifyCss           = false;
                        settings.MinifyCssAttributes = false;
                        settings.MinifyJs            = false;
                        var result            = Uglify.Html(html, settings);
                        jobject["html_clean"] = result.Code;

                        // Remove any javascript
                        settings.RemoveJavaScript = true;
                        result = Uglify.Html(html, settings);
                        jobject["html_safe"] = result.Code;
                    }
                }
                catch (Exception exception)
                {
                    _logger.LogError("Unexpected exception: " + exception);

                    // In case we have an error, we still return an object
                    jobject = new JObject
                    {
                        ["version"] = "unknown",
                        ["error"]   = GetPrettyMessageFromException(exception)
                    };
                }
                clock.Stop();

                // Set common fields
                jobject["name"]  = implem.Name; // use the name from the registry, not the one returned
                jobject["repo"]  = implem.Repo;
                jobject["cmark"] = implem.CommonMark;
                jobject["lang"]  = implem.Lang;
                jobject["time"]  = clock.Elapsed.TotalSeconds;

                return(jobject);
            }, new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded
            });


            var returnResultBlock = new ActionBlock <JObject>(async jobject =>
            {
                var textWriter = new StringWriter();
                var writer     = new JsonTextWriter(textWriter)
                {
                    Formatting = Formatting.None
                };
                jobject.WriteTo(writer);
                textWriter.Write("\n\n");
                var buffer = Encoding.UTF8.GetBytes(textWriter.ToString());

                await HttpContext.Response.Body.WriteAsync(buffer, 0, buffer.Length);
                await HttpContext.Response.Body.FlushAsync();
            }, new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = 1
            });

            getResultBlock.LinkTo(returnResultBlock, new DataflowLinkOptions()
            {
                PropagateCompletion = true
            });

            try
            {
                var entries = await MarkdownRegistry.Instance.GetEntriesAsync();

                // We shuffle the entries to random the order of the latency of the results
                Shuffle(entries);

                foreach (var entry in entries)
                {
                    await getResultBlock.SendAsync(entry);
                }

                getResultBlock.Complete();

                await returnResultBlock.Completion;
            }
            catch (Exception ex)
            {
                _logger.LogError("Unexpected exception while fetching/returning: " + ex);
            }
        }
Example #16
        public async Task HandleAsync(CommandContext context, NextDelegate next)
        {
            if (context.Command is BulkUpdateAssets bulkUpdates)
            {
                if (bulkUpdates.Jobs?.Length > 0)
                {
                    var executionOptions = new ExecutionDataflowBlockOptions
                    {
                        MaxDegreeOfParallelism = Math.Max(1, Environment.ProcessorCount / 2)
                    };

                    var createCommandsBlock = new TransformBlock <BulkTask, BulkTaskCommand?>(task =>
                    {
                        try
                        {
                            return(CreateCommand(task));
                        }
                        catch (OperationCanceledException ex)
                        {
                            // Dataflow swallows operation cancelled exception.
                            throw new AggregateException(ex);
                        }
                    }, executionOptions);

                    var executeCommandBlock = new ActionBlock <BulkTaskCommand?>(async command =>
                    {
                        try
                        {
                            if (command != null)
                            {
                                await ExecuteCommandAsync(command);
                            }
                        }
                        catch (OperationCanceledException ex)
                        {
                            // Dataflow swallows operation cancelled exception.
                            throw new AggregateException(ex);
                        }
                    }, executionOptions);

                    createCommandsBlock.BidirectionalLinkTo(executeCommandBlock);

                    contextProvider.Context.Change(b => b
                                                   .WithoutAssetEnrichment()
                                                   .WithoutCleanup()
                                                   .WithUnpublished(true)
                                                   .WithoutTotal());

                    var results = new ConcurrentBag <BulkUpdateResultItem>();

                    for (var i = 0; i < bulkUpdates.Jobs.Length; i++)
                    {
                        var task = new BulkTask(
                            context.CommandBus,
                            i,
                            bulkUpdates.Jobs[i],
                            bulkUpdates,
                            results);

                        if (!await createCommandsBlock.SendAsync(task))
                        {
                            break;
                        }
                    }

                    createCommandsBlock.Complete();

                    await executeCommandBlock.Completion;

                    context.Complete(new BulkUpdateResult(results));
                }
                else
                {
                    context.Complete(new BulkUpdateResult());
                }
            }
            else
            {
                await next(context);
            }
        }
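Note: BidirectionalLinkTo is not part of the TPL Dataflow API but a project-specific extension. A hypothetical sketch of what such a helper might do, offered as an assumption rather than the project's actual implementation: propagate completion downstream as usual, and fault the source when the target faults so that producers observe the failure.

        // Hypothetical sketch (assumption, not the project's actual code).
        public static class DataflowLinkSketch
        {
            public static void BidirectionalLinkTo<T>(this ISourceBlock<T> source, ITargetBlock<T> target)
            {
                source.LinkTo(target, new DataflowLinkOptions { PropagateCompletion = true });

                // Flow failures back to the source so that pending sends stop being accepted.
                target.Completion.ContinueWith(t =>
                {
                    if (t.IsFaulted)
                    {
                        ((IDataflowBlock)source).Fault(t.Exception);
                    }
                });
            }
        }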
Example #17
        static void Main(string[] args)
        {
            //
            // Create the members of the pipeline.
            //

            // Downloads the requested resource as a string.
            var downloadString = new TransformBlock<string, string>(uri =>
            {
                Console.WriteLine("Downloading '{0}'...", uri);

                return new WebClient().DownloadString(uri);
            });

            // Separates the specified text into an array of words.
            var createWordList = new TransformBlock<string, string[]>(text =>
            {
                Console.WriteLine("Creating word list...");

                // Remove common punctuation by replacing all non-letter characters
                // with a space character.
                char[] tokens = text.ToArray();
                for (int i = 0; i < tokens.Length; i++)
                {
                    if (!char.IsLetter(tokens[i]))
                        tokens[i] = ' ';
                }
                text = new string(tokens);

                // Separate the text into an array of words.
                return text.Split(new char[] { ' ' },
                   StringSplitOptions.RemoveEmptyEntries);
            });

            // Removes short words, orders the resulting words alphabetically,
            // and then remove duplicates.
            var filterWordList = new TransformBlock<string[], string[]>(words =>
            {
                Console.WriteLine("Filtering word list...");

                return words.Where(word => word.Length > 3).OrderBy(word => word)
                   .Distinct().ToArray();
            });

            // Finds all words in the specified collection whose reverse also
            // exists in the collection.
            var findReversedWords = new TransformManyBlock<string[], string>(words =>
            {
                Console.WriteLine("Finding reversed words...");

                // Holds reversed words.
                var reversedWords = new ConcurrentQueue<string>();

                // Add each word in the original collection to the result whose
                // reversed word also exists in the collection.
                Parallel.ForEach(words, word =>
                {
                    // Reverse the word.
                    string reverse = new string(word.Reverse().ToArray());

                    // Enqueue the word if the reversed version also exists
                    // in the collection.
                    if (Array.BinarySearch<string>(words, reverse) >= 0 &&
                        word != reverse)
                    {
                        reversedWords.Enqueue(word);
                    }
                });

                return reversedWords;
            });

            // Prints the provided reversed words to the console.
            var printReversedWords = new ActionBlock<string>(reversedWord =>
            {
                Console.WriteLine("Found reversed words {0}/{1}",
                   reversedWord, new string(reversedWord.Reverse().ToArray()));
            });

            //
            // Connect the dataflow blocks to form a pipeline.
            //

            downloadString.LinkTo(createWordList);
            createWordList.LinkTo(filterWordList);
            filterWordList.LinkTo(findReversedWords);
            findReversedWords.LinkTo(printReversedWords);

            //
            // For each completion task in the pipeline, create a continuation task
            // that marks the next block in the pipeline as completed.
            // A completed dataflow block processes any buffered elements, but does
            // not accept new elements.
            //

            downloadString.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)createWordList).Fault(t.Exception);
                else createWordList.Complete();
            });
            createWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)filterWordList).Fault(t.Exception);
                else filterWordList.Complete();
            });
            filterWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)findReversedWords).Fault(t.Exception);
                else findReversedWords.Complete();
            });
            findReversedWords.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)printReversedWords).Fault(t.Exception);
                else printReversedWords.Complete();
            });

            // Process "The Iliad of Homer" by Homer.
            downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");

            // Mark the head of the pipeline as complete. The continuation tasks
            // propagate completion through the pipeline as each part of the
            // pipeline finishes.
            downloadString.Complete();

            // Wait for the last block in the pipeline to process all messages.
            printReversedWords.Completion.Wait();

            Console.ReadLine();
        }
 public void Stop()
 {
     _transformBlock.Complete();
 }
Example #19
        static void Main(string[] args)
        {
            //
            // Create members of the Pipeline
            //

            // Step 1 - Download the requested resource as a string

            var downloadString = new TransformBlock <string, string>
                                     (url =>
            {
                Console.WriteLine($"STEP 1 - Downloading from {url}...");
                string result = null;
                using (var client = new HttpClient())
                {
                    // Perform a synchronous call by calling .Result
                    var response = client.GetAsync(url).Result;

                    if (response.IsSuccessStatusCode)
                    {
                        var responseContent = response.Content;

                        // read result synchronously by calling .Result
                        result = responseContent.ReadAsStringAsync().Result;
                        if (!string.IsNullOrEmpty(result))
                        {
                            Console.WriteLine($"STEP 1 - Downloaded {result.Length} characters...");
                        }
                    }
                }
                return(result);
            }
                                     );

            // Step 2 - Separate the specified text into an array of words

            var createWordList = new TransformBlock <string, List <string> >
                                     (text =>
            {
                Console.WriteLine("STEP 2 - Creating word list...");

                char[] tokens = text.ToCharArray();

                // replace non-letter chars like punctuations with space char.
                for (int i = 0; i < tokens.Length; i++)
                {
                    if (!char.IsLetter(tokens[i]))
                    {
                        tokens[i] = ' ';
                    }
                }
                text           = new string(tokens);
                var stringList = text.Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries).ToList();

                Console.WriteLine($"STEP 2 - Found {stringList.Count} words...");

                return(stringList);
            }
                                     );

            // Step 3 - Remove short words, sort alphabetically and remove dupes.

            var filterWordList = new TransformBlock <List <string>, List <string> >
                                     (wordsList =>
            {
                Console.WriteLine("STEP 3 - Filtering word list...");
                var filteredWordsList = wordsList.Where(word => word.Length > 3).OrderBy(word => word).Distinct().ToList();
                Console.WriteLine($"STEP 3 - Filtered list down to {filteredWordsList.Count}...");
                return(filteredWordsList);
            }
                                     );

            // Step 4 - Find Palindromes.
            var findPalindromeWords = new TransformManyBlock <List <string>, string>
                                          (wordsList =>
            {
                Console.WriteLine("STEP 4 - Finding Palindrome words...");

                // Holds palindrome words.
                var palindromeWords = new ConcurrentQueue <string>();

                // Add each word in the original collection to the result whose
                // reversed word also exists in the collection.
                Parallel.ForEach(wordsList, word =>
                {
                    // Reverse the word.
                    string reverse = new string(word.Reverse().ToArray());

                    // Enqueue the word if the reversed version also exists
                    // in the collection.
                    if (word == reverse)
                    {
                        palindromeWords.Enqueue(word);
                    }
                });

                var searchMessage = palindromeWords.Any() ? $"STEP 4 - Found {palindromeWords.Count} palindrome(s)" : $"STEP 4 - Didn't find any palindromes :(";
                Console.WriteLine(searchMessage);

                return(palindromeWords);
            });

            // Step 5 - Print Palindromes.
            var printPalindromes = new ActionBlock <string>
                                       (palindrome =>
            {
                Console.WriteLine($"STEP 5 - Found palindrome: {palindrome} / {new string(palindrome.Reverse().ToArray())}");
            }
                                       );

            downloadString.LinkTo(createWordList);
            createWordList.LinkTo(filterWordList);
            filterWordList.LinkTo(findPalindromeWords);
            findPalindromeWords.LinkTo(printPalindromes);

            downloadString.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)createWordList).Fault(t.Exception);
                }
                else
                {
                    createWordList.Complete();
                }
            });

            createWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)filterWordList).Fault(t.Exception);
                }
                else
                {
                    filterWordList.Complete();
                }
            });

            filterWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)findPalindromeWords).Fault(t.Exception);
                }
                else
                {
                    findPalindromeWords.Complete();
                }
            });

            findPalindromeWords.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)printPalindromes).Fault(t.Exception);
                }
                else
                {
                    printPalindromes.Complete();
                }
            });

            // Process "Adventures of Huckleberry Finn"
            //                 by Mark Twain
            Console.WriteLine("STEP 0 - Processing Mark Twain's Book - Adventures of Huckleberry Finn");

            downloadString.Post("http://www.gutenberg.org/files/76/76-0.txt");

            // Mark the head of the pipeline as complete. The continuation tasks
            // propagate completion through the pipeline as each part of the
            // pipeline finishes.
            downloadString.Complete();

            // Wait for the last block in the pipeline to process all messages.
            printPalindromes.Completion.Wait();

            Console.WriteLine("Done!\nPress a key to exit:");
            Console.ReadKey();
        }
        public async Task DecompressEntryAsync(int index, Stream output)
        {
            var entry = Files[index];

            using (var fs = new FileStream(FileName, FileMode.Open, FileAccess.Read, FileShare.None, 4096, useAsync: true))
            {
                fs.Seek(entry.BlockOffsets[0], SeekOrigin.Begin);

                //not compressed
                if (entry.BlockSizeIndex == 0xFFFFFFFF)
                {
                    var uncompressed = new byte[entry.RealUncompressedSize];
                    await fs.ReadAsync(uncompressed, 0, uncompressed.Length).ConfigureAwait(continueOnCapturedContext: false);

                    await output.WriteAsync(uncompressed, 0, uncompressed.Length).ConfigureAwait(continueOnCapturedContext: false);

                    return;
                }

                var decompressor = new TransformBlock <InputBlock, byte[]>(
                    input => input.IsCompressed
                        ? SevenZipHelper.Decompress(input.Data, input.UncompressedSize)
                        : input.Data
                    , new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded
                }
                    );

                var outputWriter = new ActionBlock <byte[]>(
                    data => output.Write(data, 0, data.Length)
                    );

                decompressor.LinkTo(outputWriter, new DataflowLinkOptions {
                    PropagateCompletion = true
                });

                uint count = 0;
                long left  = entry.RealUncompressedSize;
                while (left > 0)
                {
                    uint compressedBlockSize = entry.BlockSizes[count];
                    if (compressedBlockSize == 0)
                    {
                        compressedBlockSize = Header.MaxBlockSize;
                    }

                    if (compressedBlockSize == Header.MaxBlockSize ||
                        compressedBlockSize == left)
                    {
                        left -= compressedBlockSize;
                        var uncompressedData = new byte[compressedBlockSize];
                        await fs.ReadAsync(uncompressedData, 0, uncompressedData.Length).ConfigureAwait(continueOnCapturedContext: false);

                        decompressor.Post(new InputBlock(uncompressedData, InputBlock.Uncompressed));
                    }
                    else
                    {
                        var uncompressedBlockSize = Math.Min(left, Header.MaxBlockSize);
                        left -= uncompressedBlockSize;
                        if (compressedBlockSize < 5)
                        {
                            throw new Exception("compressed block size smaller than 5");
                        }

                        var compressedData = new byte[compressedBlockSize];
                        await fs.ReadAsync(compressedData, 0, (int)compressedBlockSize).ConfigureAwait(continueOnCapturedContext: false);

                        decompressor.Post(new InputBlock(compressedData, uncompressedBlockSize));
                    }
                    count++;
                }

                decompressor.Complete();
                await outputWriter.Completion;
            }
        }
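Note: the decompressor above can run its delegate in parallel and still write blocks to the output stream in the correct order, because a TransformBlock emits results in the order its inputs were posted. A minimal, standalone sketch of that ordering guarantee:

        // Sketch: even with parallel, out-of-order processing, the TransformBlock
        // hands results to the linked block in posting order, so this prints 0..9.
        static async Task OrderingSketch()
        {
            var block = new TransformBlock<int, int>(async i =>
            {
                await Task.Delay(100 - i * 10); // later items finish their work sooner
                return i;
            }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded });

            var printer = new ActionBlock<int>(i => Console.WriteLine(i));
            block.LinkTo(printer, new DataflowLinkOptions { PropagateCompletion = true });

            for (int i = 0; i < 10; i++)
            {
                block.Post(i);
            }

            block.Complete();
            await printer.Completion;
        }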
Example #21
        public void RunTransformBlockConformanceTests()
        {
            bool passed = true;

            // SYNC
            #region Sync
            {
                // Do everything twice - once through OfferMessage and Once through Post
                for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
                {
                    Func <DataflowBlockOptions, TargetProperties <int> > transformBlockFactory =
                        options =>
                    {
                        TransformBlock <int, int> transformBlock = new TransformBlock <int, int>(i => i, (ExecutionDataflowBlockOptions)options);
                        ActionBlock <int>         actionBlock    = new ActionBlock <int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);

                        transformBlock.LinkTo(actionBlock);

                        return(new TargetProperties <int> {
                            Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false
                        });
                    };
                    CancellationTokenSource cancellationSource = new CancellationTokenSource();
                    var defaultOptions = new ExecutionDataflowBlockOptions();
                    var dopOptions     = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount
                    };
                    var mptOptions = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2
                    };
                    var cancellationOptions = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token
                    };

                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, dopOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.Complete, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, cancellationOptions, 1, Intervention.Cancel, cancellationSource, feedMethod, true);
                }

                // Test chained Post/Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.Post(i);
                        localPassed &= (((IReceivableSourceBlock <int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync/Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.SendAsync(i);
                        localPassed &= (((IReceivableSourceBlock <int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained Post all then Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++)
                    {
                        localPassed &= network.Post(i) == true;
                    }
                    for (int i = 0; i < ITERS; i++)
                    {
                        localPassed &= ((IReceivableSourceBlock <int>)network).Receive() == i * 16;
                    }
                    Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync all then Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => i * 2));
                    var       tasks       = new Task[ITERS];
                    for (int i = 1; i <= ITERS; i++)
                    {
                        tasks[i - 1] = network.SendAsync(i);
                    }
                    Task.WaitAll(tasks);
                    int total = 0;
                    for (int i = 1; i <= ITERS; i++)
                    {
                        total += ((IReceivableSourceBlock <int>)network).Receive();
                    }
                    localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
                    Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that OperationCanceledExceptions are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock <int, int>(i =>
                    {
                        if ((i % 2) == 0)
                        {
                            throw new OperationCanceledException();
                        }
                        return(i);
                    });
                    for (int i = 0; i < 2; i++)
                    {
                        t.Post(i);
                    }
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0)
                        {
                            localPassed &= t.Receive() == i;
                        }
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test using a precanceled token
                {
                    bool localPassed = true;
                    try
                    {
                        var cts = new CancellationTokenSource();
                        cts.Cancel();
                        var dbo = new ExecutionDataflowBlockOptions {
                            CancellationToken = cts.Token
                        };
                        var t = new TransformBlock <int, int>(i => i, dbo);

                        int         ignoredValue;
                        IList <int> ignoredValues;
                        localPassed &= t.LinkTo(new ActionBlock <int>(delegate { })) != null;
                        localPassed &= t.SendAsync(42).Result == false;
                        localPassed &= t.TryReceiveAll(out ignoredValues) == false;
                        localPassed &= t.Post(42) == false;
                        localPassed &= t.OutputCount == 0;
                        localPassed &= t.TryReceive(out ignoredValue) == false;
                        localPassed &= t.Completion != null;
                        t.Complete();
                    }
                    catch (Exception)
                    {
                        localPassed = false;
                    }
                    Console.WriteLine("    {0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting
                {
                    bool localPassed = true;
                    var  t           = new TransformBlock <int, int>(new Func <int, int>(i => { throw new InvalidOperationException(); }));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }
            }
            #endregion

            #region Async
            // ASYNC (a copy of the sync tests, but with delegates returning Task<T> instead of T)
            {
                // Do everything twice - once through OfferMessage and Once through Post
                for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
                {
                    Func <DataflowBlockOptions, TargetProperties <int> > transformBlockFactory =
                        options =>
                    {
                        TransformBlock <int, int> transformBlock = new TransformBlock <int, int>(i => Task.Factory.StartNew(() => i), (ExecutionDataflowBlockOptions)options);
                        ActionBlock <int>         actionBlock    = new ActionBlock <int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);

                        transformBlock.LinkTo(actionBlock);

                        return(new TargetProperties <int> {
                            Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false
                        });
                    };
                    CancellationTokenSource cancellationSource = new CancellationTokenSource();
                    var defaultOptions = new ExecutionDataflowBlockOptions();
                    var dopOptions     = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount
                    };
                    var mptOptions = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2
                    };
                    var cancellationOptions = new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token
                    };

                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, dopOptions, 10, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true);
                }

                // Test chained Post/Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.Post(i);
                        localPassed &= (((IReceivableSourceBlock <int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync/Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.SendAsync(i);
                        localPassed &= (((IReceivableSourceBlock <int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained Post all then Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++)
                    {
                        localPassed &= network.Post(i) == true;
                    }
                    for (int i = 0; i < ITERS; i++)
                    {
                        localPassed &= ((IReceivableSourceBlock <int>)network).Receive() == i * 16;
                    }
                    Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync all then Receive
                {
                    bool      localPassed = true;
                    const int ITERS       = 2;
                    var       network     = Chain <TransformBlock <int, int>, int>(4, () => new TransformBlock <int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    var       tasks       = new Task[ITERS];
                    for (int i = 1; i <= ITERS; i++)
                    {
                        tasks[i - 1] = network.SendAsync(i);
                    }
                    Task.WaitAll(tasks);
                    int total = 0;
                    for (int i = 1; i <= ITERS; i++)
                    {
                        total += ((IReceivableSourceBlock <int>)network).Receive();
                    }
                    localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
                    Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that OperationCanceledExceptions are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock <int, int>(i =>
                    {
                        if ((i % 2) == 0)
                        {
                            throw new OperationCanceledException();
                        }
                        return(Task.Factory.StartNew(() => i));
                    });
                    for (int i = 0; i < 2; i++)
                    {
                        t.Post(i);
                    }
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0)
                        {
                            localPassed &= t.Receive() == i;
                        }
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that null tasks are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock <int, int>(i =>
                    {
                        if ((i % 2) == 0)
                        {
                            return(null);
                        }
                        return(Task.Factory.StartNew(() => i));
                    });
                    for (int i = 0; i < 2; i++)
                    {
                        t.Post(i);
                    }
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0)
                        {
                            localPassed &= t.Receive() == i;
                        }
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: null tasks are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that null tasks are ignored when a reordering buffer is in place
                {
                    bool localPassed = true;

                    var t = new TransformBlock <int, int>(i =>
                    {
                        if (i == 0)
                        {
                            Task.Delay(10).Wait();
                            return(null);
                        }
                        return(Task.Factory.StartNew(() => i));
                    }, new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = 2
                    });
                    t.Post(0);
                    t.Post(1);
                    try
                    {
                        localPassed &= t.Receive(TimeSpan.FromSeconds(4)) == 1;
                    }
                    catch
                    {
                        localPassed = false;
                    }
                    Console.WriteLine("{0}: null tasks are ignored with reordering buffer", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting from the delegate
                {
                    bool localPassed = true;
                    var  t           = new TransformBlock <int, int>(new Func <int, Task <int> >(i => { throw new InvalidOperationException(); }));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting from the task
                {
                    bool localPassed = true;
                    var  t           = new TransformBlock <int, int>(new Func <int, Task <int> >(i => Task <int> .Factory.StartNew(() => { throw new InvalidOperationException(); })));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }
            }
            #endregion

            Assert.True(passed, "Test failed.");
        }
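        // A standalone sketch (not part of the test suite above) of the behavior the last few tests
        // assert: an async TransformBlock drops an input whose delegate throws OperationCanceledException
        // or returns a null Task, and keeps processing the remaining items without faulting.
        // Assumes using System, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task DroppedMessagesSketchAsync()
        {
            var block = new TransformBlock<int, int>(i =>
            {
                if (i == 0) throw new OperationCanceledException(); // dropped; the block is not faulted
                if (i == 1) return null;                            // null task: also dropped
                return Task.FromResult(i * 10);
            });

            for (int i = 0; i < 4; i++) block.Post(i);
            block.Complete();

            // Only 2 and 3 produce output: 20 and 30.
            while (await block.OutputAvailableAsync())
                Console.WriteLine(await block.ReceiveAsync());

            await block.Completion;
        }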
Beispiel #22
        public async Task <IEnumerable <DummyData> > LoadAsync(IEnumerable <Uri> uris)
        {
            IList <DummyData> result;

            using (var client = new HttpClient())
            {
                // downloader block with parallelism limit
                var downloader = new TransformBlock <Uri, HttpResponseMessage>(
                    async u => await client.GetAsync(u),
                    new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism    = _maxParallelism,
                    EnsureOrdered             = false,
                    SingleProducerConstrained = true
                });

                // deserializer, unbound parallelism
                var deserializer =
                    new TransformBlock <HttpResponseMessage, DummyData>(
                        async r =>
                {
                    using (Stream s = await r.Content.ReadAsStreamAsync())
                        using (var sr = new StreamReader(s))
                            using (JsonReader reader = new JsonTextReader(sr))
                            {
                                return(Serializer.Deserialize <DummyData>(reader));
                            }
                },
                        new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism    = DataflowBlockOptions.Unbounded,
                    EnsureOrdered             = false,
                    SingleProducerConstrained = true
                });

                // buffer to access result
                var buffer = new BufferBlock <DummyData>(
                    new ExecutionDataflowBlockOptions
                {
                    EnsureOrdered = false
                });

                // link blocks together
                var linkOptions = new DataflowLinkOptions {
                    PropagateCompletion = true
                };
                downloader.LinkTo(deserializer, linkOptions);
                deserializer.LinkTo(buffer, linkOptions);

                // start sending input
                foreach (Uri uri in uris)
                {
                    await downloader.SendAsync(uri);
                }

                // input completed
                downloader.Complete();

                // await deserializer
                await deserializer.Completion;

                // pipeline done, receive result
                buffer.TryReceiveAll(out result);
            }

            return(result);
        }
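        // An alternative sketch for collecting pipeline output (illustrative names, nothing here comes
        // from the example above): instead of draining a BufferBlock after the fact, link the last
        // TransformBlock to an ActionBlock that appends to a list and await that leaf block.
        // Assumes using System.Collections.Generic, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task<IReadOnlyList<int>> CollectWithLeafBlockAsync(IEnumerable<int> inputs)
        {
            var results = new List<int>();

            var worker = new TransformBlock<int, int>(
                async i => { await Task.Delay(10); return i * 2; },
                new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4, EnsureOrdered = false });

            // MaxDegreeOfParallelism defaults to 1, so only one thread touches the list at a time.
            var collector = new ActionBlock<int>(i => results.Add(i));

            worker.LinkTo(collector, new DataflowLinkOptions { PropagateCompletion = true });

            foreach (int i in inputs)
                await worker.SendAsync(i);

            worker.Complete();
            await collector.Completion;   // once the leaf block completes, every result has been added
            return results;
        }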
Beispiel #23
        static void Main(string[] args)
        {
            int                fileCounter     = 0;
            string             outputDirectory = ".";
            TestClassGenerator generator       = new TestClassGenerator();

            int maxLoading = 0;

            var downloadString = new TransformBlock <string, Task <string> >(path =>
            {
                while (fileLoadingCounter >= maxFileLoading)
                {
                    ;
                }
                StreamReader sr      = new StreamReader(path);
                Task <string> result = sr.ReadToEndAsync();
                fileLoadingCounter++;
                if (maxLoading < fileLoadingCounter)
                {
                    maxLoading = fileLoadingCounter;
                }
                result.ContinueWith(str => { fileLoadingCounter--; });
                return(result);
            });

            var generateClass = new TransformBlock <Task <string>, Task <string> >(async sourceTask =>
            {
                while (testsGeneratingCounter >= maxTestsGenerating)
                {
                    ;
                }
                testsGeneratingCounter++;
                Task <string> task = generator.GenerateTestAsync(await sourceTask);
                Task temp          = task.ContinueWith(str => { testsGeneratingCounter--; });
                return(task);
            });

            var outputString = new ActionBlock <Task <string> >(async sourceTask =>
            {
                string source = await sourceTask;
                while (fileWritingCounter >= maxFileWriting)
                {
                    ;
                }
                fileWritingCounter++;
                fileCounter++;
                StreamWriter sw = new StreamWriter($"{outputDirectory}/Test{fileCounter}.cs");
                Task t          = sw.WriteAsync(source);
                Task temp       = t.ContinueWith((str) => { fileWritingCounter--; });
                t.Wait();
                sw.Close();
            });

            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            downloadString.LinkTo(generateClass, linkOptions);
            generateClass.LinkTo(outputString, linkOptions);

            List <string> fileNames = new List <string>()
            {
                "Program.cs", "AssemblyGetter.cs",
                "StudentExtension.cs", "Factorizer.cs"
            };

            foreach (var name in fileNames)
            {
                downloadString.Post(name);
            }
            downloadString.Complete();
            outputString.Completion.Wait();

            Console.WriteLine(maxLoading);
            Console.ReadLine();
        }
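        // A sketch of the built-in alternative to the busy-wait counters in Main above (names and the
        // file API are illustrative; File.ReadAllTextAsync needs .NET Core / .NET 5+): BoundedCapacity
        // plus SendAsync gives the same throttling without spinning, because SendAsync only completes
        // once the block has room for the next item.
        // Assumes using System.Collections.Generic, System.IO, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task BoundedCapacitySketchAsync(IEnumerable<string> paths)
        {
            var readFile = new TransformBlock<string, string>(
                path => File.ReadAllTextAsync(path),
                new ExecutionDataflowBlockOptions { BoundedCapacity = 2, MaxDegreeOfParallelism = 2 });

            var writeLength = new ActionBlock<string>(
                text => Console.WriteLine(text.Length),
                new ExecutionDataflowBlockOptions { BoundedCapacity = 2 });

            readFile.LinkTo(writeLength, new DataflowLinkOptions { PropagateCompletion = true });

            foreach (string path in paths)
                await readFile.SendAsync(path);   // waits instead of spinning when the block is full

            readFile.Complete();
            await writeLength.Completion;
        }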
        public async Task PostList(ITargetBlock<Core.Entities.BalanceDate> target)
        {
            Diag.ThreadPrint("PostList - start");

            var transform = new TransformBlock<BalanceDate, Core.Entities.BalanceDate>(ef =>
                 mapper.Map<Core.Entities.BalanceDate>(ef), new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = 4 });

            transform.LinkTo(target,new DataflowLinkOptions() { PropagateCompletion = true });

            await Task.Run(() =>
            {
                Diag.ThreadPrint("PostList - task start");

                using (FinanceEntities context = factory.CreateContext())
                {
                    (from b in context.BalanceDates
                                .Include(a => a.BalanceDateBankAccounts)
                                .Include("BalanceDateBankAccounts.BankAccount")
                                .Include("BalanceDateBankAccounts.BankAccount.Bank")
                     select b).AsParallel().ForAll(ef => transform.Post(ef));
                    //await transform.Completion;
                    //transform.Completion.ContinueWith(t =>
                    //{
                    //    if (t.IsFaulted) target.Fault(t.Exception);
                    //    else
                    //    {
                    //        Diag.ThreadPrint("PostList - task set target complete");
                    //        target.Complete();
                    //    }
                    //});
                    transform.Complete();
                }
                Diag.ThreadPrint("PostList - task end");
            }).ConfigureAwait(false);

            Diag.ThreadPrint("PostList - end");
        }
Beispiel #25
 public async Task TestOrdering()
 {
     const int iters = 1000;
     foreach (int mmpt in new[] { DataflowBlockOptions.Unbounded, 1 })
     foreach (int dop in new[] { 1, 2, DataflowBlockOptions.Unbounded })
     {
         var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
         var tb = new TransformBlock<int, int>(i => i, options);
         tb.PostRange(0, iters);
         for (int i = 0; i < iters; i++)
         {
             Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
         }
         tb.Complete();
         await tb.Completion;
     }
 }
        public Task RunAsync()
        {
            TransformBlock <Uri, PropertyBag> ingestBlock = new TransformBlock <Uri, PropertyBag>(input =>
            {
                PropertyBag result = new PropertyBag
                {
                    OriginalUrl = input.ToString(),
                    UserAgent   = _userAgent,
                    Step        = new CrawlStep(input, 0)
                };

                return(result);
            }, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism
            });

            TransformBlock <PropertyBag, PropertyBag> ingestBlockForAggregation =
                new TransformBlock <PropertyBag, PropertyBag>(input => input, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism
            });

            CrawlIngestionHelper crawlIngestionHelper = new CrawlIngestionHelper(ingestBlockForAggregation, _userAgent);

            TransformBlock <PropertyBag, PropertyBag>[] pipeline = Pipeline
                                                                   .Select(pipelineStep =>
            {
                return(new TransformBlock <PropertyBag, PropertyBag>(async propertyBag =>
                {
                    if (propertyBag.StopPipelining)
                    {
                        return propertyBag;
                    }

                    try
                    {
                        propertyBag.StopPipelining = !await pipelineStep.Process(crawlIngestionHelper, propertyBag);
                    }
                    catch (Exception exception)
                    {
                        propertyBag.Exceptions.Add(exception);
                    }

                    return propertyBag;
                }, new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = pipelineStep.MaxDegreeOfParallelism
                }));
            })
                                                                   .ToArray();

            ActionBlock <PropertyBag> terminationCheckerBlock = new ActionBlock <PropertyBag>(propertyBag =>
            {
                if (ingestBlock.InputCount == 0 &&
                    ingestBlock.OutputCount == 0 &&
                    !ingestBlock.Completion.IsCompleted &&
                    !ingestBlock.Completion.IsCanceled &&
                    !ingestBlock.Completion.IsFaulted &&
                    ingestBlockForAggregation.InputCount == 0 &&
                    ingestBlockForAggregation.OutputCount == 0)
                {
                    if (pipeline.Any(transformBlock => transformBlock.InputCount != 0 || transformBlock.OutputCount != 0))
                    {
                        return;
                    }

                    ingestBlock.Complete();
                }
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = 1
            });

            ingestBlock.LinkTo(ingestBlockForAggregation, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            TransformBlock <PropertyBag, PropertyBag> previous = ingestBlockForAggregation;

            foreach (TransformBlock <PropertyBag, PropertyBag> transformBlock in pipeline)
            {
                previous.LinkTo(transformBlock, new DataflowLinkOptions {
                    PropagateCompletion = true
                });
                previous = transformBlock;
            }

            previous.LinkTo(terminationCheckerBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            foreach (Uri startUri in StartUris)
            {
                ingestBlock.Post(startUri);
            }

            return(terminationCheckerBlock.Completion);
        }
        public void AddFiles(IEnumerable<FileInfo> files)
        {
            var tb = new TransformBlock<FileInfo, FileInfo>(file =>
                {
                    if (!TransferredFiles.Contains(file))
                        return file;

                    return null;
                });

            var ab = new ActionBlock<FileInfo>(file =>
            {
                if (file != null)
                    DetectedFiles.Push(file);
            });

            Parallel.ForEach(files, async file => await tb.SendAsync(file));

            tb.LinkTo(ab);

            tb.Complete();
            tb.Completion.Wait();

            //
            // Save Files
            //
            Task.Run(async () => await TransformPhysicalDisk.SecureDataSaverAsync(DetectedFiles.ToString(), DetectedData_Path, HashingPass));
        }
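        // A cautionary sketch rather than a drop-in replacement for AddFiles above: Parallel.ForEach with
        // an async lambda compiles to async void, so the loop can return before the SendAsync calls have
        // finished. Awaiting the sends keeps the same fan-in without that race.
        // Assumes using System.Collections.Generic, System.Linq, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static Task PostAllAsync<T>(ITargetBlock<T> target, IEnumerable<T> items)
        {
            // SendAsync returns a Task<bool>; waiting for all of them guarantees every item has been
            // offered before the caller goes on to call Complete() on the target.
            return Task.WhenAll(items.Select(item => target.SendAsync(item)));
        }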
        public async Task StartPipelineWithBackPressureAsync(CancellationToken token)
        {
            _decoder.LoadSensorConfigs();

            // Step 1 - Configure the pipeline

            // make sure our complete call gets propagated throughout the whole pipeline
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            // create our block configurations
            var largeBufferOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 60000
            };
            var smallBufferOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 1000
            };
            var parallelizedOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 1000, MaxDegreeOfParallelism = 4
            };
            var batchOptions = new GroupingDataflowBlockOptions()
            {
                BoundedCapacity = 200000
            };

            // create some branching functions for our TransformManyBlocks
            // DecodedMessage gets transformed into 3 RoutedMessages for the first three-way branch
            Func <DecodedMessage, IEnumerable <RoutedMessage> > messageBranchFunc = x => new List <RoutedMessage>
            {
                new RoutedMessage(1, x),
                new RoutedMessage(2, x),
                new RoutedMessage(3, x)
            };

            // DecodedMessage[] gets transformed into two RoutedBatches for the final branch
            Func <RoutedMessage[], IEnumerable <RoutedBatch> > batchBranchFunc = x => new List <RoutedBatch>
            {
                new RoutedBatch(1, x.Select(c => c.Message).ToList()),
                new RoutedBatch(2, x.Select(c => c.Message).ToList())
            };

            // define each block
            var writeRawMessageBlock = new TransformBlock <RawBusMessage, RawBusMessage>(async(RawBusMessage msg) =>
            {
                await _messageFileWriter.WriteAsync(msg);
                return(msg);
            }, largeBufferOptions);

            var decoderBlock = new TransformManyBlock <RawBusMessage, DecodedMessage>(
                (RawBusMessage msg) => _decoder.Decode(msg), largeBufferOptions);


            var msgBranchBlock = new TransformManyBlock <DecodedMessage, RoutedMessage>(messageBranchFunc, largeBufferOptions);

            var realTimeFeedBlock = new ActionBlock <RoutedMessage>(async(RoutedMessage routedMsg) =>
                                                                    await _realTimeFeedPublisher.PublishAsync(routedMsg.Message), largeBufferOptions);

            var thirtySecondBatchBlock     = new BatchBlock <RoutedMessage>(90000, batchOptions);
            var thirtySecondStatsFeedBlock = new ActionBlock <RoutedMessage[]>(async(RoutedMessage[] batch) =>
                                                                               await _statsFeedPublisher.PublishAsync(batch.Select(x => x.Message).ToList(), TimeSpan.FromSeconds(30)), smallBufferOptions);

            var oneSecondBatchBlock = new BatchBlock <RoutedMessage>(3000, batchOptions);
            var batchBroadcastBlock = new TransformManyBlock <RoutedMessage[], RoutedBatch>(batchBranchFunc, smallBufferOptions);

            var oneSecondStatsFeedBlock = new ActionBlock <RoutedBatch>(async(RoutedBatch batch) =>
                                                                        await _statsFeedPublisher.PublishAsync(batch.Messages.ToList(), TimeSpan.FromSeconds(1)), smallBufferOptions);

            var dbPersistenceBlock = new ActionBlock <RoutedBatch>(async(RoutedBatch batch) =>
                                                                   await _dbPersister.PersistAsync(batch.Messages.ToList()), parallelizedOptions);

            // link the blocks to together
            writeRawMessageBlock.LinkTo(decoderBlock, linkOptions);
            decoderBlock.LinkTo(msgBranchBlock, linkOptions);
            msgBranchBlock.LinkTo(realTimeFeedBlock, linkOptions, routedMsg => routedMsg.RouteKey == 1);      // route on the key
            msgBranchBlock.LinkTo(oneSecondBatchBlock, linkOptions, routedMsg => routedMsg.RouteKey == 2);    // route on the key
            msgBranchBlock.LinkTo(thirtySecondBatchBlock, linkOptions, routedMsg => routedMsg.RouteKey == 3); // route on the key
            thirtySecondBatchBlock.LinkTo(thirtySecondStatsFeedBlock, linkOptions);
            oneSecondBatchBlock.LinkTo(batchBroadcastBlock, linkOptions);
            batchBroadcastBlock.LinkTo(oneSecondStatsFeedBlock, linkOptions, routedMsg => routedMsg.RouteKey == 1); // route on the key
            batchBroadcastBlock.LinkTo(dbPersistenceBlock, linkOptions, routedMsg => routedMsg.RouteKey == 2);      // route on the key

            // Step 2 - Start consuming the machine bus interface (the producer)
            var consumerTask = _dataBusReader.StartConsuming(writeRawMessageBlock, token, TimeSpan.FromMilliseconds(1000), FlowControlMode.BackPressure);

            // Step 3 - Keep going until the CancellationToken is cancelled or a leaf block is in the completed state, either due to a fault or to completion of the pipeline.
            while (!token.IsCancellationRequested &&
                   !oneSecondStatsFeedBlock.Completion.IsCompleted &&
                   !dbPersistenceBlock.Completion.IsCompleted &&
                   !realTimeFeedBlock.Completion.IsCompleted &&
                   !thirtySecondStatsFeedBlock.Completion.IsCompleted)
            {
                await Task.Delay(500);
            }

            // Step 4 - the CancellationToken has been cancelled and our producer has stopped producing
            // call Complete on the first block, this will propagate down the pipeline
            writeRawMessageBlock.Complete();

            // wait for all leaf blocks to finish processing their data
            await Task.WhenAll(oneSecondStatsFeedBlock.Completion,
                               thirtySecondStatsFeedBlock.Completion,
                               dbPersistenceBlock.Completion,
                               realTimeFeedBlock.Completion,
                               consumerTask);

            // clean up any other resources like ZeroMQ for example
        }
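        // A self-contained sketch (nothing here uses the fields of the class above) of the two flow
        // control modes these pipelines choose between. With a bounded block, Post() sheds load by
        // returning false when the buffer is full, while SendAsync() applies back-pressure by waiting
        // until there is room.
        // Assumes using System, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task FlowControlSketchAsync()
        {
            var slow = new ActionBlock<int>(
                async i => await Task.Delay(100),
                new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });

            bool accepted = slow.Post(1);   // true: the buffer had room
            bool shed     = slow.Post(2);   // almost always false: the buffer is full, so the caller drops the message
            await slow.SendAsync(3);        // back-pressure: completes only once the block can take it

            Console.WriteLine($"accepted={accepted}, shed={shed}");
            slow.Complete();
            await slow.Completion;
        }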
        public async Task TestOrdering_Async_OrderedDisabled()
        {
            // If ordering were enabled, this test would hang.

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false };

            var tasks = new TaskCompletionSource<int>[10];
            for (int i = 0; i < tasks.Length; i++)
            {
                tasks[i] = new TaskCompletionSource<int>();
            }

            var tb = new TransformBlock<int, int>(i => tasks[i].Task, options);
            tb.PostRange(0, tasks.Length);

            for (int i = tasks.Length - 1; i >= 0; i--)
            {
                tasks[i].SetResult(i);
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }

            tb.Complete();
            await tb.Completion;
        }
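        // A compact sketch of what EnsureOrdered = false buys in the test above: with ordering disabled,
        // a result becomes available as soon as its own work finishes, even if an earlier input is still
        // pending. Assumes using System, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task UnorderedOutputSketchAsync()
        {
            var block = new TransformBlock<int, int>(
                async i => { await Task.Delay(i == 0 ? 500 : 1); return i; },
                new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded,
                    EnsureOrdered = false
                });

            block.Post(0);   // slow item
            block.Post(1);   // fast item

            // With EnsureOrdered = false this prints 1 first; with the default (true) it would wait for 0.
            Console.WriteLine(await block.ReceiveAsync());
            Console.WriteLine(await block.ReceiveAsync());

            block.Complete();
            await block.Completion;
        }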
        public async Task StartPipelineAsync(CancellationToken token)
        {
            _decoder.LoadSensorConfigs();

            // Step 1 - Configure the pipeline

            // make sure our complete call gets propagated throughout the whole pipeline
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            // create our block configurations
            var largeBufferOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 600000
            };
            var smallBufferOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 1000
            };
            var realTimeBufferOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 6000
            };
            var parallelizedOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = 1000, MaxDegreeOfParallelism = 4
            };
            var batchOptions = new GroupingDataflowBlockOptions()
            {
                BoundedCapacity = 1000
            };

            // define each block
            var writeRawMessageBlock = new TransformBlock <RawBusMessage, RawBusMessage>(async(RawBusMessage msg) =>
            {
                await _messageFileWriter.WriteAsync(msg);
                return(msg);
            }, largeBufferOptions);

            var decoderBlock = new TransformManyBlock <RawBusMessage, DecodedMessage>(
                (RawBusMessage msg) => _decoder.Decode(msg), largeBufferOptions);

            var broadcast = new BroadcastBlock <DecodedMessage>(msg => msg);

            var realTimeFeedBlock = new ActionBlock <DecodedMessage>(
                async (DecodedMessage msg) => await _realTimeFeedPublisher.PublishAsync(msg), realTimeBufferOptions);

            var oneSecondBatchBlock    = new BatchBlock <DecodedMessage>(3000);
            var thirtySecondBatchBlock = new BatchBlock <DecodedMessage>(90000);
            var batchBroadcastBlock    = new BroadcastBlock <DecodedMessage[]>(msg => msg);

            var oneSecondStatsFeedBlock = new ActionBlock <DecodedMessage[]>(
                async (DecodedMessage[] messages) => await _statsFeedPublisher.PublishAsync(messages.ToList(), TimeSpan.FromSeconds(1)), smallBufferOptions);

            var dbPersistenceBlock = new ActionBlock <DecodedMessage[]>(
                async (DecodedMessage[] messages) => await _dbPersister.PersistAsync(messages.ToList()), smallBufferOptions);

            var thirtySecondStatsFeedBlock = new ActionBlock <DecodedMessage[]>(
                async (DecodedMessage[] messages) => await _statsFeedPublisher.PublishAsync(messages.ToList(), TimeSpan.FromSeconds(30)), smallBufferOptions);

            // link the blocks to together
            writeRawMessageBlock.LinkTo(decoderBlock, linkOptions);
            decoderBlock.LinkTo(broadcast, linkOptions);
            broadcast.LinkTo(realTimeFeedBlock, linkOptions);
            broadcast.LinkTo(oneSecondBatchBlock, linkOptions);
            broadcast.LinkTo(thirtySecondBatchBlock, linkOptions);
            oneSecondBatchBlock.LinkTo(batchBroadcastBlock, linkOptions);
            batchBroadcastBlock.LinkTo(oneSecondStatsFeedBlock, linkOptions);
            batchBroadcastBlock.LinkTo(dbPersistenceBlock, linkOptions);
            thirtySecondBatchBlock.LinkTo(thirtySecondStatsFeedBlock, linkOptions);

            // Step 2 - Start consuming the machine bus interface (the producer)
            var consumerTask = _dataBusReader.StartConsuming(writeRawMessageBlock, token, TimeSpan.FromMilliseconds(1000), FlowControlMode.LoadShed);

            // Step 3 - Keep going until the CancellationToken is cancelled or a leaf block is in the completed state, either due to a fault or to completion of the pipeline.
            while (!token.IsCancellationRequested &&
                   !realTimeFeedBlock.Completion.IsCompleted &&
                   !oneSecondStatsFeedBlock.Completion.IsCompleted &&
                   !dbPersistenceBlock.Completion.IsCompleted &&
                   !thirtySecondStatsFeedBlock.Completion.IsCompleted)
            {
                await Task.Delay(500);
            }

            // Step 4 - the CancellationToken has been cancelled and our producer has stopped producing
            // call Complete on the first block, this will propagate down the pipeline
            writeRawMessageBlock.Complete();

            // wait for all leaf blocks to finish processing their data
            await Task.WhenAll(realTimeFeedBlock.Completion,
                               oneSecondStatsFeedBlock.Completion,
                               dbPersistenceBlock.Completion,
                               thirtySecondStatsFeedBlock.Completion,
                               consumerTask);

            // clean up any other resources like ZeroMQ for example
        }
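        // A small sketch (independent of the pipeline above) of why the load-shed variant uses
        // BroadcastBlock: a BroadcastBlock keeps only the latest message, so a bounded, slow target
        // simply misses values instead of stalling the producer.
        // Assumes using System, System.Threading.Tasks and System.Threading.Tasks.Dataflow.
        static async Task BroadcastLoadShedSketchAsync()
        {
            var broadcast = new BroadcastBlock<int>(i => i);    // clone function: identity
            var slow = new ActionBlock<int>(
                async i => { await Task.Delay(50); Console.WriteLine($"slow saw {i}"); },
                new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });

            broadcast.LinkTo(slow, new DataflowLinkOptions { PropagateCompletion = true });

            for (int i = 0; i < 100; i++)
                broadcast.Post(i);          // never blocks; older values are overwritten by newer ones

            broadcast.Complete();
            await slow.Completion;          // slow typically sees only a handful of the 100 values
        }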
Beispiel #31
        public string Name(IPost post)
        {
            // Keep tags separated. This is faster than enumerating
            // over a large collection with all the tags using LINQ.
            var artistTags    = new List <ITag>();
            var copyrightTags = new List <ITag>();
            var characterTags = new List <ITag>();

            // Put tags into their own lists.
            var addTagBlock = new ActionBlock <ITag?>(tag =>
            {
                switch (tag?.Kind)
                {
                case TagKind.Artist:
                    artistTags.Add(tag);
                    break;

                case TagKind.Copyright:
                    copyrightTags.Add(tag);
                    break;

                case TagKind.Character:
                    characterTags.Add(tag);
                    break;
                }
            });

            if (post is IPostExtendedTags postExtendedTags)
            {
                postExtendedTags.ExtendedTags.ForEach(tag => addTagBlock.Post(tag));

                addTagBlock.Complete();
            }
            else
            {
                // Get tags using provided function in parallel.
                var getTagBlock = new TransformBlock <string, ITag?>(
                    async tagName =>
                {
                    try
                    {
                        return(await _tagExtractorFunc(tagName).CAF());
                    }
                    catch (InvalidTagNameException)
                    {
                        Logger.Debug($"Got invalid tag '{tagName}'.", this);
                        return(null);
                    }
                },
                    new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = MaxActiveTagRequestsCount
                });

                // IMPORTANT: getTagBlock must propagate its completion if we want to wait for
                // addTagBlock's completion. This is the correct usage as per MS docs.
                getTagBlock.LinkTo(addTagBlock, _linkOptions);

                if (post is IPostExtraTags extraTags)
                {
                    void postTag(string tag) => getTagBlock.Post(tag);

                    extraTags.CharacterTags.ForEach(postTag);
                    extraTags.CopyrightTags.ForEach(postTag);
                    extraTags.ArtistTags.ForEach(postTag);
                }
                else
                {
                    foreach (string tag in post.Tags)
                    {
                        getTagBlock.Post(tag);
                    }
                }

                getTagBlock.Complete();
            }

            // Wait until all the tags are processed.
            addTagBlock.Completion.Wait();
Beispiel #32
        static void Main(string[] args)
        {
            //
            // Create the members of the pipeline.
            //

            // Downloads the requested resource as a string.
            var downloadString = new TransformBlock <string, string>(uri =>
            {
                Console.WriteLine("Downloading '{0}'...", uri);

                return(new WebClient().DownloadString(uri));
            });

            // Separates the specified text into an array of words.
            var createWordList = new TransformBlock <string, string[]>(text =>
            {
                Console.WriteLine("Creating word list...");

                // Remove common punctuation by replacing all non-letter characters
                // with a space character.
                char[] tokens = text.ToArray();
                for (int i = 0; i < tokens.Length; i++)
                {
                    if (!char.IsLetter(tokens[i]))
                    {
                        tokens[i] = ' ';
                    }
                }
                text = new string(tokens);

                // Separate the text into an array of words.
                return(text.Split(new char[] { ' ' },
                                  StringSplitOptions.RemoveEmptyEntries));
            });

            // Removes short words, orders the resulting words alphabetically,
            // and then remove duplicates.
            var filterWordList = new TransformBlock <string[], string[]>(words =>
            {
                Console.WriteLine("Filtering word list...");

                return(words.Where(word => word.Length > 3).OrderBy(word => word)
                       .Distinct().ToArray());
            });

            // Finds all words in the specified collection whose reverse also
            // exists in the collection.
            var findReversedWords = new TransformManyBlock <string[], string>(words =>
            {
                Console.WriteLine("Finding reversed words...");

                // Holds reversed words.
                var reversedWords = new ConcurrentQueue <string>();

                // Add each word in the original collection to the result whose
                // reversed word also exists in the collection.
                Parallel.ForEach(words, word =>
                {
                    // Reverse the word.
                    string reverse = new string(word.Reverse().ToArray());

                    // Enqueue the word if the reversed version also exists
                    // in the collection.
                    if (Array.BinarySearch <string>(words, reverse) >= 0 &&
                        word != reverse)
                    {
                        reversedWords.Enqueue(word);
                    }
                });

                return(reversedWords);
            });

            // Prints the provided reversed words to the console.
            var printReversedWords = new ActionBlock <string>(reversedWord =>
            {
                Console.WriteLine("Found reversed words {0}/{1}",
                                  reversedWord, new string(reversedWord.Reverse().ToArray()));
            });

            //
            // Connect the dataflow blocks to form a pipeline.
            //

            downloadString.LinkTo(createWordList);
            createWordList.LinkTo(filterWordList);
            filterWordList.LinkTo(findReversedWords);
            findReversedWords.LinkTo(printReversedWords);

            //
            // For each completion task in the pipeline, create a continuation task
            // that marks the next block in the pipeline as completed.
            // A completed dataflow block processes any buffered elements, but does
            // not accept new elements.
            //

            downloadString.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)createWordList).Fault(t.Exception);
                }
                else
                {
                    createWordList.Complete();
                }
            });
            createWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)filterWordList).Fault(t.Exception);
                }
                else
                {
                    filterWordList.Complete();
                }
            });
            filterWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)findReversedWords).Fault(t.Exception);
                }
                else
                {
                    findReversedWords.Complete();
                }
            });
            findReversedWords.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)printReversedWords).Fault(t.Exception);
                }
                else
                {
                    printReversedWords.Complete();
                }
            });

            // Process "The Iliad of Homer" by Homer.
            downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");

            // Mark the head of the pipeline as complete. The continuation tasks
            // propagate completion through the pipeline as each part of the
            // pipeline finishes.
            downloadString.Complete();

            // Wait for the last block in the pipeline to process all messages.
            printReversedWords.Completion.Wait();

            Console.ReadLine();
        }
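        // A sketch of the shorter way to get the same completion flow as the four ContinueWith
        // continuations above: pass DataflowLinkOptions with PropagateCompletion when linking, and a
        // Complete() call or a fault on the head block reaches the tail automatically. The parameters
        // mirror the block types used in Main above.
        // Assumes using System.Threading.Tasks.Dataflow.
        static void LinkWithPropagationSketch(
            TransformBlock<string, string> downloadString,
            TransformBlock<string, string[]> createWordList,
            TransformBlock<string[], string[]> filterWordList,
            TransformManyBlock<string[], string> findReversedWords,
            ActionBlock<string> printReversedWords)
        {
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };

            downloadString.LinkTo(createWordList, linkOptions);
            createWordList.LinkTo(filterWordList, linkOptions);
            filterWordList.LinkTo(findReversedWords, linkOptions);
            findReversedWords.LinkTo(printReversedWords, linkOptions);

            // downloadString.Complete() (or a fault) now propagates all the way to printReversedWords,
            // so waiting on printReversedWords.Completion is still the only wait needed at the end.
        }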
Beispiel #33
        internal static bool TransformThroughDiscardingFilterToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, i => i % 2 == 0);
            t.LinkTo(DataflowBlock.NullTarget<int>());
            t.Completion.ContinueWith(_ => c.Complete());

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();

            return completedCount == ITERS / 2;
        }
Beispiel #34
 private void StartPipeline()
 {
     _fetcherBlock.Post("TestSupplier");
     _fetcherBlock.Complete();
 }
        public static async Task DecompressFastDataFlow(Stream inputStream, Stream outputStream)
        {
            var buffer = new BufferBlock<DecompressionDetails>(new DataflowBlockOptions { BoundedCapacity = BoundedCapacity });
            var compressorOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism,
                BoundedCapacity = BoundedCapacity
            };

            var writerOptions = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = BoundedCapacity,
                SingleProducerConstrained = true 
            };

            var compressor = new TransformBlock<DecompressionDetails, DecompressionDetails>(compressionDetails => Decompress(compressionDetails), compressorOptions);
            var writer = new ActionBlock<DecompressionDetails>(compressionDetailsWithSize => Multiplex(buffer, outputStream, compressionDetailsWithSize), writerOptions);


            buffer.LinkTo(compressor);
            compressor.LinkTo(writer);

            buffer.Completion.ContinueWith(task => compressor.Complete());      
            compressor.Completion.ContinueWith(task => writer.Complete());




            byte[] size = new byte[sizeof(long)];
            await inputStream.ReadAsync(size, 0, size.Length);
            // convert the size to number
            long sourceLength = BitConverter.ToInt64(size, 0);

            int index = 0;
            while (sourceLength > 0)
            {
                size = new byte[sizeof(long)];
                await inputStream.ReadAsync(size, 0, size.Length);

                // convert the size back to number
                long chunkSize = BitConverter.ToInt64(size, 0);
                if (chunkSize > sourceLength) throw new InvalidDataException("Chunk size is larger than the remaining source length.");

                // read the compressed size
                size = new byte[sizeof(int)];
                await inputStream.ReadAsync(size, 0, size.Length);

                // convert the size back to number
                int storedSize = BitConverter.ToInt32(size, 0);

                byte[] compressedData = new byte[storedSize];
                int readCount = await inputStream.ReadAsync(compressedData, 0, compressedData.Length);

                DecompressionDetails decompressionDetails = new DecompressionDetails
                {
                    Bytes = compressedData,
                    ChunkSize = chunkSize,             
                    Sequence = ++index
                };

                while (!await buffer.SendAsync(decompressionDetails)) { }

                sourceLength -= chunkSize;
                if (sourceLength < chunkSize)
                    chunkSize = sourceLength;

                if (sourceLength == 0)
                    buffer.Complete();
            }
            await writer.Completion;

            outputStream.Flush();
            inputStream.Dispose();
            outputStream.Dispose();
        }
        public async Task <IEnumerable <string> > ProcessInParallel(IOutputWriter logListener, int maxDegreeOfParallelism = 8)
        {
            var theResults = new List <string>();

            var processBlock = new TransformBlock <int, string>(async number =>
            {
                var processedNumber = await ProcessNumber(number);

                return(processedNumber);
            }, new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = maxDegreeOfParallelism,
                BoundedCapacity        = 10
            });

            var putInListBlock = new ActionBlock <string>(t =>
            {
                if (logListener != null)
                {
                    logListener.WriteOutput(t);
                }
                theResults.Add(t);
            }, SynchronizeForUiThread(new ExecutionDataflowBlockOptions()
            {
            }));

            processBlock.LinkTo(putInListBlock, new DataflowLinkOptions()
            {
                PropagateCompletion = true
            });


            var ttt = Task.Run(async() =>
            {
                while (true)
                {
                    Console.WriteLine($">>>>> Threads: {currentThreadsRunning}     In: {processBlock.InputCount} Out: {processBlock.OutputCount}      >>    In: {putInListBlock.InputCount} Out: {putInListBlock.Completion.IsCompleted}");
                    await Task.Delay(1000);
                }
            });

            var numbers = EnumerateNumberList(100);
            await Task.Run(async() =>
            {
                foreach (var number in numbers)
                {
                    Console.WriteLine($"Posting: {number}");
                    var result = await processBlock.SendAsync(number);

                    if (!result)
                    {
                        Console.WriteLine("Result is false!!!");
                    }
                }
            });



            Console.WriteLine("Completing");
            processBlock.Complete();
            await putInListBlock.Completion;

            return(theResults);
        }
Beispiel #37
        private static async Task <UsageInformation> GetUsageInformation(IEnumerable <string> files)
        {
            var result = new UsageInformation();

            var multipleInstancesOption = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount
            };
            var singleInstanceOption = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1
            };

            var getApisBlock     = new TransformBlock <string, UsageInformation?>(GetApisUsedByAssembly, multipleInstancesOption);
            var collectApisBlock = new ActionBlock <UsageInformation?>(assemblyInfo =>
            {
                if (assemblyInfo != null)
                {
                    foreach (var targetFramework in assemblyInfo.TargetFrameworks)
                    {
                        result.TargetFrameworks.Add(targetFramework);
                    }

                    foreach (var version in assemblyInfo.Versions)
                    {
                        result.Versions.Add(version);
                    }

                    foreach (var apisByAssembly in assemblyInfo.MemberReferences)
                    {
                        if (!result.MemberReferences.TryGetValue(apisByAssembly.Key, out HashSet <string>?allApis))
                        {
                            allApis = new HashSet <string>();
                            result.MemberReferences[apisByAssembly.Key] = allApis;
                        }

                        foreach (var api in apisByAssembly.Value)
                        {
                            allApis.Add(api);
                        }
                    }
                }
            },
                                                                       singleInstanceOption);

            var linkOptions = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

            getApisBlock.LinkTo(collectApisBlock, linkOptions);

            foreach (var file in files)
            {
                await getApisBlock.SendAsync(file);
            }

            getApisBlock.Complete();

            await Task.WhenAll(getApisBlock.Completion, collectApisBlock.Completion);

            return(result);
        }
 public void Reset()
 {
     _builder?.Complete();
     _builder = null;
 }
Beispiel #39
        public override void Execute()
        {
            SourceBufferBlock     = new BufferBlock <string[]>();
            DestinationBatchBlock = new BatchBlock <string[]>(BatchSize);
            if (Source == null)
            {
                Source = new CSVSource(FileName);
            }
            using (Source) {
                Source.Open();
                Destination = new DBDestination()
                {
                    Connection = DbConnectionManager, TableName = TableName
                };

                NLogger.Info(TaskName, TaskType, "START", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey);

                /* Pipeline:
                 * Source -> BufferBlock -> RowTransformation -> BatchBlock -> BatchTransformation -> Destination
                 * */
                RowTransformBlock   = new TransformBlock <string[], string[]>(inp => RowTransformFunction.Invoke(inp));
                BatchTransformBlock = new TransformBlock <string[][], InMemoryTable>(inp => BatchTransformFunction.Invoke(inp));
                DestinationBlock    = new ActionBlock <InMemoryTable>(outp => Destination.WriteBatch(outp));

                SourceBufferBlock.LinkTo(RowTransformBlock);
                RowTransformBlock.LinkTo(DestinationBatchBlock);
                DestinationBatchBlock.LinkTo(BatchTransformBlock);
                BatchTransformBlock.LinkTo(DestinationBlock);
                SourceBufferBlock.Completion.ContinueWith(t => { NLogger.Debug($"SoureBufferBlock DataFlow Completed: {TaskName}", TaskType, "RUN", TaskHash); RowTransformBlock.Complete(); });
                RowTransformBlock.Completion.ContinueWith(t => { NLogger.Debug($"RowTransformBlock DataFlow Completed: {TaskName}", TaskType, "RUN", TaskHash); DestinationBatchBlock.Complete(); });
                DestinationBatchBlock.Completion.ContinueWith(t => { NLogger.Debug($"DestinationBatchBlock DataFlow Completed: {TaskName}", TaskType, "RUN", TaskHash); BatchTransformBlock.Complete(); });
                BatchTransformBlock.Completion.ContinueWith(t => { NLogger.Debug($"BatchTransformBlock DataFlow Completed: {TaskName}", TaskType, "RUN", TaskHash); DestinationBlock.Complete(); });

                Source.Read(RowTransformBlock);
                SourceBufferBlock.Complete();
                DestinationBlock.Completion.Wait();

                NLogger.Info(TaskName, TaskType, "END", TaskHash, ControlFlow.STAGE, ControlFlow.CurrentLoadProcess?.LoadProcessKey);
            }
        }
Beispiel #40
        public async Task AnalyseAsync(string authorization, string inputSource, string reportOutput, bool doContentCheck = false)
        {
            if (string.IsNullOrEmpty(authorization))
            {
                throw new ArgumentNullException(nameof(authorization), "You must provide the target bot (node) for this action.");
            }

            if (string.IsNullOrEmpty(inputSource))
            {
                throw new ArgumentNullException(nameof(inputSource), "You must provide the input source (phrase or file) for this action.");
            }

            if (string.IsNullOrEmpty(reportOutput))
            {
                throw new ArgumentNullException(nameof(reportOutput), "You must provide the full output's report file name for this action.");
            }

            _logger.LogDebug("COMEÇOU!");

            _fileService.CreateDirectoryIfNotExists(reportOutput);

            var bucketStorage   = new BucketStorage("Key " + authorization);
            var contentProvider = new Take.ContentProvider.ContentProvider(bucketStorage, 5);
            var client          = _blipClientFactory.GetInstanceForAI(authorization);

            var allIntents = new List <Intention>();

            if (doContentCheck)
            {
                _logger.LogDebug("\tCarregando intencoes...");
                allIntents = await client.GetAllIntentsAsync();

                _logger.LogDebug("\tCarregadas!");
            }
            bool isPhrase = false;

            var isDirectory = _fileService.IsDirectory(inputSource);
            var isFile      = _fileService.IsFile(inputSource);

            if (isFile)
            {
                _logger.LogDebug("\tA entrada é um arquivo");
                isPhrase = false;
            }
            else
            if (isDirectory)
            {
                _logger.LogError("\tA entrada é um diretório");
                throw new ArgumentNullException("You must provide the input source (phrase or file) for this action. Your input was a direcory.");
            }
            else
            {
                _logger.LogDebug("\tA entrada é uma frase");
                isPhrase = true;
            }

            var options = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity        = DataflowBlockOptions.Unbounded,
                MaxDegreeOfParallelism = 20,
            };

            var analyseBlock    = new TransformBlock <NLPAnalyseDataBlock, NLPAnalyseDataBlock>((Func <NLPAnalyseDataBlock, Task <NLPAnalyseDataBlock> >)AnalyseForMetrics, options);
            var checkBlock      = new TransformBlock <NLPAnalyseDataBlock, NLPAnalyseDataBlock>((Func <NLPAnalyseDataBlock, NLPAnalyseDataBlock>)CheckResponse, options);
            var contentBlock    = new TransformBlock <NLPAnalyseDataBlock, NLPAnalyseDataBlock>((Func <NLPAnalyseDataBlock, Task <NLPAnalyseDataBlock> >)GetContent, options);
            var showResultBlock = new ActionBlock <NLPAnalyseDataBlock>(BuildResult, new ExecutionDataflowBlockOptions
            {
                BoundedCapacity    = DataflowBlockOptions.Unbounded,
                MaxMessagesPerTask = 1
            });

            analyseBlock.LinkTo(checkBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            checkBlock.LinkTo(contentBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            contentBlock.LinkTo(showResultBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });

            _count = 0;

            var inputList = await GetInputList(isPhrase, inputSource, client, reportOutput, allIntents, contentProvider, doContentCheck);

            _total = inputList.Count;
            foreach (var input in inputList)
            {
                await analyseBlock.SendAsync(input);
            }

            analyseBlock.Complete();
            await showResultBlock.Completion;

            _logger.LogDebug("TERMINOU!");
        }
        public void ProduceLogs(int count, int buffSize)
        {
            var writerOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = 10, MaxDegreeOfParallelism = 1, MaxMessagesPerTask = 10 };
            var serializerOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = buffSize, MaxDegreeOfParallelism = 8, SingleProducerConstrained = true };

            LogGenerator g = new LogGenerator();

            var file = new StreamWriter("basic.async.srlz.log", false);

            TransformBlock<LogEntry, string> serializer = new TransformBlock<LogEntry, string>(
                e => string.Format(e.format, e.parameters),
                serializerOptions);

            ActionBlock<string> writer = new ActionBlock<string>(s => file.WriteLine(s), writerOptions);

            serializer.LinkTo(writer, new DataflowLinkOptions() { PropagateCompletion = true });

            for (int i = 0; i < count; i++)
            {
                g.Next();

                var entry = new LogEntry() { format = g.FormatStr, parameters = new object[] { g.Param1, g.Param2, g.Param3, g.Param4, g.Param5, g.Param6 } };
                serializer.SendAsync(entry).Wait();
            }

            serializer.Complete();

            Completed = writer.Completion.ContinueWith(t => file.Close());
        }
Beispiel #42
        //[Fact(Skip = "Outerloop")]
        public void RunTransformBlockConformanceTests()
        {
            bool passed = true;
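            // The Sync region below uses the Func<int, int> constructor; the Async region repeats the same scenarios with the Task<int>-returning constructor.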

            // SYNC
            #region Sync
            {
                // Do everything twice - once through OfferMessage and once through Post
                for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
                {
                    Func<DataflowBlockOptions, TargetProperties<int>> transformBlockFactory =
                        options =>
                        {
                            TransformBlock<int, int> transformBlock = new TransformBlock<int, int>(i => i, (ExecutionDataflowBlockOptions)options);
                            ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);

                            transformBlock.LinkTo(actionBlock);

                            return new TargetProperties<int> { Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false };
                        };
                    CancellationTokenSource cancellationSource = new CancellationTokenSource();
                    var defaultOptions = new ExecutionDataflowBlockOptions();
                    var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                    var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2 };
                    var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token };

                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, dopOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 1, Intervention.Complete, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, cancellationOptions, 1, Intervention.Cancel, cancellationSource, feedMethod, true);
                }

                // Test chained Post/Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.Post(i);
                        localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync/Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.SendAsync(i);
                        localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained Post all then Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
                    for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true;
                    for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16;
                    Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync all then Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => i * 2));
                    var tasks = new Task[ITERS];
                    for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i);
                    Task.WaitAll(tasks);
                    int total = 0;
                    for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive();
                    localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
                    Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that OperationCanceledExceptions are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock<int, int>(i =>
                    {
                        if ((i % 2) == 0) throw new OperationCanceledException();
                        return i;
                    });
                    for (int i = 0; i < 2; i++) t.Post(i);
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0) localPassed &= t.Receive() == i;
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test using a precanceled token
                {
                    bool localPassed = true;
                    try
                    {
                        var cts = new CancellationTokenSource();
                        cts.Cancel();
                        var dbo = new ExecutionDataflowBlockOptions { CancellationToken = cts.Token };
                        var t = new TransformBlock<int, int>(i => i, dbo);

                        int ignoredValue;
                        IList<int> ignoredValues;
                        localPassed &= t.LinkTo(new ActionBlock<int>(delegate { })) != null;
                        localPassed &= t.SendAsync(42).Result == false;
                        localPassed &= t.TryReceiveAll(out ignoredValues) == false;
                        localPassed &= t.Post(42) == false;
                        localPassed &= t.OutputCount == 0;
                        localPassed &= t.TryReceive(out ignoredValue) == false;
                        localPassed &= t.Completion != null;
                        t.Complete();
                    }
                    catch (Exception)
                    {
                        localPassed = false;
                    }
                    Console.WriteLine("    {0}: Precanceled tokens work correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting
                {
                    bool localPassed = true;
                    var t = new TransformBlock<int, int>(new Func<int, int>(i => { throw new InvalidOperationException(); }));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }
            }
            #endregion

            #region Async
            // ASYNC (a copy of the sync but with constructors returning Task<T> instead of T)
            {
                // Do everything twice - once through OfferMessage and once through Post
                for (FeedMethod feedMethod = FeedMethod._First; passed & feedMethod < FeedMethod._Count; feedMethod++)
                {
                    Func<DataflowBlockOptions, TargetProperties<int>> transformBlockFactory =
                        options =>
                        {
                            TransformBlock<int, int> transformBlock = new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i), (ExecutionDataflowBlockOptions)options);
                            ActionBlock<int> actionBlock = new ActionBlock<int>(i => TrackCaptures(i), (ExecutionDataflowBlockOptions)options);

                            transformBlock.LinkTo(actionBlock);

                            return new TargetProperties<int> { Target = transformBlock, Capturer = actionBlock, ErrorVerifyable = false };
                        };
                    CancellationTokenSource cancellationSource = new CancellationTokenSource();
                    var defaultOptions = new ExecutionDataflowBlockOptions();
                    var dopOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
                    var mptOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2 };
                    var cancellationOptions = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount, MaxMessagesPerTask = 2, CancellationToken = cancellationSource.Token };

                    passed &= FeedTarget(transformBlockFactory, defaultOptions, 1, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, dopOptions, 10, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.None, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, mptOptions, 10000, Intervention.Complete, null, feedMethod, true);
                    passed &= FeedTarget(transformBlockFactory, cancellationOptions, 10000, Intervention.Cancel, cancellationSource, feedMethod, true);
                }

                // Test chained Post/Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.Post(i);
                        localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained Post/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync/Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++)
                    {
                        network.SendAsync(i);
                        localPassed &= (((IReceivableSourceBlock<int>)network).Receive() == i * 16);
                    }
                    Console.WriteLine("{0}: Chained SendAsync/Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained Post all then Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    for (int i = 0; i < ITERS; i++) localPassed &= network.Post(i) == true;
                    for (int i = 0; i < ITERS; i++) localPassed &= ((IReceivableSourceBlock<int>)network).Receive() == i * 16;
                    Console.WriteLine("{0}: Chained Post all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test chained SendAsync all then Receive
                {
                    bool localPassed = true;
                    const int ITERS = 2;
                    var network = Chain<TransformBlock<int, int>, int>(4, () => new TransformBlock<int, int>(i => Task.Factory.StartNew(() => i * 2)));
                    var tasks = new Task[ITERS];
                    for (int i = 1; i <= ITERS; i++) tasks[i - 1] = network.SendAsync(i);
                    Task.WaitAll(tasks);
                    int total = 0;
                    for (int i = 1; i <= ITERS; i++) total += ((IReceivableSourceBlock<int>)network).Receive();
                    localPassed &= (total == ((ITERS * (ITERS + 1)) / 2 * 16));
                    Console.WriteLine("{0}: Chained SendAsync all then Receive", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that OperationCanceledExceptions are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock<int, int>(i =>
                    {
                        if ((i % 2) == 0) throw new OperationCanceledException();
                        return Task.Factory.StartNew(() => i);
                    });
                    for (int i = 0; i < 2; i++) t.Post(i);
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0) localPassed &= t.Receive() == i;
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: OperationCanceledExceptions are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that null tasks are ignored
                {
                    bool localPassed = true;

                    var t = new TransformBlock<int, int>(i =>
                    {
                        if ((i % 2) == 0) return null;
                        return Task.Factory.StartNew(() => i);
                    });
                    for (int i = 0; i < 2; i++) t.Post(i);
                    t.Complete();
                    for (int i = 0; i < 2; i++)
                    {
                        if ((i % 2) != 0) localPassed &= t.Receive() == i;
                    }
                    t.Completion.Wait();
                    Console.WriteLine("{0}: null tasks are ignored", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test that null tasks are ignored when a reordering buffer is in place
                {
                    bool localPassed = true;

                    var t = new TransformBlock<int, int>(i =>
                    {
                        if (i == 0)
                        {
                            Task.Delay(10).Wait();
                            return null;
                        }
                        return Task.Factory.StartNew(() => i);
                    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 });
                    t.Post(0);
                    t.Post(1);
                    try
                    {
                        localPassed &= t.Receive(TimeSpan.FromSeconds(4)) == 1;
                    }
                    catch
                    {
                        localPassed = false;
                    }
                    Console.WriteLine("{0}: null tasks are ignored with reordering buffer", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting from the delegate
                {
                    bool localPassed = true;
                    var t = new TransformBlock<int, int>(new Func<int, Task<int>>(i => { throw new InvalidOperationException(); }));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted from delegate handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }

                // Test faulting from the task
                {
                    bool localPassed = true;
                    var t = new TransformBlock<int, int>(new Func<int, Task<int>>(i => Task<int>.Factory.StartNew(() => { throw new InvalidOperationException(); })));
                    t.Post(42);
                    t.Post(1);
                    t.Post(2);
                    t.Post(3);
                    try { t.Completion.Wait(); }
                    catch { }
                    localPassed &= t.Completion.IsFaulted;
                    localPassed &= SpinWait.SpinUntil(() => t.InputCount == 0, 500);
                    localPassed &= SpinWait.SpinUntil(() => t.OutputCount == 0, 500);
                    localPassed &= t.Post(4) == false;
                    Console.WriteLine("    {0}: Faulted from task handled correctly", localPassed ? "Success" : "Failure");
                    passed &= localPassed;
                }
            }
            #endregion

            Assert.True(passed, "Test failed.");
        }
        public static async Task Compress(Stream inputStream, Stream outputStream)
        {
            var buffer = new BufferBlock<CompressionDetails>(new DataflowBlockOptions { BoundedCapacity = BoundedCapacity });
            var compressorOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = MaxDegreeOfParallelism,
                BoundedCapacity = BoundedCapacity
            };

            var writerOptions = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = BoundedCapacity,
                SingleProducerConstrained = true
            };
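            // Pipeline: buffer -> compressor -> writer; completion is propagated manually through the ContinueWith calls below.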

            var compressor = new TransformBlock<CompressionDetails, CompressionDetails>(compressionDetails => Compress(compressionDetails), compressorOptions);
            var writer = new ActionBlock<CompressionDetails>(compressionDetailsWithSize => Multiplex(outputStream, compressionDetailsWithSize), writerOptions);

            buffer.LinkTo(compressor);
            compressor.LinkTo(writer);
            
            buffer.Completion.ContinueWith(task => compressor.Complete()); 
            compressor.Completion.ContinueWith(task => writer.Complete());

            long sourceLength = inputStream.Length;
            // Write total size to destination
            byte[] size = BitConverter.GetBytes(sourceLength);
            await outputStream.WriteAsync(size, 0, size.Length);

            long chunkSize = 1048576; // 1 MB
            int index = 0;
            while (sourceLength > 0)
            {
                byte[] data = new byte[chunkSize];
                int readCount = await inputStream.ReadAsync(data, 0, data.Length);

                byte[] bytes = new byte[readCount];
                Buffer.BlockCopy(data, 0, bytes, 0, readCount);

                CompressionDetails compressionDetails = new CompressionDetails
                {
                    Bytes = bytes,
                    ChunkSize = BitConverter.GetBytes(chunkSize),
                    Sequence = ++index
                };

                await buffer.SendAsync(compressionDetails);

                sourceLength -= readCount;
                if (sourceLength < chunkSize)
                    chunkSize = sourceLength;
            }

            // All chunks have been queued; complete the head of the pipeline and wait for the writer to drain.
            buffer.Complete();
            await writer.Completion;

            await outputStream.FlushAsync();
            inputStream.Dispose();
            outputStream.Dispose();
        }
        public async Task TestPrecanceled()
        {
            var bb = new TransformBlock<int, int>(i => i,
                new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(canceled: true) });
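            // An already-canceled token makes the block decline every message and causes its Completion task to end up canceled.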

            int ignoredValue;
            IList<int> ignoredValues;

            IDisposable link = bb.LinkTo(DataflowBlock.NullTarget<int>());
            Assert.NotNull(link);
            link.Dispose();
            
            Assert.False(bb.Post(42));
            var t = bb.SendAsync(42);
            Assert.True(t.IsCompleted);
            Assert.False(t.Result);

            Assert.False(bb.TryReceiveAll(out ignoredValues));
            Assert.False(bb.TryReceive(out ignoredValue));

            Assert.NotNull(bb.Completion);
            await Assert.ThrowsAnyAsync<OperationCanceledException>(() => bb.Completion);
            bb.Complete(); // just make sure it doesn't throw
        }
Beispiel #45
 public void Complete()
 {
     transformBlock.Complete();
 }
        public async Task TestNullTasksIgnored()
        {
            foreach (int dop in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            {
                var tb = new TransformBlock<int, int>(i => {
                    if ((i % 2) == 0) return null;
                    return Task.Run(() => i);
                }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop });
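                // Even inputs map to a null task, which the block drops silently; only odd inputs produce output.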

                const int Iters = 100;
                tb.PostRange(0, Iters);
                tb.Complete();

                for (int i = 0; i < Iters; i++)
                {
                    if ((i % 2) != 0)
                    {
                        Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                    }
                }
                await tb.Completion;
            }
        }
Beispiel #47
        ///<summary>Splits all hashes into chunks of a fixed size and sorts each chunk.
        ///Merge-sorting the resulting files yields a completely sorted sequence.</summary>
        public static async Task <int> QuickSortAll(int Index, long SortMask)
        {
            int FileCount = 0;
            var SortComp  = new BlockSortComparer(SortMask);

            //Pool the sort arrays here to reduce memory allocations
            var  LongPool       = new ConcurrentBag <long[]>();
            bool LongPoolReturn = true;
            //Limit how many sort arrays can be in use at once
            var FirstSortSemaphore = new SemaphoreSlim(config.hash.InitialSortConcurrency);

            //Sorting runs in parallel; writing is kept sequential
            var FirstSortBlock = new TransformBlock <FirstSort, FirstSort>(async(t) =>
            {
                await QuickSortParllel(SortMask, t.ToSort, t.Length, SortComp).ConfigureAwait(false);
                return(t);
            }, new ExecutionDataflowBlockOptions()
            {
                SingleProducerConstrained = true,
                MaxDegreeOfParallelism    = config.hash.InitialSortConcurrency,
            });
            var WriterBlock = new ActionBlock <FirstSort>((t) =>
            {
                using (var writer = new UnbufferedLongWriter(t.WriteFilePath))
                {
                    writer.WriteDestructive(t.ToSort, t.Length);
                }
                //Return the array to LongPool here
                if (LongPoolReturn)
                {
                    LongPool.Add(t.ToSort);
                }
                FirstSortSemaphore.Release();
            }, new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism = 1
            });

            FirstSortBlock.LinkTo(WriterBlock, new DataflowLinkOptions()
            {
                PropagateCompletion = true
            });
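            //A semaphore slot is acquired before each buffer is filled and released by WriterBlock, bounding the number of sort arrays in flight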

            //Read AllHash first
            using (var reader = new UnbufferedLongReader(AllHashFilePath))
            {
                for (; reader.Readable; FileCount++)
                {
                    await FirstSortSemaphore.WaitAsync().ConfigureAwait(false);

                    if (!LongPool.TryTake(out var ToSort))
                    {
                        ToSort = new long[InitialSortUnit];
                    }
                    int ToSortLength = reader.Read(ToSort);
                    FirstSortBlock.Post(new FirstSort(SortingFilePath(Index, FileCount), ToSort, ToSortLength));
                }
            }

            //Read the NewerHash files
            int ToSortNewerCursor = 0;
            await FirstSortSemaphore.WaitAsync().ConfigureAwait(false);

            if (!LongPool.TryTake(out var ToSortNewer))
            {
                ToSortNewer = new long[InitialSortUnit];
            }
            foreach (var filePath in Directory.EnumerateFiles(config.hash.TempDir, Path.GetFileName(NewerHashFilePathBase("*"))))
            {
                using (var reader = new BufferedLongReader(filePath))
                {
                    while (reader.Readable)
                    {
                        for (; ToSortNewerCursor < ToSortNewer.Length; ToSortNewerCursor++)
                        {
                            if (!reader.MoveNext(out var next))
                            {
                                break;
                            }
                            ToSortNewer[ToSortNewerCursor] = next;
                        }
                        if (InitialSortUnit <= ToSortNewerCursor)
                        {
                            FirstSortBlock.Post(new FirstSort(SortingFilePath(Index, FileCount), ToSortNewer, ToSortNewer.Length));
                            FileCount++;
                            ToSortNewerCursor = 0;
                            await FirstSortSemaphore.WaitAsync().ConfigureAwait(false);

                            if (!LongPool.TryTake(out ToSortNewer))
                            {
                                ToSortNewer = new long[InitialSortUnit];
                            }
                        }
                    }
                }
            }
            //Sort the leftover elements too; FirstSortingCount is no longer used, so it is left as is
            if (0 < ToSortNewerCursor)
            {
                FirstSortBlock.Post(new FirstSort(SortingFilePath(Index, FileCount), ToSortNewer, ToSortNewerCursor));
                FileCount++;
            }
            FirstSortBlock.Complete();
            //All sort buffers have been handed off, so release the arrays that are no longer needed
            LongPoolReturn = false;
            LongPool.Clear();

            await WriterBlock.Completion.ConfigureAwait(false);

            return(FileCount);
        }
        /// <summary>
        ///                              Receive Data
        ///                                    |
        ///                                Broadcast
        ///                  __________________|_________________
        ///              Document      Involved Parties        Order
        ///                 |__________________|_________________|
        ///                                    |
        ///                            Compile Information
        /// </summary>
        public void StartPipeline()
        {
            ////TODO Need to add a Buffer block to receive batch posts, then fill the pipeline
            //Broadcasts the received data
            broadcastOrder = new BroadcastBlock <Order>(order => order);
            //Joins the parallel branches back together so their outputs can be compiled
            var joinblock = new JoinBlock <DocumentTransform, InvolvedPartyTransforms, OrderTransform>(new GroupingDataflowBlockOptions {
                Greedy = false
            });
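            //Greedy = false makes the join take one message from each branch only when all three are available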

            //Add cancellation to the pipeline
            cancellationTokenSource = new CancellationTokenSource();

            ExecutionDataflowBlockOptions executionDataflowBlockOptions = new ExecutionDataflowBlockOptions
            {
                CancellationToken = cancellationTokenSource.Token
            };

            //Receives input orders
            receiveData = new TransformBlock <Order, Order>(order =>
            {
                return(order != null ? order : null);
            }, executionDataflowBlockOptions);

            //Processes Client/Lender information
            processInvolvedParties = new TransformBlock <Order, InvolvedPartyTransforms>(order =>
            {
                if (ClientVerification.VerifyClient(order.clientInformation).Result && ClientVerification.VerifyLender(order.lenderInformation).Result)
                {
                    ClientTransform clientTransform = new ClientTransform
                    {
                        ClientInformation = order.clientInformation
                    };
                    LenderTransform lenderTransform = new LenderTransform
                    {
                        lenderInformation = order.lenderInformation
                    };
                    InvolvedPartyTransforms involvedPartyTransforms = new InvolvedPartyTransforms
                    {
                        lenderTransform = lenderTransform,
                        clientTransform = clientTransform
                    };
                    return(involvedPartyTransforms);
                }
                return(null);
            }, executionDataflowBlockOptions);

            //Processes Order Information
            processOrder = new TransformBlock <Order, OrderTransform>(order =>
            {
                OrderTransform orderTransform = new OrderTransform
                {
                    order = order
                };
                return(orderTransform);
            }, executionDataflowBlockOptions);

            //TODO add more processing
            //Processes Documents from order
            processDocuments = new TransformBlock <Order, DocumentTransform>(order =>
            {
                List <Document> documents = order.documents;
                //if(documents.Count < 0)
                //{ return null; }

                //foreach(Document document in documents)
                //{
                //    if(document == null)
                //    { return null; }
                //}
                //TODO generate events
                DocumentTransform documentTransform = new DocumentTransform
                {
                    documents = documents
                };
                return(documentTransform);
            }, executionDataflowBlockOptions);

            //Compiles the information back after processing
            compileInformation = new TransformBlock <Tuple <DocumentTransform, InvolvedPartyTransforms, OrderTransform>, Order>((transforms) =>
            {
                Console.WriteLine("Compiling Order #{0}", transforms.Item3.order.orderId);
                Console.WriteLine("Lender: {0} | Client: {1}",
                                  transforms.Item2.lenderTransform.lenderInformation.lenderName,
                                  transforms.Item2.clientTransform.ClientInformation.clientFName + ' ' +
                                  transforms.Item2.clientTransform.ClientInformation.clientLName);
                return(transforms.Item3.order);
            }, executionDataflowBlockOptions);

            //Linking options
            var options = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            receiveData.LinkTo(broadcastOrder);
            //Broadcasts received data to processing blocks
            broadcastOrder.LinkTo(processInvolvedParties);
            broadcastOrder.LinkTo(processOrder);
            broadcastOrder.LinkTo(processDocuments);

            processDocuments.LinkTo(joinblock.Target1, options);
            processInvolvedParties.LinkTo(joinblock.Target2, options);
            processOrder.LinkTo(joinblock.Target3, options);
            joinblock.LinkTo(compileInformation, options);

            //Creates random orders and posts them to the Pipeline Batchblock
            //for (int i = 0; i < 50; i++)
            //{
            //    Random random = new Random();

            //    Order order = new Order().RandomOrder();

            //}

            //Currently just loads two orders manually
            Order orderRand = new Order().RandomOrder();

            var anotherOrder = new Order().RandomOrder();

            receiveData.Post(orderRand);
            receiveData.Post(anotherOrder);

            receiveData.Complete();


            const int TIMEOUT = 30000;
            //compileInformation.Completion.Wait(TIMEOUT);

            //Currently just receives two orders manually
            var test = compileInformation.ReceiveAsync(cancellationTokenSource.Token);

            Console.WriteLine("One: " + test.Result.orderId);

            test = compileInformation.ReceiveAsync(cancellationTokenSource.Token);

            Console.WriteLine("Two: " + test.Result.orderId);
        }
Beispiel #49
		public void AsyncNullTest ()
		{
			var scheduler = new TestScheduler ();
			var block = new TransformBlock<int, int> (
				i => null,
				new ExecutionDataflowBlockOptions { TaskScheduler = scheduler });

			Assert.IsTrue (block.Post (1));

			scheduler.ExecuteAll ();

			Assert.IsFalse (block.Completion.Wait (100));

			block.Complete ();

			Assert.IsTrue (block.Completion.Wait (100));
		}
Beispiel #50
        public static async Task <IEnumerable <Activity> > GetActivities(Folder folder, Route route, CancellationToken token)
        {
            if (null == folder)
            {
                throw new ArgumentNullException(nameof(folder));
            }
            if (null == route)
            {
                throw new ArgumentNullException(nameof(route));
            }

            using (SemaphoreSlim addItem = new SemaphoreSlim(1))
            {
                List <Activity> result = new List <Activity>();
                string          activitiesDirectory = route.RouteFolder.ActivitiesFolder;
                result.Add(DefaultExploreActivity);
                result.Add(ExploreThroughActivity);

                if (Directory.Exists(activitiesDirectory))
                {
                    TransformBlock <string, Activity> inputBlock = new TransformBlock <string, Activity>
                                                                       (activityFile =>
                    {
                        return(FromPath(activityFile, folder, route));
                    },
                                                                       new ExecutionDataflowBlockOptions {
                        MaxDegreeOfParallelism = System.Environment.ProcessorCount, CancellationToken = token
                    });


                    ActionBlock <Activity> actionBlock = new ActionBlock <Activity>
                                                             (async activity =>
                    {
                        if (activity == null)
                        {
                            return;
                        }
                        try
                        {
                            await addItem.WaitAsync(token).ConfigureAwait(false);
                            result.Add(activity);
                        }
                        finally
                        {
                            addItem.Release();
                        }
                    });
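                    // The addItem semaphore above serializes additions to the shared result list while activity files are parsed in parallel.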

                    inputBlock.LinkTo(actionBlock, new DataflowLinkOptions {
                        PropagateCompletion = true
                    });

                    foreach (string activityFile in Directory.EnumerateFiles(activitiesDirectory, "*.act"))
                    {
                        await inputBlock.SendAsync(activityFile).ConfigureAwait(false);
                    }

                    inputBlock.Complete();
                    await actionBlock.Completion.ConfigureAwait(false);
                }
                return(result);
            }
        }
Beispiel #51
        /// <summary>
        /// THIS CODE IS JUST AN EXAMPLE AND NOT INTENDED FOR PRODUCTION USE
        /// </summary>
        /// <param name="args"></param>
        static void Main(string[] args)
        {
            // Create the cancellation source.
            var cancellationSource = new CancellationTokenSource();

            var inputWorkBufferBlock = new BufferBlock<Uri>();

            // Input - Uri - seed address
            // Output - string - downloaded content
            var downloaderBlock = new TransformBlock<Uri, string>(address =>
                {
                    var httpClient = new HttpClient();
                    // Downloads the requested resource as a string.
                    Console.WriteLine("Downloading '{0}'... Thread id {1}", address.OriginalString, Thread.CurrentThread.ManagedThreadId);

                    var contentType = string.Empty;
                    var content = httpClient.GetAsync(address).ContinueWith(task =>
                        {
                            HttpResponseMessage response = task.Result;
                            if (task.Result.IsSuccessStatusCode)
                            {
                                return task.Result.Content.ReadAsStringAsync();
                            }

                            // Return an already-completed task; an unstarted Task<string> would never finish.
                            return Task.FromResult<string>(null);
                        }).Unwrap();

                    return content.Result;
                }, new ExecutionDataflowBlockOptions
                {
                    CancellationToken = cancellationSource.Token,
                    MaxDegreeOfParallelism = 5
                });

            var outputBufferBlock = new BufferBlock<string>();
            var saverBlock = new ActionBlock<string>(content =>
                {
                    if (content != null)
                    {
                        const string targetPath = "c:\\work\\tmp";
                        const string extension = ".html";
                        var fileName = Path.ChangeExtension(Path.Combine(targetPath, Path.GetRandomFileName()), extension);
                        Console.WriteLine("Saving {0} ...Thread: {1}", fileName, Thread.CurrentThread.ManagedThreadId);
                        using (var stream = new StreamWriter(fileName))
                        {
                            stream.Write(content);
                        }
                    }
                }, new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 1
                });

            // Blocks linking
            inputWorkBufferBlock.LinkTo(downloaderBlock);
            // Filtering: skip empty responses
            downloaderBlock.LinkTo(outputBufferBlock, s => !string.IsNullOrWhiteSpace(s));
            outputBufferBlock.LinkTo(saverBlock);

            // Propagating completion
            inputWorkBufferBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)downloaderBlock).Fault(t.Exception);
                else downloaderBlock.Complete();
            });
            downloaderBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)outputBufferBlock).Fault(t.Exception);
                else outputBufferBlock.Complete();
            });
            outputBufferBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)saverBlock).Fault(t.Exception);
                else saverBlock.Complete();
            });

            // Message passing
            inputWorkBufferBlock.Post(new Uri("http://svnbook.red-bean.com/nightly/ru/svn-book.html"));
            inputWorkBufferBlock.Post(new Uri("http://bash.im"));
            inputWorkBufferBlock.Post(new Uri("http://habrahabr.ru"));
            inputWorkBufferBlock.Post(new Uri("http://lb.ua"));
            inputWorkBufferBlock.Post(new Uri("http://blogs.msdn.com/b/pfxteam/"));
            inputWorkBufferBlock.Post(new Uri("http://hgbook.red-bean.com/read/a-tour-of-mercurial-merging-work.html"));
            inputWorkBufferBlock.Complete();

            saverBlock.Completion.Wait();
            Console.WriteLine("Job is DONE...");
            Console.WriteLine("Hit ANY KEY to exit...");
            Console.ReadKey();
        }
    public void Process(List <string> filters, CancellationToken token)
    {
        var linkOptions = new DataflowLinkOptions {
            PropagateCompletion = true
        };
        Func <IServiceMessage, RoutedMessage> partitioner = x => new RoutedMessage
        {
            Message     = x,
            PartitionId = x.Id.GetHashCode() / 1000000000
        };
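        // The PartitionId computed from the message id's hash code routes each message to one of the action blocks below.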

        var partitionerBlock = new TransformBlock <IServiceMessage, RoutedMessage>(partitioner);
        var actionBlock1     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock2     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock3     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock4     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock5     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock6     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock7     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock8     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));
        var actionBlock9     = new ActionBlock <RoutedMessage>(async(RoutedMessage msg) => await _service.ProcessAsync(msg));

        partitionerBlock.LinkTo(actionBlock1, linkOptions, msg => msg.PartitionId == -4);
        partitionerBlock.LinkTo(actionBlock2, linkOptions, msg => msg.PartitionId == -3);
        partitionerBlock.LinkTo(actionBlock3, linkOptions, msg => msg.PartitionId == -2);
        partitionerBlock.LinkTo(actionBlock4, linkOptions, msg => msg.PartitionId == -1);
        partitionerBlock.LinkTo(actionBlock5, linkOptions, msg => msg.PartitionId == 0);
        partitionerBlock.LinkTo(actionBlock6, linkOptions, msg => msg.PartitionId == 1);
        partitionerBlock.LinkTo(actionBlock7, linkOptions, msg => msg.PartitionId == 2);
        partitionerBlock.LinkTo(actionBlock8, linkOptions, msg => msg.PartitionId == 3);
        partitionerBlock.LinkTo(actionBlock9, linkOptions, msg => msg.PartitionId == 4);

        var tasks = new List <Task>();

        foreach (var filter in filters)
        {
            tasks.Add(Task.Run(async() =>
            {
                Guid filterId = Guid.NewGuid();

                while (!token.IsCancellationRequested)
                {
                    var message = await _serviceClient.GetAsync(filter);
                    message.Id  = filterId;
                    await partitionerBlock.SendAsync(message);
                }
            }));
        }

        while (!token.IsCancellationRequested)
        {
            Thread.Sleep(100);
        }

        partitionerBlock.Complete();
        actionBlock1.Completion.Wait();
        actionBlock2.Completion.Wait();
        actionBlock3.Completion.Wait();
        actionBlock4.Completion.Wait();
        actionBlock5.Completion.Wait();
        actionBlock6.Completion.Wait();
        actionBlock7.Completion.Wait();
        actionBlock8.Completion.Wait();
        actionBlock9.Completion.Wait();

        Task.WaitAll(tasks.ToArray(), 10000);
    }
		public Task RunAsync()
		{
			TransformBlock<Uri, PropertyBag> ingestBlock = new TransformBlock<Uri, PropertyBag>(input =>
			{
				PropertyBag result = new PropertyBag
				{
					OriginalUrl = input.ToString(),
					UserAgent = _userAgent,
					Step = new CrawlStep(input, 0)
				};

				return result;
			}, new ExecutionDataflowBlockOptions
			{
				MaxDegreeOfParallelism = MaxDegreeOfParallelism
			});

			TransformBlock<PropertyBag, PropertyBag> ingestBlockForAggregation =
				new TransformBlock<PropertyBag, PropertyBag>(input => input, new ExecutionDataflowBlockOptions
				{
					MaxDegreeOfParallelism = MaxDegreeOfParallelism
				});

			CrawlIngestionHelper crawlIngestionHelper = new CrawlIngestionHelper(ingestBlockForAggregation, _userAgent);
			TransformBlock<PropertyBag, PropertyBag>[] pipeline = Pipeline
				.Select(pipelineStep =>
				{
					return new TransformBlock<PropertyBag, PropertyBag>(async propertyBag =>
					{
						if (propertyBag.StopPipelining)
						{
							return propertyBag;
						}

						try
						{
							propertyBag.StopPipelining = !await pipelineStep.Process(crawlIngestionHelper, propertyBag);
						}
						catch (Exception exception)
						{
							propertyBag.Exceptions.Add(exception);
						}

						return propertyBag;
					}, new ExecutionDataflowBlockOptions
					{
						MaxDegreeOfParallelism = pipelineStep.MaxDegreeOfParallelism
					});
				})
				.ToArray();

			ActionBlock<PropertyBag> terminationCheckerBlock = new ActionBlock<PropertyBag>(propertyBag =>
			{
				if (ingestBlock.InputCount == 0
					&& ingestBlock.OutputCount == 0
					&& !ingestBlock.Completion.IsCompleted
					&& !ingestBlock.Completion.IsCanceled
					&& !ingestBlock.Completion.IsFaulted
					&& ingestBlockForAggregation.InputCount == 0
					&& ingestBlockForAggregation.OutputCount == 0)
				{
					if (pipeline.Any(transformBlock => transformBlock.InputCount != 0 || transformBlock.OutputCount != 0))
					{
						return;
					}

					ingestBlock.Complete();
				}
			}, new ExecutionDataflowBlockOptions {MaxDegreeOfParallelism = 1});
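			// Once the ingest blocks and every pipeline block have drained, the checker completes the ingest block; completion then propagates down the linked chain to this block.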

			ingestBlock.LinkTo(ingestBlockForAggregation, new DataflowLinkOptions {PropagateCompletion = true});
			TransformBlock<PropertyBag, PropertyBag> previous = ingestBlockForAggregation;
			foreach (TransformBlock<PropertyBag, PropertyBag> transformBlock in pipeline)
			{
				previous.LinkTo(transformBlock, new DataflowLinkOptions {PropagateCompletion = true});
				previous = transformBlock;
			}

			previous.LinkTo(terminationCheckerBlock, new DataflowLinkOptions {PropagateCompletion = true});
			foreach (Uri startUri in StartUris)
			{
				ingestBlock.Post(startUri);
			}

			return terminationCheckerBlock.Completion;
		}
Beispiel #54
        public static void Run()
        {
            //
            // Create the members of the pipeline.
            //
            // Downloads the requested resource as a string.
            TransformBlock <string, string> downloadString = new TransformBlock <string, string>(async uri =>
            {
                Console.WriteLine("Downloading '{0}'...", uri);
                Console.WriteLine("ThreadId: " + Thread.CurrentThread.ManagedThreadId);
                Console.WriteLine();
                return(await new HttpClient().GetStringAsync(uri));
            });

            // Separates the specified text into an array of words.
            TransformBlock <string, string[]> createWordList = new TransformBlock <string, string[]>(text =>
            {
                Console.WriteLine("Creating word list...");
                Console.WriteLine("ThreadId: " + Thread.CurrentThread.ManagedThreadId);
                Console.WriteLine();
                // Remove common punctuation by replacing all non-letter characters
                // with a space character.
                char[] tokens = text.Select(c => char.IsLetter(c) ? c : ' ').ToArray();
                text          = new string(tokens);

                // Separate the text into an array of words.
                return(text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
            });

            // Removes short words and duplicates.
            TransformBlock <string[], string[]> filterWordList = new TransformBlock <string[], string[]>(words =>
            {
                Console.WriteLine("Filtering word list...");
                Console.WriteLine("ThreadId: " + Thread.CurrentThread.ManagedThreadId);
                Console.WriteLine();
                return(words
                       .Where(word => word.Length > 3)
                       .Distinct()
                       .ToArray());
            });

            // Finds all words in the specified collection whose reverse also
            // exists in the collection.
            TransformManyBlock <string[], string> findReversedWords = new TransformManyBlock <string[], string>(words =>
            {
                Console.WriteLine("Finding reversed words...");
                Console.WriteLine("ThreadId: " + Thread.CurrentThread.ManagedThreadId);
                Console.WriteLine();
                HashSet <string> wordsSet = new HashSet <string>(words);

                return(from word in words.AsParallel()
                       let reverse = new string(word.Reverse().ToArray())
                                     where word != reverse && wordsSet.Contains(reverse)
                                     select word);
            });

            // Prints the provided reversed words to the console.
            ActionBlock <string> printReversedWords = new ActionBlock <string>(reversedWord =>
            {
                Console.WriteLine("Found reversed words {0}/{1}", reversedWord, new string(reversedWord.Reverse().ToArray()));
                Console.WriteLine("ThreadId: " + Thread.CurrentThread.ManagedThreadId);
                Console.WriteLine();
            });

            //
            // Connect the dataflow blocks to form a pipeline.
            //

            DataflowLinkOptions linkOptions = new DataflowLinkOptions
            {
                // Gets or sets whether the linked target will have completion and
                // faulting notification propagated to it automatically
                PropagateCompletion = true
            };

            downloadString.LinkTo(createWordList, linkOptions);
            createWordList.LinkTo(filterWordList, linkOptions);
            filterWordList.LinkTo(findReversedWords, linkOptions);
            findReversedWords.LinkTo(printReversedWords, linkOptions);

            // Process "The Iliad of Homer" by Homer.

            /*This example uses DataflowBlock.Post to synchronously send data to the head
             * of the pipeline. Use the DataflowBlock.SendAsync method when you must asynchronously
             * send data to a dataflow node.*/
            downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");

            /*Add the following code to mark the head of the pipeline as completed.
             * The head of the pipeline propagates its completion after it processes
             * all buffered messages.*/
            /*Signals to the IDataflowBlock that it should not accept nor produce any more
             * messages nor consume any more postponed messages*/
            downloadString.Complete();

            /*Add the following code to wait for the pipeline to finish. The overall operation is
             * finished when the tail of the pipeline finishes.*/
            // Wait for the last block in the pipeline to process all messages.
            printReversedWords.Completion.Wait();
        }
        public async Task TestCancellationExceptionsIgnored()
        {
            var t = new TransformBlock<int, int>(i => {
                if ((i % 2) == 0) throw new OperationCanceledException();
                return i;
            });
            t.PostRange(0, 2);
            t.Complete();
            for (int i = 0; i < 2; i++)
            {
                if ((i % 2) != 0)
                {
                    Assert.Equal(expected: i, actual: await t.ReceiveAsync());
                }
            }

            await t.Completion;
        }
        public async Task <RemoteSegmentWithData[]> GetRemoteMetadata(ImportedEvent[] events)
        {
            // asynchronously start downloading all the metadata we need
            Log.Info($"Begin downloading metadata for segments (Request concurrency: {this.maxDegreeOfParallelism})");

            var groupedEvents = new ConcurrentDictionary <long, ConcurrentBag <ImportedEvent> >(
                this.maxDegreeOfParallelism,
                events.Length / 10);

            // execution options allow us to throttle requests
            var options = new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism    = this.maxDegreeOfParallelism,
                SingleProducerConstrained = true,
            };

            // the transform block maps A->B, ensuring that all events have an audio recording id
            var getRecordingIdBlock = new TransformBlock <ImportedEvent, ImportedEvent>(
                (importedEvent) => this.GetAudioRecordingId(importedEvent),
                options);

            // all events are buffered into groups based on audio recording id
            var groupRecordingsBlock = new ActionBlock <ImportedEvent>(
                importedEvent =>
            {
                var collection = groupedEvents.GetOrAdd(
                    importedEvent.AudioRecordingId.Value,
                    new ConcurrentBag <ImportedEvent>());

                collection.Add(importedEvent);
            });

            // the metadata for each recording is retrieved and used to produce many segments (one per imported event)
            var createSegmentsBlock = new TransformManyBlock <KeyValuePair <long, ConcurrentBag <ImportedEvent> >, RemoteSegmentWithData>(
                (group) => this.DownloadRemoteMetadata(group.Key, group.Value),
                options);

            // the transform block can't `Complete` unless its output is empty
            // so add a buffer block to store the transform block's output
            var bufferBlock = new BufferBlock <RemoteSegmentWithData>();

            // link the two parts of block A
            getRecordingIdBlock.LinkTo(groupRecordingsBlock);

            // link the two parts of block B
            createSegmentsBlock.LinkTo(bufferBlock);

            // kick off the chain, resolve audio recording ids and group
            foreach (var record in events)
            {
                // post an event to the transform block to process
                getRecordingIdBlock.Post(record);
            }

            Log.Trace("Finished posting messages to recording id resolver");
            getRecordingIdBlock.Complete();

            Log.Trace("Waiting for getRecordingIdBlock to resolve");
            await getRecordingIdBlock.Completion;

            Log.Trace("Waiting for groupRecordingsBlock to resolve");
            groupRecordingsBlock.Complete();
            await groupRecordingsBlock.Completion;

            var eventCount = groupedEvents.Sum(kvp => kvp.Value.Count);

            Log.Trace($"Finished waiting for recording ids to resolve, {eventCount} events grouped into {groupedEvents.Count} recordings");

            // now post the grouped audio recordings to the segment generating block
            foreach (var keyValuePair in groupedEvents)
            {
                createSegmentsBlock.Post(keyValuePair);
            }

            Log.Trace("Finished posting messages to recording metadata downloader");
            createSegmentsBlock.Complete();

            // wait for all requests to finish
            Log.Trace("Begin waiting for metadata downloader");
            await createSegmentsBlock.Completion;

            Log.Trace("Finished waiting for metadata downloader");

            if (bufferBlock.TryReceiveAll(out var segments))
            {
                RemoteSegmentWithData[] segmentsArray;
                int finalEventCount;
                lock (segments)
                {
                    segmentsArray = segments.ToArray();

                    // do some excessive logic checking because we used to have race conditions
                    finalEventCount = segmentsArray.Sum(x => x.Data.Count);
                    if (events.Length != finalEventCount)
                    {
                        throw new InvalidOperationException(
                                  $"The number of supplied events ({events.Length}) did" +
                                  $" not match the number of events that had metadata resolved ({finalEventCount})" +
                                  " - a race condition has occurred");
                    }
                }

                Log.Info($"Metadata generated for {finalEventCount} events, {segmentsArray.Length} segments created");

                return(segmentsArray);
            }
            else
            {
                throw new InvalidOperationException("Failed to retrieve media info from data flow.");
            }
        }
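        // A stripped-down sketch of the pattern GetRemoteMetadata uses above (hypothetical
        // names and generic types, not the original API): a throttled TransformBlock resolves
        // a key per item, an ActionBlock groups items by that key, and a TransformManyBlock
        // fans each group out into results collected by a BufferBlock. PropagateCompletion
        // replaces the manual Complete()/await Completion sequencing of each linked pair.
        // Assumes System.Collections.Concurrent, System.Linq and System.Threading.Tasks.Dataflow.
        public static async Task<TResult[]> ResolveGroupAndExpandAsync<TItem, TResult>(
            IEnumerable<TItem> items,
            Func<TItem, long> resolveKey,
            Func<long, IEnumerable<TItem>, IEnumerable<TResult>> expandGroup,
            int maxDegreeOfParallelism)
        {
            var groups = new ConcurrentDictionary<long, ConcurrentBag<TItem>>();
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism };
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };

            // stage A: resolve a grouping key for every item, then collect items per key
            var resolve = new TransformBlock<TItem, (long Key, TItem Item)>(
                item => (resolveKey(item), item), options);
            var group = new ActionBlock<(long Key, TItem Item)>(
                pair => groups.GetOrAdd(pair.Key, _ => new ConcurrentBag<TItem>()).Add(pair.Item));
            resolve.LinkTo(group, linkOptions);

            // stage B: expand each group into zero or more results, buffered so the
            // TransformManyBlock can complete before its results are drained
            var expand = new TransformManyBlock<KeyValuePair<long, ConcurrentBag<TItem>>, TResult>(
                g => expandGroup(g.Key, g.Value), options);
            var buffer = new BufferBlock<TResult>();
            expand.LinkTo(buffer, linkOptions);

            foreach (var item in items) { resolve.Post(item); }
            resolve.Complete();
            await group.Completion;    // completion propagates: all items have been grouped here

            foreach (var g in groups) { expand.Post(g); }
            expand.Complete();
            await expand.Completion;   // all groups have been expanded into the buffer

            return buffer.TryReceiveAll(out var results) ? results.ToArray() : Array.Empty<TResult>();
        }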
        [InlineData(2, 1, false)] // ordering not forced, but dop == 1, so it doesn't matter
        public async Task TestOrdering_Async_OrderedEnabled(int mmpt, int dop, bool? EnsureOrdered)
        {
            const int iters = 1000;

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
            if (EnsureOrdered == null)
            {
                Assert.True(options.EnsureOrdered);
            }
            else
            {
                options.EnsureOrdered = EnsureOrdered.Value;
            }

            var tb = new TransformBlock<int, int>(i => Task.FromResult(i), options);
            tb.PostRange(0, iters); // test-suite helper: posts the integers 0 through iters - 1
            for (int i = 0; i < iters; i++)
            {
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }
            tb.Complete();
            await tb.Completion;
        }
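        // A self-contained sketch (not part of the original test, no xunit helpers) of the
        // guarantee the theory above exercises: with the default EnsureOrdered = true, a
        // TransformBlock hands results out in posting order even when it runs in parallel
        // and later items finish their transforms first.
        public static async Task OrderedOutputSketchAsync()
        {
            var block = new TransformBlock<int, int>(
                async i =>
                {
                    await Task.Delay(10 - (i % 10)); // later items in each batch finish sooner
                    return i;
                },
                new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4 });

            for (int i = 0; i < 100; i++)
            {
                block.Post(i);
            }

            block.Complete();

            for (int i = 0; i < 100; i++)
            {
                int received = await block.ReceiveAsync();
                if (received != i)
                {
                    throw new InvalidOperationException($"Out of order: expected {i}, got {received}");
                }
            }

            await block.Completion;
        }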
Example #58
        public static void main()
        {
            //
            // Create the members of the pipeline.
            //

            // Downloads the requested resource as a string.
            var downloadString = new TransformBlock<string, string>(async uri =>
            {
                Console.WriteLine("Downloading '{0}'...", uri);

                return(await new HttpClient().GetStringAsync(uri));
            });

            // Separates the specified text into an array of words.
            var createWordList = new TransformBlock<string, string[]>(text =>
            {
                Console.WriteLine("Creating word list...");

                // Remove common punctuation by replacing all non-letter characters
                // with a space character.
                char[] tokens = text.Select(c => char.IsLetter(c) ? c : ' ').ToArray();
                text          = new string(tokens);

                // Separate the text into an array of words.
                return(text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
            });

            // Removes short words and duplicates.
            var filterWordList = new TransformBlock<string[], string[]>(words =>
            {
                Console.WriteLine("Filtering word list...");

                return(words
                       .Where(word => word.Length > 3)
                       .Distinct()
                       .ToArray());
            });

            // Finds all words in the specified collection whose reverse also
            // exists in the collection.
            var findReversedWords = new TransformManyBlock<string[], string>(words =>
            {
                Console.WriteLine("Finding reversed words...");

                var wordsSet = new HashSet<string>(words);

                return(from word in words.AsParallel()
                       let reverse = new string(word.Reverse().ToArray())
                       where word != reverse && wordsSet.Contains(reverse)
                       select word);
            });

            // Prints the provided reversed words to the console.
            var printReversedWords = new ActionBlock<string>(reversedWord =>
            {
                Console.WriteLine("Found reversed words {0}/{1}",
                                  reversedWord, new string(reversedWord.Reverse().ToArray()));
            });

            //
            // Connect the dataflow blocks to form a pipeline.
            //

            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            downloadString.LinkTo(createWordList, linkOptions);
            createWordList.LinkTo(filterWordList, linkOptions);
            filterWordList.LinkTo(findReversedWords, linkOptions);
            findReversedWords.LinkTo(printReversedWords, linkOptions);

            // Process "The Iliad of Homer" by Homer.
            downloadString.Post("http://www.gutenberg.org/cache/epub/16452/pg16452.txt");

            // Mark the head of the pipeline as complete.
            downloadString.Complete();

            // Wait for the last block in the pipeline to process all messages.
            printReversedWords.Completion.Wait();
        }
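        // A minimal sketch (not part of the original example) of the completion pattern the
        // pipeline above relies on: with PropagateCompletion = true, completing the head block
        // is enough, and awaiting the tail block's Completion observes the whole chain. In an
        // async context this also avoids the blocking Completion.Wait() call used above.
        public static async Task CompletionPropagationSketchAsync()
        {
            var head = new TransformBlock<string, int>(s => s.Length);
            var tail = new ActionBlock<int>(len => Console.WriteLine("Length: {0}", len));

            head.LinkTo(tail, new DataflowLinkOptions { PropagateCompletion = true });

            head.Post("hello");
            head.Post("dataflow");

            head.Complete();        // completes the head...
            await tail.Completion;  // ...and, via propagation, eventually the tail
        }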
        public async Task TestOrdering_Sync_OrderedDisabled()
        {
            // If ordering were enabled, this test would hang: item 0 blocks on the event,
            // ordered delivery means item 1's result cannot be received before item 0's,
            // and the event is only set after item 1 has been received.

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = false };

            var mres = new ManualResetEventSlim();
            var tb = new TransformBlock<int, int>(i =>
            {
                if (i == 0) mres.Wait();
                return i;
            }, options);
            tb.Post(0);
            tb.Post(1);

            Assert.Equal(1, await tb.ReceiveAsync());
            mres.Set();
            Assert.Equal(0, await tb.ReceiveAsync());

            tb.Complete();
            await tb.Completion;
        }
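        // A sketch (not in the original test) of why the comment above holds. With the default
        // EnsureOrdered = true, item 0's result must be delivered before item 1's, but item 0 is
        // blocked until the event is set, and in the test the event is only set after item 1 has
        // been received - a deadlock. The timeout below makes that would-be hang observable.
        public static async Task OrderedVariantWouldHangSketchAsync()
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 }; // EnsureOrdered defaults to true
            var gate = new ManualResetEventSlim();
            var tb = new TransformBlock<int, int>(i =>
            {
                if (i == 0) gate.Wait();
                return i;
            }, options);

            tb.Post(0);
            tb.Post(1);

            Task<int> receive = tb.ReceiveAsync();
            Task winner = await Task.WhenAny(receive, Task.Delay(500));
            Console.WriteLine(winner == receive
                ? "Received a result (unexpected while item 0 is still blocked)"
                : "Still waiting: ordered delivery holds item 1's result behind item 0's");

            gate.Set();                                 // unblock item 0 so the block can drain
            Console.WriteLine(await receive);           // 0 arrives first...
            Console.WriteLine(await tb.ReceiveAsync()); // ...then 1

            tb.Complete();
            await tb.Completion;
        }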
Example #60
        internal static bool TenTransformsToAction()
        {
            const int ITERS = 2;
            var first = new TransformBlock<int, int>(item => item);

            TransformBlock<int, int> t = first;
            for (int i = 0; i < 9; i++)
            {
                var next = new TransformBlock<int, int>(item => item);
                t.LinkWithCompletion(next);
                t = next;
            }
            int completedCount = 0;
            var last = new ActionBlock<int>(i => completedCount++);
            t.LinkWithCompletion(last);

            for (int i = 0; i < ITERS; i++) first.Post(i);
            first.Complete();
            last.Completion.Wait();

            return completedCount == ITERS;
        }
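        // LinkWithCompletion above is a test-suite extension method, not part of the
        // System.Threading.Tasks.Dataflow API. A plausible equivalent (an assumption, not the
        // helper's actual source) is LinkTo with completion propagation, so that completing
        // `first` eventually completes every block downstream of it:
        internal static IDisposable LinkWithCompletionSketch<T>(ISourceBlock<T> source, ITargetBlock<T> target)
        {
            return source.LinkTo(target, new DataflowLinkOptions { PropagateCompletion = true });
        }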