public static ITargetBlock<string> SetupPipeline()
        {
            var fileNamesForPath = new TransformBlock<string, IEnumerable<string>>(
              path =>
              {
                  return GetFileNames(path);
              });

            var lines = new TransformBlock<IEnumerable<string>, IEnumerable<string>>(
              fileNames =>
              {
                  return LoadLines(fileNames);
              });

            var words = new TransformBlock<IEnumerable<string>, IEnumerable<string>>(
              lines2 =>
              {
                  return GetWords(lines2);
              });

            var display = new ActionBlock<IEnumerable<string>>(
              coll =>
              {
                  foreach (var s in coll)
                  {
                      WriteLine(s);
                  }
              });

       
            fileNamesForPath.LinkTo(lines);
            lines.LinkTo(words);
            words.LinkTo(display);
            return fileNamesForPath;
        }
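A minimal usage sketch for SetupPipeline (the directory path is a placeholder, and note that the LinkTo calls above do not set PropagateCompletion, so completion does not flow to the display block automatically):

            // Hypothetical caller: push one path through the head of the pipeline.
            var headBlock = SetupPipeline();
            headBlock.Post(@"C:\SomeDirectory");   // placeholder path
            headBlock.Complete();                  // no further paths will be posted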
        public void Run()
        {
            Console.WriteLine("Generating first {0} powers of 2.", MaxItems);
            var bufferBlock = new BufferBlock<int>();
            var transformBlock = new TransformBlock<int, double>(i =>
            {
                Thread.Sleep(500);
                return Math.Pow(2, i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 10 });

            var actionBlock = new ActionBlock<double>(async i =>
            {
                await Task.Delay(1000);
                Console.WriteLine(i);
                _waitHandle.Signal();
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 10 });

            bufferBlock.LinkTo(transformBlock);
            transformBlock.LinkTo(actionBlock);

            Enumerable.Range(1, MaxItems)
                .ToList()
                .ForEach(i => bufferBlock.Post(i));

            _waitHandle.Wait();
        }
        public async Task TransformThroughDiscardingFilterToAction()
        {
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, new DataflowLinkOptions { PropagateCompletion = true }, i => i % 2 == 0);
            t.LinkTo(DataflowBlock.NullTarget<int>());

            t.PostRange(0, Iterations);
            t.Complete();

            await c.Completion;
            Assert.Equal(expected: Iterations / 2, actual: completedCount);
        }
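PostRange is not part of the TPL Dataflow API; it is presumably a test helper in this suite. A minimal sketch, assuming it posts the integers from start up to (but not including) count:

        public static void PostRange(this ITargetBlock<int> target, int start, int count)
        {
            // Post each value in the half-open range [start, count) to the target block.
            for (int i = start; i < count; i++)
            {
                target.Post(i);
            }
        }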
        public void Run()
        {
            var cts = new CancellationTokenSource();

            Console.WriteLine("Generating first {0} powers of 2.", MaxItems);
            var bufferBlock = new BufferBlock<int>(new DataflowBlockOptions { CancellationToken = cts.Token });
            Enumerable.Range(1, MaxItems)
                .ToList()
                .ForEach(i => bufferBlock.Post(i));
            Console.WriteLine("Scheduling cancellation after 5 seconds.");
            cts.CancelAfter(TimeSpan.FromSeconds(5));

            Console.WriteLine("Creating and linking the remaing blocks to the network.");
            var transformBlock = new TransformBlock<int, double>(i =>
            {
                Thread.Sleep(500);
                return Math.Pow(2, i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1, CancellationToken = cts.Token });

            var actionBlock = new ActionBlock<double>(async i =>
            {
                await Task.Delay(1000);
                Console.WriteLine(i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 10, CancellationToken = cts.Token });

            bufferBlock.LinkTo(transformBlock, new DataflowLinkOptions { PropagateCompletion = true });
            transformBlock.LinkTo(actionBlock, new DataflowLinkOptions { PropagateCompletion = true });

            var t1 = bufferBlock.Completion.ContinueWith(t => Console.WriteLine("Buffer block status: {0}", t.Status));
            var t2 = actionBlock.Completion.ContinueWith(t => Console.WriteLine("Action block status: {0}", t.Status));
            Console.WriteLine("Waiting for the network to finish.");
            Task.WaitAll(t1, t2);
        }
        public void Start()
        {
            var sink = new ActionBlock<PageResultMessage>((Action<PageResultMessage>)Sink);
            var source = new BufferBlock<GetPageMessage>();
            var linkOptions = new DataflowLinkOptions {PropagateCompletion = false};

            for (int i = 0; i < 10; i++)
            {
                var options = new ExecutionDataflowBlockOptions
                    {
                        BoundedCapacity = 1
                    };
                var worker = new TransformBlock<GetPageMessage, PageResultMessage>(
                    (Func<GetPageMessage, PageResultMessage>)Worker, options);
                source.LinkTo(worker, linkOptions);
                worker.LinkTo(sink, linkOptions);
            }

            foreach (var url in UrlList.Urls)
            {
                source.Post(new GetPageMessage{ Url = url });
            }
            source.Complete();
            sink.Completion.Wait();
        }
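Because the links above use PropagateCompletion = false, completing source does not complete sink, so sink.Completion.Wait() relies on completion being wired up elsewhere. A hedged sketch of that wiring, assuming the worker blocks created in the loop are collected into a workers list:

            // When the source drains, complete every worker; once all workers
            // have finished, complete the sink so sink.Completion.Wait() returns.
            source.Completion.ContinueWith(_ => workers.ForEach(w => w.Complete()));
            Task.WhenAll(workers.Select(w => w.Completion))
                .ContinueWith(_ => sink.Complete());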
        public void Run()
        {
            Console.WriteLine("Generating first {0} powers of 2.", MaxItems);
            var bufferBlock = new BufferBlock<int>();
            Enumerable.Range(1, MaxItems)
                .ToList()
                .ForEach(i => bufferBlock.Post(i));

            Console.WriteLine("Signaling completion to the source block.");
            bufferBlock.Complete();
            Console.WriteLine("Done.");

            Console.WriteLine("Creating and linking the remaing blocks to the network.");
            var transformBlock = new TransformBlock<int, double>(i =>
            {
                Thread.Sleep(200);
                return Math.Pow(2, i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });

            var actionBlock = new ActionBlock<double>(async i =>
            {
                await Task.Delay(500);
                Console.WriteLine(i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 10 });

            bufferBlock.LinkTo(transformBlock, new DataflowLinkOptions { PropagateCompletion = true });
            transformBlock.LinkTo(actionBlock, new DataflowLinkOptions { PropagateCompletion = true });

            Console.WriteLine("Waiting for the completion to be propagated through the network...");
            actionBlock.Completion.ContinueWith(t =>
            {
                Console.WriteLine("Finished processing.");
                Console.WriteLine(string.Format("Completion status: {0}.", t.Status));
            }).Wait();
        }
Example #7
        public static async Task ValidateBlockAsync(ICoreStorage coreStorage, ICoreRules rules, Chain newChain, ISourceBlock<ValidatableTx> validatableTxes, CancellationToken cancelToken = default(CancellationToken))
        {
            // tally transactions
            object finalTally = null;
            var txTallier = new TransformBlock<ValidatableTx, ValidatableTx>(
                validatableTx =>
                {
                    var runningTally = finalTally;
                    rules.TallyTransaction(newChain, validatableTx, ref runningTally);
                    finalTally = runningTally;

                    return validatableTx;
                });
            validatableTxes.LinkTo(txTallier, new DataflowLinkOptions { PropagateCompletion = true });

            // validate transactions
            var txValidator = InitTxValidator(rules, newChain, cancelToken);

            // begin feeding the tx validator
            txTallier.LinkTo(txValidator, new DataflowLinkOptions { PropagateCompletion = true });

            // validate scripts
            var scriptValidator = InitScriptValidator(rules, newChain, cancelToken);

            // begin feeding the script validator
            txValidator.LinkTo(scriptValidator, new DataflowLinkOptions { PropagateCompletion = true });

            //TODO
            await PipelineCompletion.Create(
                new Task[] { },
                new IDataflowBlock[] { validatableTxes, txTallier, txValidator, scriptValidator });

            // validate overall block
            rules.PostValidateBlock(newChain, finalTally);
        }
Example #8
        public MessagePipeline()
        {
            linkOptions = new DataflowLinkOptions { PropagateCompletion = true };

            buildMessage = new TransformBlock<object, Messaging.Message>(
                x => {
                    Console.WriteLine("buildMessage| message: {0}", x);
                    return new Messaging.Message { Body = x };
                });

            logMessage = new TransformBlock<Messaging.Message, Messaging.Message>
                (x => {
                    Console.WriteLine("logMessage| MessageId: {0}. Body: {1}.", x.MessageId, x.Body);
                    return x;
                });

            sendMessage = new TransformBlock<Messaging.Message, Messaging.Message>(
                x => {
                    Console.WriteLine("sendMessage| MessageId: {0}. Body: {1}.", x.MessageId, x.Body);
                    return x;
                });

            buildMessage.LinkTo(logMessage, linkOptions);
            logMessage.LinkTo(sendMessage, linkOptions);
        }
        /// <summary>
        /// Asynchronously searches all fixed and removable disks for files with the given extensions.
        /// </summary>
        /// <param name="targetExtensions">The file extensions to use as the search pattern.</param>
        /// <param name="CTS">Cancellation token source used to cancel the search.</param>
        /// <example>
        /// FileExtension example:
        ///     {".jpg", 646546 bytes, 646 bytes}
        ///     {".pdf", 25464645546 bytes, 60000 bytes}
        /// </example>
        /// <returns>A sorted list of detected files.</returns>
        public static async Task<List<FileInfo>> DiskParallelProbingAsync(List<FileExtensionOption> targetExtensions, System.Threading.CancellationTokenSource CTS)
        {
            return await Task.Run(() =>
                {
                    searchComplete = false;
                    //
                    Reporter("DiskProbing", new ReportEventArgs("DiskProbing", ReportCodes.DiskProbingStarted, "---{Search Disks Started}---"));

                    List<FileInfo> _result = new List<FileInfo>();
                    //
                    // Find specific folders from windows drives instead of the total drive.
                    //
                    FolderInfo[] SpecificsDirectory = CheckDirectoriesChanges.GetDirectoriesInformation();
                    //
                    // Set Data-flow 
                    //
                    TransformBlock<FolderInfo, List<FileInfo>> TB = new TransformBlock<FolderInfo, List<FileInfo>>(dir =>
                    {
                        Reporter(dir, new ReportEventArgs("DiskProbing",
                            ReportCodes.TheSearchBeginning,
                            "Searching  {0} ...", dir.FullName));

                        List<FileInfo> res = dir.GetDirectoryInfo.SearchDirectory(targetExtensions, CTS);

                        Reporter(dir, new ReportEventArgs("DiskProbing",
                            ReportCodes.TheSearchCompleted,
                            "The Search  {0} was completed!", dir.FullName));

                        return res;
                    }, new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = Environment.ProcessorCount });

                    ActionBlock<List<FileInfo>> AB = new ActionBlock<List<FileInfo>>(lst => _result.AddRange(lst));

                    //
                    // Search specific folders from windows drives instead of the total drive.
                    //
                    try
                    {
                        TB.LinkTo(AB);

                        ParallelOptions opt = new ParallelOptions() { CancellationToken = CTS.Token, MaxDegreeOfParallelism = Environment.ProcessorCount };
                        var pLoop = Parallel.ForEach(SpecificsDirectory, opt, async dir => await TB.SendAsync(dir));

                        TB.Complete();
                        TB.Completion.Wait();
                    }
                    catch (Exception ex) { Reporter(SpecificsDirectory, new ReportEventArgs("SearchEngine.DiskProbing.SpecificsDirectory", ex)); }



                    searchComplete = true;
                    Reporter("DiskProbing", new ReportEventArgs("DiskProbing",
                        ReportCodes.DiskProbingFinished,
                        "---{Search Disks Finished}---"));

                    LastSearchResult = _result;
                    return _result;
                });
        }
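Note that Parallel.ForEach with an async lambda does not wait for the SendAsync calls it starts, so TB.Complete() can race ahead of them. A hedged alternative for the feeding step (same block and variable names, assuming System.Linq is in scope) that waits for every send to be accepted before completing:

                        // Send every directory and wait for all sends to be accepted
                        // before signaling completion, so no item is dropped.
                        var sendTasks = SpecificsDirectory.Select(dir => TB.SendAsync(dir, CTS.Token));
                        Task.WhenAll(sendTasks).Wait(CTS.Token);
                        TB.Complete();
                        TB.Completion.Wait();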
Example #10
        static public void ProcessingByTPL_StraightForwardImplementation()
        {
            const string pathToFiles = @"..\..\..\..\DataFiles";
            string[] files = Directory.GetFiles(pathToFiles, "*.txt", SearchOption.AllDirectories);

            var loadDataFromFileBlock = new TransformBlock<string[], List<CustomerTextData>>(fileItems =>
            {
                var factory = new CustomerTextDataFactory();
                return new List<CustomerTextData>(Array.ConvertAll(fileItems, factory.LoadFromFile));
            });
            var filterBlock = new TransformBlock<List<CustomerTextData>, List<CustomerTextData>>(textDataList =>
            {
                var filter = new FilterTextData(5);
                return textDataList.Where(filter.Run).ToList();
            });
            var toListBlock = new TransformManyBlock<List<CustomerTextData>, CustomerTextData>(textDataList =>
            {
                var queue = new ConcurrentQueue<CustomerTextData>();
                textDataList.ForEach(queue.Enqueue);
                return queue;
            });
            var action = new ActionBlock<CustomerTextData>(textData =>
            {
                var weight = new WeightTextData();
                int result = weight.Run(textData);
                Trace.WriteLine(result);
                Console.WriteLine(result);
            });

            loadDataFromFileBlock.LinkTo(filterBlock);
            filterBlock.LinkTo(toListBlock);
            toListBlock.LinkTo(action);

            loadDataFromFileBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)filterBlock).Fault(t.Exception);
                else filterBlock.Complete();
            });
            filterBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)toListBlock).Fault(t.Exception);
                else toListBlock.Complete();
            });
            toListBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted) ((IDataflowBlock)action).Fault(t.Exception);
                else action.Complete();
            });

            loadDataFromFileBlock.Post(files);
            loadDataFromFileBlock.Complete();
            action.Completion.Wait();
        }
Example #11
        static void Main(string[] args)
        {
            string s =
                "http://cn.bing.com/search?q=MD5CryptoServiceProvider+slow&qs=n&pq=md5cryptoserviceprovider+slow&sc=0-25&sp=-1&sk=&cvid=67d40cbd8c424d55a3db83e6e9868267&first=51&FORM=PERE4";
            using (MD5CryptoServiceProvider md5 = new MD5CryptoServiceProvider())
            {
                byte[] inBytes = Encoding.UTF8.GetBytes(s);
                var bytes = md5.ComputeHash(inBytes);
                Console.WriteLine(bytes.Length);
            }


            var splitter = new TransformBlock<string, KeyValuePair<string, int>>(
                input =>
                    {
                        var splitted = input.Split('=');
                        return new KeyValuePair<string, int>(splitted[0], int.Parse(splitted[1]));
                    });

            var dict = new Dictionary<string, int>();
            var aggregater = new ActionBlock<KeyValuePair<string, int>>(
                pair =>
                    {
                        int oldValue;
                        dict[pair.Key] = dict.TryGetValue(pair.Key, out oldValue) ? oldValue + pair.Value : pair.Value;
                    });

            splitter.LinkTo(aggregater, new DataflowLinkOptions() { PropagateCompletion = true});

            splitter.Post("a=1");
            splitter.Post("b=2");
            splitter.Post("a=5");

            splitter.Complete();
            aggregater.Completion.Wait();
            Console.WriteLine("sum(a) = {0}", dict["a"]); //prints sum(a) = 6


            //CalcAsync().Wait();
            //SlowFlowAsync().Wait();
            //FailDemoAsync().Wait();
            //TransformAndLinkDemo().Wait();
            //LinkLeftToDemo().Wait();
            //CircularFlowAutoComplete().Wait();
            //RecorderDemo().Wait();
            BulkInserterDemo().Wait();
            //BulkInserterDemo2().Wait();
            //BroadcasterDemo().Wait();
            //MyLoggerDemo().Wait();
            //ETLLookupDemo().Wait();
        }
        /// <summary>
        /// Initializes the transmitter asynchronously.
        /// Checks the server and then the database existence and ...
        /// </summary>
        public static async Task InitialTransmitterAsync()
        {
            await ServerValidatorAsync();

            ErrorListenerTransformBlock = new TransformBlock<ProxyError, Tuple<ProxyError, bool>>(
                async (e) => await TransmitOneError(e),
                new ExecutionDataflowBlockOptions()
                {
                    MaxMessagesPerTask = 1,
                    MaxDegreeOfParallelism = 1
                });

            ErrorListenerTransformBlock.LinkTo(CacheController.AcknowledgeActionBlock);
        }
Example #13
		public void DeferredUsageTest ()
		{
			int[] array = new int[10];
			var action = new ActionBlock<int> (i => array[Math.Abs (i)] = i);
			var block = new TransformBlock<int, int> (i => -i);

			for (int i = 0; i < array.Length; ++i)
				Assert.IsTrue (block.Post (i), "Not accepted");

			Thread.Sleep (300);
			block.LinkTo (action);
			Thread.Sleep (100);

			CollectionAssert.AreEqual (new[] { 0, -1, -2, -3, -4, -5, -6, -7, -8, -9 }, array);
		}
Example #14
		public void BasicUsageTest ()
		{
			int[] array = new int[10];
			var evt = new ManualResetEventSlim (false);
			ActionBlock<int> action = new ActionBlock<int> ((i) => { array[Math.Abs (i)] = i; evt.Set (); });
			TransformBlock<int, int> block = new TransformBlock<int, int> (i => -i);
			block.LinkTo (action);

			for (int i = 0; i < array.Length; ++i)
				Assert.IsTrue (block.Post (i), "Not accepted");

			evt.Wait ();

			CollectionAssert.AreEqual (new int[] { 0, -1, -2, -3, -4, -5, -6, -7, -8, -9 }, array);
		}
Example #15
        public Statsd(string serviceName = null)
        {
            _log.Info("statsd.net starting.");
            _tokenSource = new CancellationTokenSource();
            _shutdownComplete = new ManualResetEvent(false);

            SuperCheapIOC.Add(_log);
            var systemInfoService = new SystemInfoService();
            SuperCheapIOC.Add(systemInfoService as ISystemInfoService);
            serviceName = serviceName ?? systemInfoService.HostName;
            var systemMetricsService = new SystemMetricsService("statsdnet", serviceName);
            SuperCheapIOC.Add(systemMetricsService as ISystemMetricsService);

            /**
             * The flow is:
             *  Listeners ->
             *    Message Parser ->
             *      router ->
             *        Aggregator ->
             *          Broadcaster ->
             *            Backends
             */

            // Initialise the core blocks
            _router = new StatsdMessageRouterBlock();
            _messageParser = MessageParserBlockFactory.CreateMessageParserBlock(_tokenSource.Token,
              SuperCheapIOC.Resolve<ISystemMetricsService>(),
              _log);
            _messageParser.LinkTo(_router);
            _messageParser.Completion.LogAndContinueWith(_log, "MessageParser", () =>
              {
                  _log.Info("MessageParser: Completion signaled. Notifying the MessageBroadcaster.");
                  _messageBroadcaster.Complete();
              });
            _messageBroadcaster = new BroadcastBlock<Bucket>(Bucket.Clone);
            _messageBroadcaster.Completion.LogAndContinueWith(_log, "MessageBroadcaster", () =>
              {
                  _log.Info("MessageBroadcaster: Completion signaled. Notifying all backends.");
                  _backends.ForEach(q => q.Complete());
              });

            // Add the broadcaster to the IOC container
            SuperCheapIOC.Add<BroadcastBlock<Bucket>>(_messageBroadcaster);
            systemMetricsService.SetTarget(_messageBroadcaster);

            _backends = new List<IBackend>();
            _listeners = new List<IListener>();
        }
Example #16
        public void ProcessFile(string fileName)
        {
            var inputBlock = new BufferBlock<string>();

            var readBlock = new TransformBlock<string, string>(
                (input) =>
                {
                    Console.WriteLine("Loading " + input);
                    return File.ReadAllText(input);
                });

            var compileBlock = new TransformBlock<string, string>(
                (input) =>
                {
                    Console.WriteLine("Processing...");
                    var client = new WebClient();
                    client.Headers.Add("content-type", "application/x-www-form-urlencoded");
                    string apiData = string.Format(Data, HttpUtility.UrlEncode(input));
                    return client.UploadString(Url, apiData);
                });

            var convertBlock = new TransformBlock<string, XDocument>(
                (input) =>
                {
                    Console.WriteLine("Converting to XDocument...");
                    var xml = XDocument.Parse(input);
                    return xml;
                });

            var outputBlock = new ActionBlock<XDocument>(
                (input) =>
                {
                    Console.WriteLine("Writing compressed-" + fileName);
                    XElement compiledCode = input.Element("compilationResult").Element("compiledCode");
                    File.WriteAllText("compressed-" + fileName, compiledCode.Value);
                });

            inputBlock.LinkTo(readBlock, new DataflowLinkOptions { PropagateCompletion = true });
            readBlock.LinkTo(compileBlock, new DataflowLinkOptions { PropagateCompletion = true });
            compileBlock.LinkTo(convertBlock, new DataflowLinkOptions { PropagateCompletion = true });
            convertBlock.LinkTo(outputBlock, new DataflowLinkOptions { PropagateCompletion = true });

            inputBlock.Post(fileName);
            inputBlock.Complete();
            outputBlock.Completion.Wait();
        }
Example #17
        internal static bool TransformThroughFilterToAction()
        {
            const int ITERS = 2;
            int completedCount = 0;

            var t = new TransformBlock<int, int>(i => i);
            var c = new ActionBlock<int>(i => completedCount++);

            t.LinkTo(c, i => true);
            t.Completion.ContinueWith(_ => c.Complete());

            for (int i = 0; i < ITERS; i++) t.Post(i);
            t.Complete();
            c.Completion.Wait();

            return completedCount == ITERS;
        }
Example #18
        public static void Example1()
        {
            var conf = new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = 4 };

            ActionBlock<int> a = new ActionBlock<int>(i =>
            {
                Thread.Sleep(500);
                Console.WriteLine(i);
            }, conf);
            TransformBlock<int, int> t = new TransformBlock<int, int>(i => i * 3);

            t.LinkTo(a);

            for (int i = 0; i < 12; i++)
            {
                t.Post(i);
            }
        }
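As written, Example1 returns right after posting, so the ActionBlock may still be working. A hedged addition (same block names) to drain the pipeline before returning:

            // Complete the transform block, forward completion to the action block
            // by hand (the LinkTo above does not propagate it), and wait for it.
            t.Complete();
            t.Completion.ContinueWith(_ => a.Complete());
            a.Completion.Wait();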
Example #19
        public void Run()
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 };

            var tb = new TransformBlock<int, int>(i => i * 2);
            var ab = new ActionBlock<int>(i => this.Compute(i), options);
            tb.LinkTo(ab);

            for (var i = 0; i < 10; i++)
            {
                tb.Post(i);
            }

            tb.Complete();
            tb.Completion.Wait();

            Thread.Sleep(500);
        }
Example #20
        public async Task TransformToAction()
        {
            var t = new TransformBlock<int, int>(i => i * 2);
            int completedCount = 0;
            int prev = -2;
            var c = new ActionBlock<int>(i =>
            {
                completedCount++;
                Assert.Equal(expected: i, actual: prev + 2);
                prev = i;
            });
            t.LinkTo(c, new DataflowLinkOptions { PropagateCompletion = true });

            t.PostRange(0, Iterations);
            t.Complete();

            await c.Completion;
            Assert.True(completedCount == Iterations);
        }
Example #21
		public void BasicUsageTest ()
		{
			int[] array = new int[10];
			var evt = new CountdownEvent (10);
			var action = new ActionBlock<int> (i =>
			{
				array [Math.Abs (i)] = i;
				evt.Signal ();
			});
			var block = new TransformBlock<int, int> (i => -i);
			block.LinkTo (action);

			for (int i = 0; i < array.Length; ++i)
				Assert.IsTrue (block.Post (i), "Not accepted");

			evt.Wait ();

			CollectionAssert.AreEqual (
				new[] { 0, -1, -2, -3, -4, -5, -6, -7, -8, -9 }, array);
		}
        public void Run()
        {
            Console.WriteLine("Generating first 10 powers of 2.");
            var bufferBlock = new BufferBlock<int>();
            Enumerable.Range(1, 10)
                .ToList()
                .ForEach(i => bufferBlock.Post(i));

            var transformBlock = new TransformBlock<int, double>(i =>
            {
                Console.WriteLine("Raising 2 to the power of {0}.", i);
                if (i == 5)
                {
                    Console.WriteLine("32 is so mainstream... Throwing exception...");
                    throw null;
                }

                return Math.Pow(2, i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 });

            var actionBlock = new ActionBlock<double>(async i =>
            {
                await Task.Delay(500);
                Console.WriteLine(i);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 10 });

            var completion = actionBlock.Completion.ContinueWith(t =>
            {
                Console.WriteLine("Processing failed.");
                Console.WriteLine(t.Exception.Message);
            }, TaskContinuationOptions.OnlyOnFaulted);

            var options = new DataflowLinkOptions { PropagateCompletion = true };
            bufferBlock.LinkTo(transformBlock, options);
            transformBlock.LinkTo(actionBlock, options);

            completion.Wait();
        }
Example #23
        static void Main(string[] args)
        {
            var multiplyBlock = new TransformBlock<int, int>(value => value * 2);
            var subtractBlock = new TransformBlock<int, int>(value => value - 2);
            var displayBlock = new ActionBlock<int>(value => Console.WriteLine(value));

            // multiplyBlock ==> subtractBlock ==> displayBlock
            var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
            multiplyBlock.LinkTo(subtractBlock, linkOptions);
            subtractBlock.LinkTo(displayBlock, linkOptions);

            // Put data in the first block (multiplyBlock)
            foreach (var i in Enumerable.Range(0, 10))
                multiplyBlock.Post(i);

            // Mark it as complete. Completion will propagate because of the link options.
            multiplyBlock.Complete();

            // Wait for the last block (displayBlock) to complete.
            displayBlock.Completion.Wait();

            Console.ReadKey();
        }
Example #24
        public static async Task TData()
        {
            var multiplyBlock = new TransformBlock<int, int>(item =>
            {
                var res = item * 2;
                Console.WriteLine("{0} * 2 = {1}", item, res);
                return res;
            });

            var divideBlock = new TransformBlock<int, int>(item =>
            {
                var res = item / 2;
                Console.WriteLine("{0} / 2 = {1}", item, res);
                return res;
            });

            multiplyBlock.LinkTo(divideBlock, new DataflowLinkOptions { PropagateCompletion = true }); // propagate completion so the await below can finish

            multiplyBlock.Post(2);

            multiplyBlock.Complete();
            await divideBlock.Completion;
        }
Example #25
        private static void Main(string[] args)
        {
            var generatorBlock = new TransformManyBlock<int, string>(num => GenerateStrings(num));
            var writerBlock = new TransformBlock<string, string>(str => WriteString(str), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 5 });
            var finishingBlock = new ActionBlock<string>(str =>
            {
                writerBlock.Completion.Wait();
                Console.WriteLine(Thread.CurrentThread.ManagedThreadId + ": finished - " + str);
            });

            generatorBlock.LinkTo(writerBlock, new DataflowLinkOptions{PropagateCompletion = true});
            writerBlock.LinkTo(finishingBlock, new DataflowLinkOptions { PropagateCompletion = true });

            for (var i = 1; i <= 3; i++)
            {
                Console.WriteLine("Posted " + i*10);
                generatorBlock.Post(i*10);
            }
            generatorBlock.Complete();
            finishingBlock.Completion.Wait();

            Console.WriteLine("Pipeline is finished");
            Console.ReadKey();
        }
Example #26
        public static Task GenerateAnnotatedPathsAsync(IEnumerable<CollectionPath> paths, Func<FileInfo, bool> filePredicate,
            ITargetBlock<AnnotatedPath[]> filePathTargetBlock, CancellationToken cancellationToken)
        {
            var shuffleBlock = new TransformBlock<AnnotatedPath[], AnnotatedPath[]>(
                filenames =>
                {
                    // Sequential names tend to fall into the same AWS S3 partition, so we
                    // shuffle things around.
                    RandomUtil.Shuffle(filenames);

                    return filenames;
                }, new ExecutionDataflowBlockOptions { CancellationToken = cancellationToken, MaxDegreeOfParallelism = Environment.ProcessorCount });

            shuffleBlock.LinkTo(filePathTargetBlock, new DataflowLinkOptions { PropagateCompletion = true });

            var batcher = new BatchBlock<AnnotatedPath>(2048, new GroupingDataflowBlockOptions { CancellationToken = cancellationToken });

            batcher.LinkTo(shuffleBlock, new DataflowLinkOptions
            {
                PropagateCompletion = true
            });

            return PostAllFilePathsAsync(paths, filePredicate, batcher, cancellationToken);
        }
Example #27
 public IDisposable LinkTo(ITargetBlock <OutputFile> target, DataflowLinkOptions linkOptions)
 {
     return(_generateTestClassBlock.LinkTo(target, linkOptions));
 }
Example #28
        async static Task ProcessAsynchronously()
        {
            var    cts  = new CancellationTokenSource();
            Random _rnd = new Random(DateTime.Now.Millisecond);

            Task.Run(() =>
            {
                if (Console.ReadKey().KeyChar == 'c')
                {
                    cts.Cancel();
                }
            }, cts.Token);

            // BufferBlock: passes elements on to the next block in the flow
            // BoundedCapacity: specifies the block's capacity; once exceeded, no new elements are accepted until an existing element has been passed to the next block
            var inputBlock = new BufferBlock <int>(new DataflowBlockOptions {
                BoundedCapacity = 5, CancellationToken = cts.Token
            });

            // TransformBlock: used for data transformation steps
            //   MaxDegreeOfParallelism: specifies the maximum number of worker threads
            // Convert int to decimal
            var convertToDecimalBlock = new TransformBlock <int, decimal>(n =>
            {
                decimal result = Convert.ToDecimal(n * 100);
                Console.WriteLine($"Decimal Converter sent {result} to the next stage on {Thread.CurrentThread.ManagedThreadId}");
                Thread.Sleep(TimeSpan.FromMilliseconds(_rnd.Next(200)));
                return(result);
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = 4, CancellationToken = cts.Token
            });

            // Convert decimal to string
            var stringifyBlock = new TransformBlock <decimal, string>(n =>
            {
                string result = $"--{n.ToString("C", CultureInfo.GetCultureInfo("en-us"))}--";
                Console.WriteLine($"String Formatter sent {result} to the next stage on {Thread.CurrentThread.ManagedThreadId}");
                Thread.Sleep(TimeSpan.FromMilliseconds(_rnd.Next(200)));
                return(result);
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = 4, CancellationToken = cts.Token
            });

            // ActionBlock: runs a specified action on every incoming element
            var outputBlock = new ActionBlock <string>(s => {
                Console.WriteLine($"The final result is {s} on thread id {Thread.CurrentThread.ManagedThreadId}");
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = 4, CancellationToken = cts.Token
            });

            // Connect these blocks together with the LinkTo method
            // PropagateCompletion = true: when the current stage completes, its result and any exception are automatically propagated to the next stage
            inputBlock.LinkTo(convertToDecimalBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            convertToDecimalBlock.LinkTo(stringifyBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            stringifyBlock.LinkTo(outputBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });

            try
            {
                // Add items to the block
                Parallel.For(0, 20, new ParallelOptions {
                    MaxDegreeOfParallelism = 4, CancellationToken = cts.Token
                }, i => {
                    Console.WriteLine($"added {i} to source data on thread id {Thread.CurrentThread.ManagedThreadId}");
                    inputBlock.SendAsync(i).GetAwaiter().GetResult();
                });
                // Call Complete once all items have been added
                inputBlock.Complete();
                // Wait for the final block to complete
                await outputBlock.Completion;
                Console.WriteLine("Press ENTER to exit.");
            }
            catch (OperationCanceledException)
            {
                Console.WriteLine("Operation has been canceled! Press ENTER to exit.");
            }

            Console.ReadLine();
        }
Example #29
        static async Task Main(string[] args)
        {
            Console.WriteLine("Start");
            var option = new ExecutionDataflowBlockOptions {
                BoundedCapacity = 2
            };
            var option1 = new ExecutionDataflowBlockOptions {
                BoundedCapacity = 2, MaxDegreeOfParallelism = 3
            };
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            var downloader = new TransformBlock <string, byte[]>(async url =>
            {
                using (var http = new HttpClient())
                {
                    var data = await http.GetByteArrayAsync(url).ConfigureAwait(false);
                    return(data);
                }
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = 10
            });

            var toImage = new TransformBlock <byte[], Image <Rgba32> >(buffer => Image.Load(buffer));

            var grayscale = new TransformBlock <Image <Rgba32>, Image <Rgba32> >(image =>
            {
                image.Mutate(x => x.Grayscale());
                return(image);
            });
            var oilPaint = new TransformBlock <Image <Rgba32>, Image <Rgba32> >(image =>
            {
                Console.Write("+OP ");
                image.Mutate(x => x.OilPaint());
                Console.Write("-OP ");
                return(image);
            }, option1);
            var pixelate = new TransformBlock <Image <Rgba32>, Image <Rgba32> >(image =>
            {
                Console.Write("+PX ");
                image.Mutate(x => x.Pixelate());
                Console.Write("-PX ");
                return(image);
            }, option);
            var blur = new TransformBlock <Image <Rgba32>, Image <Rgba32> >(image =>
            {
                Console.Write("+BL ");
                image.Mutate(x => x.GaussianBlur());
                Console.Write("-BL ");
                return(image);
            }, option1);
            var save = new ActionBlock <Image <Rgba32> >(async image =>
            {
                using (var f = new FileStream($"{Guid.NewGuid():N}.jpg",
                                              FileMode.Create, FileAccess.Write, FileShare.None, 4096,
                                              FileOptions.Asynchronous))
                    using (var mem = new MemoryStream())
                    {
                        image.SaveAsJpeg(mem);
                        mem.Position = 0;
                        await mem.CopyToAsync(f).ConfigureAwait(false);
                    }
            });

            toImage.LinkTo(grayscale, linkOptions);
            grayscale.LinkTo(oilPaint, linkOptions);
            grayscale.LinkTo(blur, linkOptions);
            grayscale.LinkTo(pixelate, linkOptions);
            pixelate.LinkTo(save);
            oilPaint.LinkTo(save);
            blur.LinkTo(save);
            Task _ = Task.WhenAll(pixelate.Completion, oilPaint.Completion, blur.Completion)
                     .ContinueWith(c => save.Complete());

            for (int i = 0; i < 30; i++)
            {
                downloader.Post(URL);
            }
            await Task.Delay(4000).ConfigureAwait(false);

            downloader.LinkTo(toImage);
            downloader.Complete();

            await save.Completion.ConfigureAwait(false);

            Console.WriteLine("Done");


            Console.ReadKey();
        }
Example #30
        public static void Start()
        {
            const int bc = 1;
            // Download a book as a string
            var downloadBook = new TransformBlock <string, string>(uri =>
            {
                Console.WriteLine("Downloading the book...");

                return(new WebClient().DownloadString(uri));
            }, new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = bc
            });


            // splits text into an array of strings.
            var createWordList = new TransformBlock <string, string[]>(text =>
            {
                Console.WriteLine("Creating list of words...");

                // Remove punctuation
                char[] tokens = text.ToArray();
                for (int i = 0; i < tokens.Length; i++)
                {
                    if (!char.IsLetter(tokens[i]))
                    {
                        tokens[i] = ' ';
                    }
                }
                text = new string(tokens);

                return(text.Split(new char[] { ' ' },
                                  StringSplitOptions.RemoveEmptyEntries));
            }, new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = bc
            });

            // Remove short words and return the count
            var filterWordList = new TransformBlock <string[], int>(words =>
            {
                Console.WriteLine("Counting words...");

                var wordList = words.Where(word => word.Length > 3).OrderBy(word => word)
                               .Distinct().ToArray();
                return(wordList.Count());
            }, new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity = bc
            });

            var printWordCount = new ActionBlock <int>(wordcount =>
            {
                Console.WriteLine("Found {0} words",
                                  wordcount);
            });

            downloadBook.LinkTo(createWordList);
            createWordList.LinkTo(filterWordList);
            filterWordList.LinkTo(printWordCount);

            // For each completion task in the pipeline, create a continuation task
            // that marks the next block in the pipeline as completed.
            // A completed dataflow block processes any buffered elements, but does
            // not accept new elements.
            //

            downloadBook.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)createWordList).Fault(t.Exception);
                }
                else
                {
                    createWordList.Complete();
                }
            });
            createWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)filterWordList).Fault(t.Exception);
                }
                else
                {
                    filterWordList.Complete();
                }
            });
            filterWordList.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)printWordCount).Fault(t.Exception);
                }
                else
                {
                    printWordCount.Complete();
                }
            });

            // Download Origin of Species
            downloadBook.Post("http://www.gutenberg.org/files/2009/2009.txt");
            downloadBook.Post("http://www.gutenberg.org/files/2010/2010.txt");
            downloadBook.Post("http://www.gutenberg.org/files/2011/2011.txt");

            // Mark the head of the pipeline as complete.
            downloadBook.Complete();

            printWordCount.Completion.Wait();

            Console.WriteLine("Finished. Press any key to exit.");
            Console.ReadLine();
        }
Example #31
File: Pipeline.cs Project: SashaVuu/MPP
        public async Task Execute(List <string> filesPath, string outputPath)
        {
            // File reading block
            // path, content
            var readingBlock = new TransformBlock <string, string>(
                async FilePath =>
            {
                Console.WriteLine("File path:  " + FilePath);
                using (StreamReader reader = new StreamReader(FilePath))
                {
                    return(await reader.ReadToEndAsync());
                }
            },
                new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = pipelineConfig.MaxReadingTasks
            }
                );


            // Test generation block
            // code, tests
            var generateTestClass = new TransformManyBlock <string, TestStructure>(
                async Code =>
            {
                Console.WriteLine("Generating tests... ");
                return(await TestCreator.Generate(Code));
            },
                new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = pipelineConfig.MaxProcessingTasks
            }
                );


            // Block that writes the generated test to a file
            // TestStructure
            var writeGeneratedFile = new ActionBlock <TestStructure>(
                async testClass =>
            {
                string fullpath = Path.Combine(outputPath, testClass.TestName);
                Console.WriteLine("Fullpath " + fullpath);
                using (StreamWriter writer = new StreamWriter(fullpath))
                {
                    await writer.WriteAsync(testClass.TestCode);
                }
            },
                new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = pipelineConfig.MaxWritingTasks
            }
                );


            //Successful or unsuccessful completion of one block in the pipeline will
            //cause completion of the next block in the pipeline
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };


            readingBlock.LinkTo(generateTestClass, linkOptions);
            generateTestClass.LinkTo(writeGeneratedFile, linkOptions);


            foreach (string path in filesPath)
            {
                readingBlock.Post(path);
            }

            //Mark the head of the pipeline as complete.
            readingBlock.Complete();

            await writeGeneratedFile.Completion;
        }
Example #32
        /// <summary>
        /// <see cref="DualParallelDispatcherRemoteNode{TInput1,TInput2,TOutput1,TOutput2}"/>
        /// </summary>
        /// <param name="persistentCache">Persistent cache to avoid dropped data on system crash</param>
        /// <param name="progress">Progress of the current bulk</param>
        /// <param name="host"><see cref="Host"/></param>
        /// <param name="cts"><see cref="CancellationTokenSource"/></param>
        /// <param name="circuitBreakerOptions"><see cref="CircuitBreakerOptions"/></param>
        /// <param name="clusterOptions"><see cref="ClusterOptions"/></param>
        /// <param name="logger"><see cref="ILogger"/></param>
        public DualParallelDispatcherRemoteNode(
            IAppCache persistentCache,
            IProgress <double> progress,
            Host host,
            CancellationTokenSource cts,
            CircuitBreakerOptions circuitBreakerOptions,
            ClusterOptions clusterOptions,
            ILogger logger) : base(
                Policy.Handle <Exception>()
                .AdvancedCircuitBreakerAsync(circuitBreakerOptions.CircuitBreakerFailureThreshold,
                                             circuitBreakerOptions.CircuitBreakerSamplingDuration,
                                             circuitBreakerOptions.CircuitBreakerMinimumThroughput,
                                             circuitBreakerOptions.CircuitBreakerDurationOfBreak,
                                             onBreak: (ex, timespan, context) =>
        {
            logger.LogError(
                $"Batch processor breaker: Breaking the circuit for {timespan.TotalMilliseconds}ms due to {ex.Message}.");
        },
                                             onReset: context =>
        {
            logger.LogInformation(
                "Batch processor breaker: Succeeded, closed the circuit.");
        },
                                             onHalfOpen: () =>
        {
            logger.LogWarning(
                "Batch processor breaker: Half-open, next call is a trial.");
        }), clusterOptions, progress, cts, logger)
        {
            _logger         = logger;
            _clusterOptions = clusterOptions;

            ISubject <LinkedItem <TInput1> > item1DispatcherSubject = new Subject <LinkedItem <TInput1> >();
            _item1SynchronizedDispatcherSubject             = Subject.Synchronize(item1DispatcherSubject);
            _item1SynchronizedDispatcherSubjectSubscription = _item1SynchronizedDispatcherSubject
                                                              .ObserveOn(new EventLoopScheduler(ts => new Thread(ts)))
                                                              .Select(item =>
            {
                return(Observable.FromAsync(() => persistentCache.AddItem1Async(item.Key.ToString(), item.Entity,
                                                                                item.CancellationTokenSource.Token)));
            })
                                                              .Merge()
                                                              .Subscribe();

            ISubject <LinkedItem <TInput2> > item2DispatcherSubject = new Subject <LinkedItem <TInput2> >();
            _item2SynchronizedDispatcherSubject             = Subject.Synchronize(item2DispatcherSubject);
            _item2SynchronizedDispatcherSubjectSubscription = _item2SynchronizedDispatcherSubject
                                                              .ObserveOn(new EventLoopScheduler(ts => new Thread(ts)))
                                                              .Select(item =>
            {
                return(Observable.FromAsync(() => persistentCache.AddItem2Async(item.Key.ToString(), item.Entity,
                                                                                item.CancellationTokenSource.Token)));
            })
                                                              .Merge()
                                                              .Subscribe();

            var channel = new Channel(host.MachineName, host.Port,
                                      ChannelCredentials.Insecure);
            _remoteContract      = MagicOnionClient.Create <IRemoteContract <TOutput1, TOutput2> >(channel);
            _item1RemoteContract = MagicOnionClient.Create <IOutputItem1RemoteContract <TInput1, TOutput1> >(channel);
            _item2RemoteContract = MagicOnionClient.Create <IOutputItem2RemoteContract <TInput2, TOutput2> >(channel);
            IRemoteNodeSubject nodeReceiver = new NodeReceiver(_logger);
            _remoteNodeHealthSubscription =
                nodeReceiver.RemoteNodeHealthSubject.Subscribe(remoteNodeHealth =>
            {
                NodeMetrics.RemoteNodeHealth = remoteNodeHealth;
            });
            _nodeHub = StreamingHubClient.Connect <INodeHub, INodeReceiver>(channel, (INodeReceiver)nodeReceiver);

            NodeMetrics = new NodeMetrics(Guid.NewGuid());

            var item1ProcessSource = new ConcurrentDictionary <Guid, TOutput1>();
            var item2ProcessSource = new ConcurrentDictionary <Guid, TOutput2>();
            var joinBlock          =
                new JoinBlock <KeyValuePair <Guid, CancellationTokenSource>, KeyValuePair <Guid, CancellationTokenSource> >(
                    new GroupingDataflowBlockOptions {
                Greedy = false
            });
            _item1Source =
                new TransformBlock <Tuple <Guid, TOutput1, CancellationTokenSource>,
                                    KeyValuePair <Guid, CancellationTokenSource>
                                    >(source =>
            {
                if (!item1ProcessSource.ContainsKey(source.Item1) &&
                    !item1ProcessSource.TryAdd(source.Item1, source.Item2))
                {
                    _logger.LogError(
                        $"Could not add item of type {source.Item2.GetType()} and key {source.Item1.ToString()} to the buffer.");
                }

                return(new KeyValuePair <Guid, CancellationTokenSource>(source.Item1, source.Item3));
            });
            _item2Source =
                new TransformBlock <Tuple <Guid, TOutput2, CancellationTokenSource>,
                                    KeyValuePair <Guid, CancellationTokenSource>
                                    >(
                    source =>
            {
                if (!item2ProcessSource.ContainsKey(source.Item1) &&
                    !item2ProcessSource.TryAdd(source.Item1, source.Item2))
                {
                    _logger.LogError(
                        $"Could not add item of type {source.Item2.GetType()} and key {source.Item1.ToString()} to the buffer.");
                }

                return(new KeyValuePair <Guid, CancellationTokenSource>(source.Item1, source.Item3));
            });

            var processBlock = new ActionBlock <Tuple <KeyValuePair <Guid, CancellationTokenSource>,
                                                       KeyValuePair <Guid, CancellationTokenSource> > >(
                async combined =>
            {
                var policy = Policy
                             .Handle <Exception>(ex => !(ex is TaskCanceledException || ex is OperationCanceledException))
                             .WaitAndRetryAsync(_clusterOptions.RetryAttempt,
                                                retryAttempt =>
                                                TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)),
                                                (exception, sleepDuration, retry, context) =>
                {
                    if (retry >= _clusterOptions.RetryAttempt)
                    {
                        _logger.LogError(
                            $"Could not process item after {retry} retry times: {exception.Message}");
                    }
                });

                var policyResult = await policy.ExecuteAndCaptureAsync(async ct =>
                {
                    try
                    {
                        if (CpuUsage > _clusterOptions.LimitCpuUsage)
                        {
                            var suspensionTime = (CpuUsage - _clusterOptions.LimitCpuUsage) / CpuUsage * 100;
                            await Task.Delay((int)suspensionTime, ct);
                        }

                        if (item1ProcessSource.ContainsKey(combined.Item1.Key) &&
                            item2ProcessSource.ContainsKey(combined.Item2.Key) &&
                            item1ProcessSource.TryGetValue(combined.Item1.Key, out var item1) &&
                            item2ProcessSource.TryGetValue(combined.Item2.Key, out var item2))
                        {
                            await _remoteContract.ProcessRemotely(item1, item2, NodeMetrics);
                            combined.Item1.Value.Cancel();
                            combined.Item2.Value.Cancel();
                        }
                    }
                    catch (Exception ex) when(ex is TaskCanceledException || ex is OperationCanceledException)
                    {
                        _logger.LogTrace("The item process has been cancelled.");
                    }
                }, cts.Token).ConfigureAwait(false);

                if (policyResult.Outcome == OutcomeType.Failure)
                {
                    _logger.LogCritical(
                        policyResult.FinalException != null
                                ? $"Could not process item: {policyResult.FinalException.Message}."
                                : "An error has occured while processing the item.");
                }

                if (!item1ProcessSource.TryRemove(combined.Item1.Key, out _))
                {
                    _logger.LogWarning(
                        $"Could not remove item of key {combined.Item1.ToString()} from the buffer.");
                }

                if (!item2ProcessSource.TryRemove(combined.Item2.Key, out _))
                {
                    _logger.LogWarning(
                        $"Could not remove item of key {combined.Item2.ToString()} from the buffer.");
                }
            });

            var options = new DataflowLinkOptions
            {
                PropagateCompletion = true
            };

            _item1Source.LinkTo(joinBlock.Target1, options);
            _item2Source.LinkTo(joinBlock.Target2, options);
            joinBlock.LinkTo(processBlock, options);
        }
Example #33
        /// <summary>
        /// Gets and caches the list of employees with their department information from the employee database,
        /// and gets each employee's salary from the accounting department's web service using the employee code from the HR service.
        /// </summary>
        /// <param name="year">The year of the pay period.</param>
        /// <param name="month">The month of the pay period.</param>
        /// <returns>The employees with their salaries filled in.</returns>
        public Task <IEnumerable <Employee> > GetEmployeesWithSalaryAsync(int year, int month)
        {
            // Cache the object, or return it from the cache by key.
            return(_cache.GetOrCreateAsync(CACHE_KEY + month + year, async(entry) =>
            {
                // Get the list of all employees.
                var employees = await _employeeRepository.GetEmployeesAsync();

                // Options used to configure the processing performed by the blocks.
                var blockOptions = new ExecutionDataflowBlockOptions()
                {
                    MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded
                };

                // Block that gets the employee's accounting code.
                var getInnBlock = new TransformBlock <Employee, Employee>(async emp =>
                {
#if DEBUG
                    await Task.Delay(5000); // Simulate a slow service response.
#endif
                    emp.BuhCode = await _buhApiClient.GetBuhCodeByInnAsync(emp.Inn);

                    return emp;
                }, blockOptions);

                // Block that fetches the employee's salary.
                var getSalaryBlock = new ActionBlock <Employee>(async emp =>
                {
#if DEBUG
                    await Task.Delay(5000); // Simulate a slow service response.
#endif
                    emp.Salary = await _salaryApiClient.GetSalaryAsync(emp, year, month);
                }, blockOptions);

                // Options that configure the link between the blocks.
                var linkOptions = new DataflowLinkOptions {
                    PropagateCompletion = true
                };

                // Link the blocks together.
                getInnBlock.LinkTo(getSalaryBlock, linkOptions);

                // Start processing: post each employee into the pipeline.
                foreach (var employee in employees)
                {
                    getInnBlock.Post(employee);
                }

                // Tell the block that no more items will be posted.
                getInnBlock.Complete();

                // Wait for processing to finish (await instead of blocking with .Wait() inside the async lambda).
                await getSalaryBlock.Completion;

                // Put the employees into the cache entry.
                entry.Value = employees;

                // Sliding expiration for the cached entry.
                entry.SlidingExpiration = TimeSpan.FromHours(10);

                return employees;
            }));
        }
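
The example above follows the standard post/complete/await shape of a linked pipeline: feed the head, call Complete, then await the tail's Completion rather than blocking on Wait(). A minimal sketch of that shape, with invented block names:

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class CompletionSketch
{
    static async Task Main()
    {
        var square = new TransformBlock<int, int>(n => n * n);
        var print  = new ActionBlock<int>(n => Console.WriteLine(n));

        square.LinkTo(print, new DataflowLinkOptions { PropagateCompletion = true });

        for (var i = 1; i <= 5; i++)
        {
            square.Post(i);
        }

        square.Complete();        // no more input
        await print.Completion;   // await the tail; never block with .Wait() in async code
    }
}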
Example #34
        public Task Execute(CancellationToken token)
        {
            var options = new DataflowBlockOptions {
                CancellationToken = token
            };

            _buffer = new BufferBlock <long>(options);

            var hydrate = new TransformBlock <long, Summoner>(id =>
            {
                var summoner = _lookup.Hydrate(id);
                return(summoner);
            }, new ExecutionDataflowBlockOptions {
                CancellationToken = token, MaxDegreeOfParallelism = 2
            });

            var store = new TransformBlock <Summoner, Summoner>(summoner =>
            {
                if (summoner != null)
                {
                    _storage.Store(summoner);
                }

                return(summoner);
            }, new ExecutionDataflowBlockOptions {
                CancellationToken = token, MaxDegreeOfParallelism = 2
            });

            var crawl = new TransformManyBlock <Summoner, FellowPlayerInfo>(async summoner =>
            {
                var summoners = new List <FellowPlayerInfo>();
                var games     = new List <PlayerGameStats>();
                if (summoner != null)
                {
                    await _crawler.Crawl(summoner, summoners.Add, games.Add);
                }
                return(summoners);
            }, new ExecutionDataflowBlockOptions {
                CancellationToken = token, MaxDegreeOfParallelism = 2
            });

            var storeNextBatch = new ActionBlock <FellowPlayerInfo>(async info =>
            {
                if (info != null)
                {
                    var data = await _lookup.Lookup(info.summonerId);
                    _storage.StoreWhenMissing(data);
                }
            }, new ExecutionDataflowBlockOptions {
                CancellationToken = token, MaxDegreeOfParallelism = 2
            });

            _buffer.LinkTo(hydrate, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            hydrate.LinkTo(store, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            store.LinkTo(crawl, new DataflowLinkOptions {
                PropagateCompletion = true
            });
            crawl.LinkTo(storeNextBatch, new DataflowLinkOptions {
                PropagateCompletion = true
            });

            return(Task.Run(async() =>
            {
                while (!token.IsCancellationRequested)
                {
                    var batch = _producer.Produce((int)TimeSpan.FromDays(1).TotalMinutes, 30);
                    foreach (var id in batch)
                    {
                        await _buffer.SendAsync(id, token);
                    }

                    // Start the chain
                    _buffer.Complete();

                    // Wait until the chain is complete before iterating again
                    await storeNextBatch.Completion;
                }
            }, token));
        }
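
One caveat in the polling loop above: a dataflow block that has been completed never accepts new messages, so after _buffer.Complete() runs on the first pass, later iterations would be posting into a dead pipeline. A hedged sketch of one way to keep the loop's shape by rebuilding the blocks per batch (an illustration, not the repository's actual fix):

using System;
using System.Linq;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class PerBatchPipelineSketch
{
    static async Task Main()
    {
        for (var batch = 0; batch < 3; batch++)
        {
            // Build a fresh pipeline for every batch: a completed block
            // never accepts new messages.
            var buffer = new BufferBlock<int>();
            var worker = new ActionBlock<int>(i => Console.WriteLine($"batch {batch}: {i}"));
            buffer.LinkTo(worker, new DataflowLinkOptions { PropagateCompletion = true });

            foreach (var id in Enumerable.Range(0, 5))
            {
                await buffer.SendAsync(id);
            }

            buffer.Complete();
            await worker.Completion;
        }
    }
}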
Example #35
        Task DoBulkParallel()
        {
            BufferBlock<string> fileNameStore = new BufferBlock<string>();
            int maxParallelism = ImageEngine.NumThreads == 1 ? 1 :
                (ImageEngine.NumThreads == -1 ? Environment.ProcessorCount : ImageEngine.NumThreads);


            // Define block to perform each conversion
            var encoder = new TransformBlock<string, Tuple<byte[], string>>(file =>
            {
                byte[] data = null;

                string filename = Path.GetFileNameWithoutExtension(file) + "." + ImageFormats.GetExtensionOfFormat(SaveFormat);
                string path = Path.Combine(BulkUseSourceDestination ? Path.GetDirectoryName(file) : BulkSaveFolder, filename);
                path = UsefulThings.General.FindValidNewFileName(path);

                using (ImageEngineImage img = new ImageEngineImage(file))
                {
                    try
                    {
                        data = img.Save(SaveFormat, SaveMipType, removeAlpha: GeneralRemovingAlpha, customMasks: customMasks);
                    }
                    catch (Exception e)
                    {
                        BulkConvertFailed.Add(path + "  Reason: " + e.ToString());
                    }
                }

                BulkProgressValue++;
                BulkStatus = $"Converting {BulkProgressValue}/{BulkProgressMax} images.";
                return new Tuple<byte[], string>(data, path);
            }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxParallelism, BoundedCapacity = maxParallelism });

            // Define block to write converted data to disk
            // Define block to write converted data to disk
            var diskWriter = new ActionBlock<Tuple<byte[], string>>(tuple =>
            {
                if (tuple.Item1 != null)   // Skip images whose conversion failed above.
                    File.WriteAllBytes(tuple.Item2, tuple.Item1);
            }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, BoundedCapacity = maxParallelism });  // Limit to 2 disk write operations at a time; BoundedCapacity caps how many converted images wait in its buffer.


            // Link blocks together
            fileNameStore.LinkTo(encoder, new DataflowLinkOptions { PropagateCompletion = true });
            encoder.LinkTo(diskWriter, new DataflowLinkOptions { PropagateCompletion = true });

            // Begin production
            new Action(async () =>
            {
                foreach (var file in BulkConvertFiles)
                    await fileNameStore.SendAsync(file);

                fileNameStore.Complete();
            }).Invoke();

            return diskWriter.Completion;
        }
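
The example above leans on BoundedCapacity plus SendAsync for backpressure: when the encoder's buffer is full, SendAsync simply waits, so the producer cannot race ahead of the conversion and disk-writing stages. A minimal sketch of that mechanism with made-up numbers:

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class BackpressureSketch
{
    static async Task Main()
    {
        var slow = new ActionBlock<int>(async i =>
        {
            await Task.Delay(100);                 // simulate slow work (disk, CPU, ...)
            Console.WriteLine($"processed {i}");
        }, new ExecutionDataflowBlockOptions { BoundedCapacity = 4 });

        for (var i = 0; i < 20; i++)
        {
            // SendAsync waits while the bounded buffer is full, so the
            // producer cannot outrun the consumer.
            await slow.SendAsync(i);
        }

        slow.Complete();
        await slow.Completion;
    }
}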
Example #36
        private Tuple <ITargetBlock <Type>, Task <DiscoveredParts> > CreateDiscoveryBlockChain(bool typeExplicitlyRequested, IProgress <DiscoveryProgress> progress, CancellationToken cancellationToken)
        {
            string status         = Strings.ScanningMEFAssemblies;
            int    typesScanned   = 0;
            var    transformBlock = new TransformBlock <Type, object>(
                type =>
            {
                try
                {
                    return(this.CreatePart(type, typeExplicitlyRequested));
                }
                catch (Exception ex)
                {
                    return(new PartDiscoveryException(string.Format(CultureInfo.CurrentCulture, Strings.FailureWhileScanningType, type.FullName), ex)
                    {
                        AssemblyPath = type.GetTypeInfo().Assembly.Location, ScannedType = type
                    });
                }
            },
                new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Debugger.IsAttached ? 1 : Environment.ProcessorCount,
                CancellationToken      = cancellationToken,
                MaxMessagesPerTask     = 10,
                BoundedCapacity        = 100,
            });
            var parts            = ImmutableHashSet.CreateBuilder <ComposablePartDefinition>();
            var errors           = ImmutableList.CreateBuilder <PartDiscoveryException>();
            var aggregatingBlock = new ActionBlock <object>(partOrException =>
            {
                var part  = partOrException as ComposablePartDefinition;
                var error = partOrException as PartDiscoveryException;
                Debug.Assert(partOrException is Exception == partOrException is PartDiscoveryException, "Wrong exception type returned.");
                if (part != null)
                {
                    parts.Add(part);
                }
                else if (error != null)
                {
                    errors.Add(error);
                }

                progress.ReportNullSafe(new DiscoveryProgress(++typesScanned, 0, status));
            });

            transformBlock.LinkTo(aggregatingBlock, new DataflowLinkOptions {
                PropagateCompletion = true
            });

            var tcs = new TaskCompletionSource <DiscoveredParts>();

            Task.Run(async delegate
            {
                try
                {
                    await aggregatingBlock.Completion;
                    tcs.SetResult(new DiscoveredParts(parts.ToImmutable(), errors.ToImmutable()));
                }
                catch (Exception ex)
                {
                    tcs.SetException(ex);
                }
            });

            return(Tuple.Create <ITargetBlock <Type>, Task <DiscoveredParts> >(transformBlock, tcs.Task));
        }
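
The example above returns the head block together with a result task produced by a TaskCompletionSource that is resolved when the aggregating block completes. A stripped-down sketch of the same shape (types and names here are illustrative):

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class ResultTaskSketch
{
    static Tuple<ITargetBlock<int>, Task<IReadOnlyList<int>>> CreateChain()
    {
        var results = new List<int>();
        var square  = new TransformBlock<int, int>(n => n * n);
        var collect = new ActionBlock<int>(n => results.Add(n)); // single-threaded by default

        square.LinkTo(collect, new DataflowLinkOptions { PropagateCompletion = true });

        // Surface the collected results as a Task that completes (or faults)
        // together with the tail block.
        var tcs = new TaskCompletionSource<IReadOnlyList<int>>();
        collect.Completion.ContinueWith(t =>
        {
            if (t.IsFaulted)
            {
                tcs.TrySetException(t.Exception);
            }
            else
            {
                tcs.TrySetResult(results);
            }
        });

        return Tuple.Create<ITargetBlock<int>, Task<IReadOnlyList<int>>>(square, tcs.Task);
    }

    static async Task Main()
    {
        var chain = CreateChain();
        for (var i = 1; i <= 4; i++)
        {
            chain.Item1.Post(i);
        }
        chain.Item1.Complete();
        Console.WriteLine(string.Join(", ", await chain.Item2));
    }
}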
Example #37
    public async Task ProcessFiles(string path, IProgress <ProgressReport> progress, RichTextBox txt_description)
    {
        int totalFilesFound    = 0;
        int totalFilesRead     = 0;
        int totalFilesHashed   = 0;
        int totalFilesUploaded = 0;

        DateTime lastReported = DateTime.UtcNow;

        void ReportProgress()
        {
            if (DateTime.UtcNow - lastReported < TimeSpan.FromSeconds(1)) // Throttle reporting to roughly once per second; the check is not exact, so a few rapid-fire reports can still slip through.
            {
                return;
            }
            lastReported = DateTime.UtcNow;
            var report = new ProgressReport(totalFilesFound, totalFilesRead, totalFilesHashed, totalFilesUploaded);

            progress.Report(report);
        }

        var getFilesBlock = new TransformBlock <string, Dto>(filePath =>
        {
            var dto = new Dto(filePath, File.ReadAllBytes(filePath));
            totalFilesRead++; // Safe without Interlocked: this block uses the default MaxDegreeOfParallelism of 1.
            return(dto);
        });

        var hashFilesBlock = new TransformBlock <Dto, Dto>(inDto =>
        {
            using (var md5 = System.Security.Cryptography.MD5.Create())
            {
                var hash   = md5.ComputeHash(inDto.Data);
                var outDto = new Dto(inDto.FilePath, hash, inDto.RelativePath, BitConverter.ToString(hash).Replace("-", ""));
                Interlocked.Increment(ref totalFilesHashed);     // Interlocked is required because this block runs with MaxDegreeOfParallelism > 1.
                ReportProgress();
                return(outDto);
            }
        },
                                                           new ExecutionDataflowBlockOptions {
            MaxDegreeOfParallelism = Environment.ProcessorCount, BoundedCapacity = 50
        });
        var writeToDatabaseBlock = new ActionBlock <Dto>(arg =>
        {
            //Write to database here.
            txt_description.AppendLine($"{arg.RelativePath} ({arg.MD5})");
            // Main.AppendText(arg.FilePath);
            totalFilesUploaded++;
            ReportProgress();
        },
                                                         new ExecutionDataflowBlockOptions {
            BoundedCapacity = 50
        });

        getFilesBlock.LinkTo(hashFilesBlock, new DataflowLinkOptions {
            PropagateCompletion = true
        });
        hashFilesBlock.LinkTo(writeToDatabaseBlock, new DataflowLinkOptions {
            PropagateCompletion = true
        });

        foreach (var filePath in Directory.EnumerateFiles(path, "*.*", SearchOption.AllDirectories))
        {
            await getFilesBlock.SendAsync(filePath).ConfigureAwait(false);

            totalFilesFound++;
            ReportProgress();
        }

        getFilesBlock.Complete();

        await writeToDatabaseBlock.Completion.ConfigureAwait(false);

        ReportProgress();
    }
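
Note how the example mixes a plain increment in the single-threaded blocks with Interlocked.Increment in the parallel one. A small sketch of why Interlocked matters once MaxDegreeOfParallelism is raised (the counter name is invented):

using System;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class CounterSketch
{
    static async Task Main()
    {
        var processed = 0;

        // With MaxDegreeOfParallelism > 1 the delegate runs concurrently,
        // so shared state must be updated with Interlocked (or a lock).
        var worker = new ActionBlock<int>(i => Interlocked.Increment(ref processed),
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount });

        for (var i = 0; i < 1000; i++)
        {
            worker.Post(i);
        }

        worker.Complete();
        await worker.Completion;

        Console.WriteLine(processed);   // always 1000
    }
}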
Example #38
    static void Main()
    {
        // <snippet3>
        //
        // Create the members of the pipeline.
        //

        // Downloads the requested resource as a string.
        var downloadString = new TransformBlock <string, string>(async uri =>
        {
            Console.WriteLine("Downloading '{0}'...", uri);

            return(await new HttpClient().GetStringAsync(uri));
        });

        // Separates the specified text into an array of words.
        var createWordList = new TransformBlock <string, string[]>(text =>
        {
            Console.WriteLine("Creating word list...");

            // Remove common punctuation by replacing all non-letter characters
            // with a space character.
            char[] tokens = text.Select(c => char.IsLetter(c) ? c : ' ').ToArray();
            text          = new string(tokens);

            // Separate the text into an array of words.
            return(text.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));
        });

        // Removes short words and duplicates.
        var filterWordList = new TransformBlock <string[], string[]>(words =>
        {
            Console.WriteLine("Filtering word list...");

            return(words
                   .Where(word => word.Length > 3)
                   .Distinct()
                   .ToArray());
        });

        // Finds all words in the specified collection whose reverse also
        // exists in the collection.
        var findReversedWords = new TransformManyBlock <string[], string>(words =>
        {
            Console.WriteLine("Finding reversed words...");

            var wordsSet = new HashSet <string>(words);

            return(from word in words.AsParallel()
                   let reverse = new string(word.Reverse().ToArray())
                                 where word != reverse && wordsSet.Contains(reverse)
                                 select word);
        });

        // Prints the provided reversed words to the console.
        var printReversedWords = new ActionBlock <string>(reversedWord =>
        {
            Console.WriteLine("Found reversed words {0}/{1}",
                              reversedWord, new string(reversedWord.Reverse().ToArray()));
        });
        // </snippet3>

        // <snippet4>
        //
        // Connect the dataflow blocks to form a pipeline.
        //

        var linkOptions = new DataflowLinkOptions {
            PropagateCompletion = true
        };

        downloadString.LinkTo(createWordList, linkOptions);
        createWordList.LinkTo(filterWordList, linkOptions);
        filterWordList.LinkTo(findReversedWords, linkOptions);
        findReversedWords.LinkTo(printReversedWords, linkOptions);
        // </snippet4>

        // <snippet6>
        // Process "The Iliad of Homer" by Homer.
        downloadString.Post("http://www.gutenberg.org/files/6130/6130-0.txt");
        // </snippet6>

        // <snippet7>
        // Mark the head of the pipeline as complete.
        downloadString.Complete();
        // </snippet7>

        // <snippet8>
        // Wait for the last block in the pipeline to process all messages.
        printReversedWords.Completion.Wait();
        // </snippet8>
    }
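
The findReversedWords stage above is a TransformManyBlock, which maps one input message to zero or more output messages. A minimal sketch of that flattening behaviour on its own:

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class TransformManySketch
{
    static async Task Main()
    {
        // One input sentence fans out into many word messages.
        var split = new TransformManyBlock<string, string>(sentence =>
            sentence.Split(new[] { ' ' }, StringSplitOptions.RemoveEmptyEntries));

        var print = new ActionBlock<string>(word => Console.WriteLine(word));

        split.LinkTo(print, new DataflowLinkOptions { PropagateCompletion = true });

        split.Post("the quick brown fox");
        split.Complete();
        await print.Completion;
    }
}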
Example #39
        /// <summary>
        /// Opens the configured JobWorker to activate jobs in the given poll interval
        /// and handle with the given handler.
        /// </summary>
        internal void Open()
        {
            isRunning = true;
            var cancellationToken = source.Token;

            activateJobsCommand = jobWorkerBuilder.Command;
            maxJobsActive       = jobWorkerBuilder.Command.Request.MaxJobsToActivate;

            // Create options for blocks
            var bufferOptions = new DataflowBlockOptions
            {
                CancellationToken = cancellationToken,
                EnsureOrdered     = false
            };

            var executionOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = jobWorkerBuilder.ThreadCount,
                CancellationToken      = cancellationToken,
                EnsureOrdered          = false
            };

            // Buffer for polled jobs
            var input = new BufferBlock <IJob>(bufferOptions);

            // TransformBlock to process polled jobs
            var transformer = new TransformBlock <IJob, IJob>(async activatedJob =>
            {
                var jobClient = JobClientWrapper.Wrap(jobWorkerBuilder.JobClient);

                try
                {
                    await jobHandler(jobClient, activatedJob);
                    await TryToAutoCompleteJob(jobClient, activatedJob);
                }
                catch (Exception exception)
                {
                    await FailActivatedJob(jobClient, activatedJob, cancellationToken, exception);
                }
                finally
                {
                    jobClient.Reset();
                }

                return(activatedJob);
            },
                                                              executionOptions);

            // Action block to finalize handled tasks
            var output = new ActionBlock <IJob>(activatedJob => { currentJobsActive--; },
                                                executionOptions);

            // Link blocks
            input.LinkTo(transformer);
            transformer.LinkTo(output);

            // Start polling
            Task.Run(async() =>
            {
                while (!source.IsCancellationRequested)
                {
                    if (currentJobsActive >= maxJobsActive)
                    {
                        await Task.Delay(pollInterval);
                        continue;
                    }

                    var jobCount = maxJobsActive - currentJobsActive;
                    activateJobsCommand.MaxJobsToActivate(jobCount);

                    try
                    {
                        var response = await activateJobsCommand.Send(null, cancellationToken);

                        logger?.LogDebug(
                            "Job worker ({worker}) activated {activatedCount} of {requestCount} successfully.",
                            activateJobsCommand.Request.Worker,
                            response.Jobs.Count,
                            jobCount);

                        foreach (var job in response.Jobs)
                        {
                            await input.SendAsync(job);
                            currentJobsActive++;
                        }
                    }
                    catch (RpcException rpcException)
                    {
                        LogRpcException(rpcException);
                    }
                }
            },
                     cancellationToken).ContinueWith(
                t => logger?.LogError(t.Exception, "Job polling failed."),
                TaskContinuationOptions.OnlyOnFaulted);

            var command = jobWorkerBuilder.Command;

            logger?.LogDebug(
                "Job worker ({worker}) for job type {type} has been opened.",
                command.Request.Worker,
                command.Request.Type);
        }
Example #40
        public void Start(ISenderCallback callback)
        {
            _callback = callback;

            _sender = new ActionBlock <OutgoingMessageBatch>(SendBatch, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1,
                CancellationToken      = _cancellation,
                BoundedCapacity        = DataflowBlockOptions.Unbounded
            });

            _sender.Completion.ContinueWith(x =>
            {
                if (x.IsFaulted)
                {
                    _logger.LogException(x.Exception);
                }
            }, _cancellation);

            _serializing = new ActionBlock <Envelope>(async e =>
            {
                try
                {
                    e.EnsureData();
                    await _batching.SendAsync(e);
                }
                catch (Exception ex)
                {
                    _logger.LogException(ex, message: $"Error while trying to serialize envelope {e}");
                }
            },
                                                      new ExecutionDataflowBlockOptions
            {
                CancellationToken = _cancellation,
                BoundedCapacity   = DataflowBlockOptions.Unbounded
            });


            _serializing.Completion.ContinueWith(x =>
            {
                if (x.IsFaulted)
                {
                    _logger.LogException(x.Exception);
                }
            }, _cancellation);


            _batchWriting = new TransformBlock <Envelope[], OutgoingMessageBatch>(
                envelopes =>
            {
                var batch = new OutgoingMessageBatch(Destination, envelopes);
                _queued  += batch.Messages.Count;
                return(batch);
            },
                new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = DataflowBlockOptions.Unbounded, MaxDegreeOfParallelism = 10
            });

            _batchWriting.Completion.ContinueWith(x =>
            {
                if (x.IsFaulted)
                {
                    _logger.LogException(x.Exception);
                }
            }, _cancellation);

            _batchWriting.LinkTo(_sender);

            _batching = new BatchingBlock <Envelope>(200, _batchWriting, _cancellation);
            _batching.Completion.ContinueWith(x =>
            {
                if (x.IsFaulted)
                {
                    _logger.LogException(x.Exception);
                }
            }, _cancellation);
        }
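
Each block above attaches a ContinueWith to its Completion so that faults are logged instead of being silently lost. A compact sketch of that fault-observation pattern (the exception and names are invented):

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class FaultObserverSketch
{
    static async Task Main()
    {
        var failing = new ActionBlock<int>(i =>
        {
            if (i == 3) throw new InvalidOperationException("boom");
        });

        // Observe the block's fault instead of letting it go unnoticed.
        var logging = failing.Completion.ContinueWith(
            t => Console.WriteLine($"Block faulted: {t.Exception.GetBaseException().Message}"),
            TaskContinuationOptions.OnlyOnFaulted);

        for (var i = 0; i < 5; i++)
        {
            failing.Post(i);
        }
        failing.Complete();

        try { await failing.Completion; }
        catch (Exception) { /* expected: the block faulted */ }

        await logging;
    }
}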
Example #41
        static async void StartCrawling()
        {
            if (!Directory.Exists("Images"))
            {
                Directory.CreateDirectory("Images");
            }
            try
            {
                #region Dataflow block Options

                var downloaderOptions = new ExecutionDataflowBlockOptions
                {
                    // Enforce fairness: after handling n messages the block's task
                    // is rescheduled. This gives other blocks the opportunity to
                    // process their messages (to avoid oversubscription, TPL Dataflow
                    // does not schedule all tasks at once when the machine does not
                    // have enough cores).
                    MaxMessagesPerTask = DOWNLOADER_MAX_MESSAGE_PER_TASK,
                    // By default TPL Dataflow assigns a single task per block;
                    // MaxDegreeOfParallelism raises that limit.
                    MaxDegreeOfParallelism = DOWNLOADER_MAX_DEGREE_OF_PARALLELISM,
                    // the size of the block input buffer
                    BoundedCapacity = DOWNLOADER_BOUNDED_CAPACITY
                };

                var transformerOptions = new ExecutionDataflowBlockOptions
                {
                    MaxMessagesPerTask = MAX_MESSAGE_PER_TASK,
                };

                var writerOptions = new ExecutionDataflowBlockOptions
                {
                    // By default TPL Dataflow assigns a single task per block;
                    // MaxDegreeOfParallelism raises that limit.
                    MaxDegreeOfParallelism = WRITER_MAX_DEGREE_OF_PARALLELISM,
                    // MaxMessagesPerTask = MAX_MESSAGE_PER_TASK,
                };

                var linkOption = new DataflowLinkOptions {
                    PropagateCompletion = true
                };

                #endregion // Dataflow block Options

                #region Downloader

                var downloader = new TransformBlock <string, HttpContentInfo>( // "text/html, image/jpeg"
                    async(url) =>
                {
                    try
                    {
                        #region Validation

                        // TryAdd both checks and records the URL atomically,
                        // so the same URL is never downloaded twice.
                        if (!_urls.TryAdd(url, true))
                        {
                            return(null);
                        }

                        if (!ShouldContinue(url))
                        {
                            return(null);
                        }

                        #endregion     // Validation

                        HttpClient client = new HttpClient();
                        client.Timeout    = TimeSpan.FromSeconds(DOWNLOAD_TIMEOUT_SEC);

                        //Trace.WriteLine("Downloading: " + url);

                        // Async I/O: the thread pool worker thread returns to the pool while the download is in flight
                        HttpResponseMessage response = await client.GetAsync(url);
                        if (!response.IsSuccessStatusCode)
                        {
                            WriteToConsole("Fail to download html: [{0}] \r\n\tStatus Code = {1}", ConsoleColor.Red, url, response.StatusCode);
                            return(null);
                        }
                        HttpContent content = response.Content;

                        var contentType = content.Headers.ContentType;

                        #region Validation

                        // Check for null before dereferencing the content type.
                        if (contentType == null)
                        {
                            WriteToConsole("Unknown content type: {0}", ConsoleColor.Gray, url);
                            return(null);
                        }

                        #endregion     // Validation

                        string mediaType = contentType.MediaType;
                        WriteToConsole("Downloaded [{0}]: {1}", ConsoleColor.White,
                                       mediaType, url);

                        var info = new HttpContentInfo(url, response.Content);
                        if (!IsMediaType(info, "text/html"))
                        {
                            Trace.WriteLine("Downloaded [" + mediaType + "]: " + url);
                        }
                        return(info);
                    }
                    #region Exception Handling

                    catch (UriFormatException ex)
                    {
                        WriteToConsole("invalid URL", ConsoleColor.Red, ex.Message);
                    }
                    catch (WebException ex)
                    {
                        WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, url, ex.Message);
                    }
                    catch (AggregateException ex)
                    {
                        foreach (var exc in ex.Flatten().InnerExceptions)
                        {
                            WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, url, exc.Message);
                        }
                    }
                    catch (Exception ex)
                    {
                        WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
                    }

                    #endregion     // Exception Handling

                    return(null);
                }, downloaderOptions);

                #endregion // Downloader

                #region Parser

                var parser = new TransformManyBlock <HttpContentInfo, string>(
                    async contentInfo =>
                {
                    HttpContent content = contentInfo.Content;

                    // Async I/O: the thread pool worker thread returns to the pool while the body is read
                    string html = await content.ReadAsStringAsync();
                    var output  = new List <string>();
                    try
                    {
                        var links = _linkRegex.Matches(html);
                        foreach (Match item in links)
                        {
                            var value = item.Value;
                            //Trace.WriteLine("\t\tPARSED: " + value);
                            output.Add(value);
                        }
                    }
                    #region Exception Handling

                    catch (Exception ex)
                    {
                        WriteToConsole("Error {0}", ConsoleColor.Red, ex.Message);
                    }

                    #endregion     // Exception Handling

                    return(output);
                }, transformerOptions);

                #endregion // Parser

                #region Writer

                var writer = new ActionBlock <HttpContentInfo>(async contentInfo =>
                {
                    try
                    {
                        HttpContent content = contentInfo.Content;

                        // Async I/O: the thread pool worker thread returns to the pool while the stream is opened
                        using (Stream source = await content.ReadAsStreamAsync())
                            using (var image = Image.FromStream(source))
                            {
                                string fileName = Path.GetFileName(contentInfo.Url);

                                //Trace.WriteLine("\tWRITTING: " + contentInfo.Url);

                                #region Validation

                                if (!_images.TryAdd(fileName, true))
                                {
                                    return;
                                }

                                if (image.Width < MIN_SIZE.Width || image.Height < MIN_SIZE.Height)
                                {
                                    return;
                                }

                                #endregion // Validation

                                string name = @"Images\" + fileName;

                                using (Stream dest = OpenWriteAsync(name))
                                {
                                    source.Position = 0;
                                    // Async I/O: the thread pool worker thread returns to the pool while the copy runs
                                    await source.CopyToAsync(dest);
                                    WriteToConsole("{0}: Width:{1}, Height:{2}", ConsoleColor.Yellow,
                                                   fileName, image.Width, image.Height);
                                }
                            }
                    }
                    #region Exception Handling

                    catch (WebException ex)
                    {
                        WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, ex.Message);
                    }
                    catch (Exception ex)
                    {
                        WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
                    }

                    #endregion // Exception Handling
                }, writerOptions);

                #endregion // Writer

                var garbageContent = DataflowBlock.NullTarget <HttpContentInfo>();
                var garbageUrl     = DataflowBlock.NullTarget <string>();


                #region LinkTo

                ////////////////////////////////////////////////////////
                //                                                    //
                //   garbage  <-------  downloader <--------------    //
                //                      /           \            |    //
                //                  writer         parser --------    //
                //                                                    //
                ////////////////////////////////////////////////////////
                downloader.LinkTo(writer, linkOption, _isImage);
                downloader.LinkTo(parser, linkOption, info => info != null);
                downloader.LinkTo(garbageContent, linkOption); // fallback (otherwise null messages would be stuck in the block's buffer and the block would never complete)
                parser.LinkTo(downloader, linkOption, url => !string.IsNullOrEmpty(url));
                parser.LinkTo(garbageUrl, linkOption);

                #endregion // LinkTo

                downloader.Post(URL_CRAWL_TARGET);

                Console.WriteLine("Crawling");
                Thread.Sleep(COMPLETE_AFTER_SEC * 1000);

                #region Complete

                downloader.Complete();

                #region WriteToConsole ("Try to Complete...")

                ConsoleColor color = ConsoleColor.Yellow;
                WriteToConsole(
                    @"Try to Complete (items in the buffer = 
            downloader:         is completed = {0}, input={1} , output={2}
            writer:             is completed = {3}, input ={4}
            parser:             is completed = {5}, input={6} , output={7}", color,
                    downloader.Completion.IsCompleted, downloader.InputCount, downloader.OutputCount,
                    writer.Completion.IsCompleted, writer.InputCount,
                    parser.Completion.IsCompleted, parser.InputCount, parser.OutputCount);

                #endregion // WriteToConsole ("Try to Complete...")

                Task completeAll = Task.WhenAll(
                    downloader.Completion,
                    parser.Completion,
                    writer.Completion);

                await Task.Run(async() =>
                {
                    while (!completeAll.IsCompleted)
                    {
                        await Task.Delay(2000);

                        #region WriteToConsole (status)

                        color = color == ConsoleColor.Magenta ? ConsoleColor.White : ConsoleColor.Yellow;

                        WriteToConsole(
                            @"Complete Status (items in the buffer = 
            downloader:         is completed = {0}, input={1} , output={2}
            writer:             is completed = {3}, input ={4}
            parser:         is completed = {5}, input={6} , output={7}", color,
                            downloader.Completion.IsCompleted, downloader.InputCount, downloader.OutputCount,
                            writer.Completion.IsCompleted, writer.InputCount,
                            parser.Completion.IsCompleted, parser.InputCount, parser.OutputCount);
                    }

                    #endregion     // WriteToConsole (status)
                });

                WriteToConsole("Complete (items in the writer input buffer = {0})", ConsoleColor.Green, writer.InputCount);

                #endregion // Complete
            }
            catch (Exception ex)
            {
                WriteToConsole("EXCEPTION: {0}", ConsoleColor.DarkRed, ex);
            }
        }
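
The LinkTo wiring above relies on a crucial detail: when links carry predicates, every message must still have somewhere to go, which is why null results are drained into DataflowBlock.NullTarget. A minimal sketch of a predicate link plus its fallback (the URLs and block names are invented):

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class PredicateLinkSketch
{
    static async Task Main()
    {
        var classify = new TransformBlock<string, string>(url => url);
        var images   = new ActionBlock<string>(url => Console.WriteLine($"image: {url}"));
        var link     = new DataflowLinkOptions { PropagateCompletion = true };

        classify.LinkTo(images, link, url => url.EndsWith(".jpg"));

        // Fallback link: without it, non-image messages would sit in the
        // source's output buffer forever and the pipeline would never complete.
        classify.LinkTo(DataflowBlock.NullTarget<string>());

        classify.Post("http://example.com/a.jpg");
        classify.Post("http://example.com/index.html");
        classify.Post("http://example.com/b.jpg");

        classify.Complete();
        await images.Completion;
    }
}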
Example #42
        public SMSOperationWorker()
        {
            try
            {
                if (!string.IsNullOrEmpty(SP.ep.SendBlockDueTime))
                {
                    DueTime = Convert.ToInt32(SP.ep.SendBlockDueTime);
                }

                if (!string.IsNullOrEmpty(SP.ep.SendBlockBatchMaxNum))
                {
                    BatchMaxNum = Convert.ToInt32(SP.ep.SendBlockBatchMaxNum);
                }

                this.KeyParam = "";

                _logCaches = new BatchBlock <ReceiveMsgStruct>(BatchMaxNum);

                mainTB = new TransformBlock <ReceiveMsgStruct[], List <SendMsgStruct> >(array =>
                {
                    try
                    {
                        return(EmaySendAction(array));
                    }
                    catch (Exception ex)
                    {
                        AsyncHelper.RunSync <bool>(() => Manager.Instance.WriteLogFile("EmaySendAction出现异常", ex));
                        return(new List <SendMsgStruct>());
                    }
                },
                                                                                        new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 1
                });

                spareTB = new ActionBlock <List <SendMsgStruct> >(array =>
                {
                    try
                    {
                        if (array != null && array.Count > 0)
                        {
                            CnkiSendAction(array);
                        }
                    }
                    catch (Exception ex)
                    {
                        AsyncHelper.RunSync <bool>(() => Manager.Instance.WriteLogFile("CnkiSendAction出现异常", ex));
                    }
                },
                                                                  new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 1
                });


                mainTB.LinkTo(spareTB);
                _logCaches.LinkTo(mainTB);

                triggerBatchTimer = new Timer((obj) => _logCaches.TriggerBatch());

                _logCaches.Completion.ContinueWith(delegate { mainTB.Complete(); });
                mainTB.Completion.ContinueWith(delegate { spareTB.Complete(); });
            }
            catch (Exception ex)
            {
                AsyncHelper.RunSync <bool>(() => Manager.Instance.WriteLogFile("SMSOperationWorker出现异常", ex));
            }
        }
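
The worker above pairs a BatchBlock with a Timer that calls TriggerBatch, so partially filled batches are still flushed on a schedule. A minimal sketch of that flush pattern (the sizes and intervals are invented):

using System;
using System.Threading;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class TriggerBatchSketch
{
    static async Task Main()
    {
        var batcher  = new BatchBlock<int>(10);
        var consumer = new ActionBlock<int[]>(batch =>
            Console.WriteLine($"batch of {batch.Length}"));

        batcher.LinkTo(consumer, new DataflowLinkOptions { PropagateCompletion = true });

        // Flush whatever has accumulated every 500 ms, even when the
        // batch is not yet full (the same job the Timer does above).
        var flushTimer = new Timer(_ => batcher.TriggerBatch(), null,
            TimeSpan.FromMilliseconds(500), TimeSpan.FromMilliseconds(500));

        for (var i = 0; i < 23; i++)
        {
            batcher.Post(i);
            await Task.Delay(50);
        }

        batcher.Complete();           // emits any final partial batch
        await consumer.Completion;
        flushTimer.Dispose();
    }
}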
Example #43
        // Creates the image processing dataflow network and returns the
        // head node of the network.
        ITargetBlock <string> CreateImageProcessingNetwork()
        {
            //
            // Create the dataflow blocks that form the network.
            //

            // Create a dataflow block that takes a folder path as input
            // and returns a collection of Bitmap objects.
            var loadBitmaps = new TransformBlock <string, IEnumerable <Bitmap> >(path =>
            {
                try
                {
                    return(LoadBitmaps(path));
                }
                catch (OperationCanceledException)
                {
                    // Handle cancellation by passing the empty collection
                    // to the next stage of the network.
                    return(Enumerable.Empty <Bitmap>());
                }
            });

            // Create a dataflow block that takes a collection of Bitmap objects
            // and returns a single composite bitmap.
            var createCompositeBitmap = new TransformBlock <IEnumerable <Bitmap>, Bitmap>(bitmaps =>
            {
                try
                {
                    return(CreateCompositeBitmap(bitmaps));
                }
                catch (OperationCanceledException)
                {
                    // Handle cancellation by passing null to the next stage
                    // of the network.
                    return(null);
                }
            });

            // Create a dataflow block that displays the provided bitmap on the form.
            var displayCompositeBitmap = new ActionBlock <Bitmap>(bitmap =>
            {
                // Display the bitmap.
                pictureBox1.SizeMode = PictureBoxSizeMode.StretchImage;
                pictureBox1.Image    = bitmap;

                // Enable the user to select another folder.
                toolStripButton1.Enabled = true;
                toolStripButton2.Enabled = false;
                Cursor = DefaultCursor;
            },
                                                                  // Specify a task scheduler from the current synchronization context
                                                                  // so that the action runs on the UI thread.
                                                                  new ExecutionDataflowBlockOptions
            {
                TaskScheduler = TaskScheduler.FromCurrentSynchronizationContext()
            });

            // Create a dataflow block that responds to a cancellation request by
            // displaying an image to indicate that the operation is cancelled and
            // enables the user to select another folder.
            var operationCancelled = new ActionBlock <object>(delegate
            {
                // Display the error image to indicate that the operation
                // was cancelled.
                pictureBox1.SizeMode = PictureBoxSizeMode.CenterImage;
                pictureBox1.Image    = pictureBox1.ErrorImage;

                // Enable the user to select another folder.
                toolStripButton1.Enabled = true;
                toolStripButton2.Enabled = false;
                Cursor = DefaultCursor;
            },
                                                              // Specify a task scheduler from the current synchronization context
                                                              // so that the action runs on the UI thread.
                                                              new ExecutionDataflowBlockOptions
            {
                TaskScheduler = TaskScheduler.FromCurrentSynchronizationContext()
            });

            //
            // Connect the network.
            //

            // Link loadBitmaps to createCompositeBitmap.
            // The provided predicate ensures that createCompositeBitmap accepts the
            // collection of bitmaps only if that collection has at least one member.
            loadBitmaps.LinkTo(createCompositeBitmap, bitmaps => bitmaps.Count() > 0);

            // Also link loadBitmaps to operationCancelled.
            // When createCompositeBitmap rejects the message, loadBitmaps
            // offers the message to operationCancelled.
            // operationCancelled accepts all messages because we do not provide a
            // predicate.
            loadBitmaps.LinkTo(operationCancelled);

            // Link createCompositeBitmap to displayCompositeBitmap.
            // The provided predicate ensures that displayCompositeBitmap accepts the
            // bitmap only if it is non-null.
            createCompositeBitmap.LinkTo(displayCompositeBitmap, bitmap => bitmap != null);

            // Also link createCompositeBitmap to operationCancelled.
            // When displayCompositeBitmap rejects the message, createCompositeBitmap
            // offers the message to operationCancelled.
            // operationCancelled accepts all messages because we do not provide a
            // predicate.
            createCompositeBitmap.LinkTo(operationCancelled);

            // Return the head of the network.
            return(loadBitmaps);
        }
Example #44
        public void Init()
        {
            _massMail             = new MassMail(Config.BlockSize, Config.UserAgent, Config.ConnectionString, Config.Mode);
            _templateCache        = new ConcurrentDictionary <long, Lazy <Template> >();
            _attachmentCache      = new ConcurrentDictionary <long, Lazy <Attach> >();
            _dkimSignerCache      = new ConcurrentDictionary <string, DkimSigner>();
            _domailKeySignerCache = new ConcurrentDictionary <string, DomainKeySigner>();

            //Get all private keys
            GetDkimSigners();

            //*** Create pipeline ***
            //Create TransformBlock that gets table of client data and make a list of objects from them.
            _parseXmlDataBlock = new TransformBlock <DataTable, List <Mail> >(sendData => ParseXmlData(sendData),
                                                                              new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Config.ParseXmlMaxdop,
                BoundedCapacity        = Config.ParseXmlBufferSize
            });
            //Create TransformBlock that gets a list of client objects, send them email, and stores result in DataTable.
            _sendEmailsBlock = new TransformBlock <List <Mail>, DataTable>(mails => SendEmails(_massMail, mails),
                                                                           new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Config.SendEmailsMaxdop,
                BoundedCapacity        = Config.SendEmailsMaxdop
            });
            //Create BatchBlock that holds several DataTable and then propagates them out as an array.
            _batchResultBlock = new BatchBlock <DataTable>(Config.BatchSize,
                                                           new GroupingDataflowBlockOptions
            {
                BoundedCapacity = Config.BatchSize
            });
            //Create ActionBlock that writes result into DB
            _writeResultsBlock = new ActionBlock <DataTable[]>(results => WriteResults(_massMail, results),
                                                               new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = 1
            });

            //*** Build pipeline ***
            // POST --> _parseXmlDataBlock --> _sendEmailsBlock --> _batchResultBlock --> _writeResultsBlock
            _parseXmlDataBlock.LinkTo(_sendEmailsBlock);
            _sendEmailsBlock.LinkTo(_batchResultBlock);
            _batchResultBlock.LinkTo(_writeResultsBlock);

            _parseXmlDataBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)_sendEmailsBlock).Fault(t.Exception);
                }
                else
                {
                    _sendEmailsBlock.Complete();
                }
            });
            _sendEmailsBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)_batchResultBlock).Fault(t.Exception);
                }
                else
                {
                    _batchResultBlock.Complete();
                }
            });
            _batchResultBlock.Completion.ContinueWith(t =>
            {
                if (t.IsFaulted)
                {
                    ((IDataflowBlock)_writeResultsBlock).Fault(t.Exception);
                }
                else
                {
                    _writeResultsBlock.Complete();
                }
            });
        }
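
Instead of PropagateCompletion, the pipeline above forwards completion and faults by hand with ContinueWith, faulting the next block when the previous stage fails. A small sketch of that manual propagation (the block contents are illustrative):

using System;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class ManualPropagationSketch
{
    static async Task Main()
    {
        var parse = new TransformBlock<string, int>(s => int.Parse(s));
        var print = new ActionBlock<int>(n => Console.WriteLine(n));

        parse.LinkTo(print);   // deliberately no PropagateCompletion

        // Forward completion and faults by hand, as the pipeline above does.
        _ = parse.Completion.ContinueWith(t =>
        {
            if (t.IsFaulted)
            {
                ((IDataflowBlock)print).Fault(t.Exception);
            }
            else
            {
                print.Complete();
            }
        });

        parse.Post("1");
        parse.Post("2");
        parse.Complete();

        await print.Completion;
    }
}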
Example #45
        /// <summary>
        /// Load readers from database.
        /// </summary>
        public void LoadReaders()
        {
            int       maxMfn = Connection.GetMaxMfn(Database);
            const int delta  = 1000;

            DataflowLinkOptions linkOptions = new DataflowLinkOptions
            {
                PropagateCompletion = true
            };

            TransformBlock <string, MarcRecord> parseRecordBlock
                = new TransformBlock <string, MarcRecord>
                  (
                      line => _ParseRecord(line),
                      new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism    = 6,
                SingleProducerConstrained = true
            }
                  );

            TransformBlock <MarcRecord, ReaderInfo> parseReaderBlock
                = new TransformBlock <MarcRecord, ReaderInfo>
                  (
                      record => _ParseReader(record),
                      new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 4
            }
                  );

            ActionBlock <ReaderInfo> analyzeReaderBlock
                = new ActionBlock <ReaderInfo>
                  (
                      reader => _AnalyzeReader(reader),
                      new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 4
            }
                  );

            parseRecordBlock.LinkTo
            (
                parseReaderBlock,
                linkOptions
            );

            parseReaderBlock.LinkTo
            (
                analyzeReaderBlock,
                linkOptions
            );

            DateTime today = DateTime.Today;

            _debtorManager
                = new DebtorManager(Connection)
                {
                FromDate = today.AddYears(-1),
                ToDate   = today.AddMonths(-1)
                };
            _debtorManager.SetupDates();

            for (int offset = 1; offset < maxMfn; offset += delta)
            {
                WriteLine
                (
                    "Загружается: {0} из {1}",
                    offset - 1,
                    maxMfn - 1
                );

                int portion = Math.Min(delta, maxMfn - offset);

                int[] list = Enumerable.Range(offset, portion)
                             .ToArray();
                string[] lines = _ReadRawRecords(list);
                foreach (string line in lines)
                {
                    parseRecordBlock.Post(line);
                }
            }
            parseRecordBlock.Complete();

            analyzeReaderBlock.Completion.Wait();

            Readers.CompleteAdding();
            Debtors.CompleteAdding();
        }
Example #46
        private static async Task <UsageInformation> GetUsageInformation(IEnumerable <FileInfo> files)
        {
            var result = new UsageInformation();

            var multipleInstancesOption = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount
            };
            var singleInstanceOption = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1
            };

            var getApisBlock     = new TransformBlock <FileInfo, UsageInformation>(GetApisUsedByAssembly, multipleInstancesOption);
            var collectApisBlock = new ActionBlock <UsageInformation>(assemblyInfo =>
            {
                if (assemblyInfo != null)
                {
                    foreach (var targetFramework in assemblyInfo.TargetFrameworks)
                    {
                        result.TargetFrameworks.Add(targetFramework);
                    }

                    foreach (var version in assemblyInfo.Versions)
                    {
                        result.Versions.Add(version);
                    }

                    foreach (var apisByAssembly in assemblyInfo.MemberReferences)
                    {
                        if (!result.MemberReferences.TryGetValue(apisByAssembly.Key, out HashSet <string> allApis))
                        {
                            allApis = new HashSet <string>();
                            result.MemberReferences[apisByAssembly.Key] = allApis;
                        }

                        foreach (var api in apisByAssembly.Value)
                        {
                            allApis.Add(api);
                        }
                    }
                }
            },
                                                                      singleInstanceOption);

            var linkOptions = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

            getApisBlock.LinkTo(collectApisBlock, linkOptions);

            foreach (var file in files)
            {
                await getApisBlock.SendAsync(file);
            }

            getApisBlock.Complete();

            await Task.WhenAll(getApisBlock.Completion, collectApisBlock.Completion);

            return(result);
        }
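
The example above fans work out across cores in the TransformBlock and then funnels everything into a collector with MaxDegreeOfParallelism = 1, which is what lets it mutate shared collections without locks. A minimal sketch of that fan-out/fan-in shape:

using System;
using System.Collections.Generic;
using System.Threading.Tasks;
using System.Threading.Tasks.Dataflow;

class FanInSketch
{
    static async Task Main()
    {
        var results = new HashSet<int>();

        // Parallel stage: spread CPU-bound work across all cores.
        var compute = new TransformBlock<int, int>(n => n * n,
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount });

        // Collector stage: MaxDegreeOfParallelism is 1 by default, so the
        // HashSet can be mutated without any locking.
        var collect = new ActionBlock<int>(n => results.Add(n));

        compute.LinkTo(collect, new DataflowLinkOptions { PropagateCompletion = true });

        for (var i = 0; i < 100; i++)
        {
            compute.Post(i);
        }

        compute.Complete();
        await collect.Completion;

        Console.WriteLine(results.Count);   // 100
    }
}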
Example #47
        public Task UploadBlobsAsync(AwsManager awsManager, ISourceBlock<Tuple<IFileFingerprint, AnnotatedPath>> uniqueBlobBlock,
            IReadOnlyDictionary<string, string> knowObjects, CancellationToken cancellationToken)
        {
            var blobCount = 0;
            var blobTotalSize = 0L;

            var builderBlock = new TransformBlock<Tuple<IFileFingerprint, AnnotatedPath>, S3Blobs.IUploadBlobRequest>(
                tuple =>
                {
                    string etag;
                    var exists = knowObjects.TryGetValue(tuple.Item1.Fingerprint.Key(), out etag);

                    //Debug.WriteLine($"{tuple.Item1.FullFilePath} {(exists ? "already exists" : "scheduled for upload")}");

                    if (exists)
                    {
                        // We can't check multipart uploads this way since we don't know the size
                        // of the individual parts.
                        if (etag.Contains("-"))
                        {
                            Debug.WriteLine($"{tuple.Item1.FullFilePath} is a multi-part upload with ETag {etag} {tuple.Item1.Fingerprint.Key().Substring(0, 12)}");

                            return null;
                        }

                        var expectedETag = tuple.Item1.Fingerprint.S3ETag();

                        if (string.Equals(expectedETag, etag, StringComparison.InvariantCultureIgnoreCase))
                            return null;

                        Console.WriteLine($"ERROR: {tuple.Item1.FullFilePath} tag mismatch {etag}, expected {expectedETag} {tuple.Item1.Fingerprint.Key().Substring(0, 12)}");
                    }

                    var request = awsManager.BuildUploadBlobRequest(tuple);

                    if (null == request)
                        return null;

                    Interlocked.Increment(ref blobCount);

                    Interlocked.Add(ref blobTotalSize, request.FileFingerprint.Fingerprint.Size);

                    return request;
                },
                new ExecutionDataflowBlockOptions { CancellationToken = cancellationToken, MaxDegreeOfParallelism = Environment.ProcessorCount });

            var uploader = new ActionBlock<S3Blobs.IUploadBlobRequest>(
                blob => UploadBlobAsync(awsManager, blob, cancellationToken),
                new ExecutionDataflowBlockOptions
                {
                    MaxDegreeOfParallelism = 4,
                    CancellationToken = cancellationToken
                });

            builderBlock.LinkTo(uploader, new DataflowLinkOptions { PropagateCompletion = true }, r => null != r);
            builderBlock.LinkTo(DataflowBlock.NullTarget<S3Blobs.IUploadBlobRequest>());

            uniqueBlobBlock.LinkTo(builderBlock, new DataflowLinkOptions { PropagateCompletion = true });

            var tasks = new List<Task>();

#if DEBUG
            var uploadDoneTask = uploader.Completion
                .ContinueWith(
                    _ => Debug.WriteLine($"Done uploading blobs: {blobCount} items {SizeConversion.BytesToGiB(blobTotalSize):F2}GiB"), cancellationToken);

            tasks.Add(uploadDoneTask);
#endif

            tasks.Add(uploader.Completion);

            return Task.WhenAll(tasks);
        }
Example #48
        static void Main(string[] args)
        {
            // Test data
            args = new string[] { "C:\\Development\\SonDar\\Paragon\\GuardAnalyzer", "Preview", "Default", "Example997*.cs" };
            // arg0 : path to folder
            string pathToStartFolder = args[0];
            // arg1 : work mode
            WorkMode mode;

            if (!Enum.TryParse(args[1], out mode))
            {
                throw new Exception("Unknown Mode [args[1] = " + mode + "]");
            }
            // arg2 : changeModel file (Optional)
            string path = Directory.GetCurrentDirectory() + "\\changes.txt";

            if (args.Length > 2 && !args[2].Equals("Default"))
            {
                path = args[2];
            }
            // arg3 : wildcard (Optional)
            string wildcard = "*.cs";

            if (args.Length > 3)
            {
                wildcard = args[3];
            }
            // Blocks list
            TransformBlock<string, ArrayList> fileListBuilder = new TransformBlock<string, ArrayList>(
                domainFolderName => new FileListBuilder().ParseDirectory(domainFolderName, wildcard));
            TransformBlock<ArrayList, ChangeModel> guardAnalyzer = new TransformBlock<ArrayList, ChangeModel>(
                fileList => new GuardAnalyzer().Prepare(fileList));
            TransformBlock<string, ChangeModel> loader = new TransformBlock<string, ChangeModel>(
                fileName => ChangeModel.Load(fileName));
            TransformBlock<ChangeModel, ChangeModel> printer = new TransformBlock<ChangeModel, ChangeModel>(
                model => model.Print());
            ActionBlock<ChangeModel> writer = new ActionBlock<ChangeModel>(tempModel => tempModel.Save(path));
            ActionBlock<ChangeModel> commiter = new ActionBlock<ChangeModel>(tempModel => new GuardAnalyzer().Commit(tempModel));

            // Start
            // Preview - fileListBuilder->guardAnalyzer->printer->writer
            // Commit  - loader->printer->commiter
            // Force   - fileListBuilder->guardAnalyzer->printer->commiter
            Logger.Log("Preview : ");
            if (mode == WorkMode.Preview || mode == WorkMode.Force)
            {
                fileListBuilder.LinkTo(guardAnalyzer);
                fileListBuilder.Completion.ContinueWith(task => guardAnalyzer.Complete());

                guardAnalyzer.LinkTo(printer);
                guardAnalyzer.Completion.ContinueWith(task => printer.Complete());
                if (mode != WorkMode.Force)
                {
                    printer.LinkTo(writer);
                    printer.Completion.ContinueWith(task => writer.Complete());
                    fileListBuilder.Post(pathToStartFolder);
                    fileListBuilder.Complete();
                    writer.Completion.Wait();
                }
            }
            if (mode == WorkMode.Commit || mode == WorkMode.Force)
            {
                if (mode != WorkMode.Force)
                {
                    loader.LinkTo(printer);
                    loader.Completion.ContinueWith(task => printer.Complete());
                }
                printer.LinkTo(commiter);
                printer.Completion.ContinueWith(task => commiter.Complete());
                if (mode != WorkMode.Force)
                {
                    loader.Post(path);
                    loader.Complete();
                }
                else
                {
                    fileListBuilder.Post(pathToStartFolder);
                    fileListBuilder.Complete();
                }
                commiter.Completion.Wait();
            }

            Console.ReadKey();
        }
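The example above wires completion by hand with Completion.ContinueWith(... => next.Complete()). Linking with PropagateCompletion = true achieves the same effect in one place; a minimal standalone sketch with made-up block names:

        static void PropagateCompletionSketch()
        {
            var parse = new TransformBlock<string, int>(s => int.Parse(s));
            var square = new TransformBlock<int, int>(n => n * n);
            var print = new ActionBlock<int>(n => Console.WriteLine(n));

            // Completing the head block now flows completion through every link.
            var opts = new DataflowLinkOptions { PropagateCompletion = true };
            parse.LinkTo(square, opts);
            square.LinkTo(print, opts);

            parse.Post("3");
            parse.Complete();
            print.Completion.Wait();
        }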
Example #49
        static void SweepCommandLine(string outputFile, string vwExe, string dataFile, string outModelDir, int numProcs = 30)
        {
            var bags = new[] { 1, 2, 4, 6, 8, 10 }.Select(a => "--bag " + a);
            var softmaxes = new[] { 0, 1, 2, 4, 8, 16 }.Select(a => "--softmax --lambda " + a);
            var epsilons = new[] { .2f }.Select(a => "--epsilon " + a);
            var covers = new[] { 1, 2, 4, 6, 8, 10 }.Select(a => "--cover " + a);

            var arguments = Util.Expand(
                epsilons.Union(bags).Union(softmaxes).Union(covers),
                new[] { "--cb_type ips", "--cb_type mtr", "--cb_type dr" },
                new[] { "--marginal KG", "--marginal G", "--marginal K", "" },
                new[] { 0.0002, 0.005, 0.01, 0.1 }.Select(l => string.Format(CultureInfo.InvariantCulture, "-l {0}", l))
                )
                            .Select((a, i) => $"--cb_explore_adf --ignore B --ignore C --ignore D --ignore E --ignore F --ignore H --ignore R -b 18 --power_t 0 {a} -d {dataFile} -f {outModelDir}\\{i}.model -c")
                            .ToList();

            Directory.CreateDirectory(outModelDir);
            File.WriteAllLines(outputFile, arguments);

            int numFinishedProcessing = 0;

            var inputBlock = new TransformBlock<int, int>(i => i);
            var processBlock = new ActionBlock<int>(i =>
            {
                var startTime = DateTime.UtcNow;
                var p         = Process.Start(new ProcessStartInfo
                {
                    FileName  = vwExe,
                    Arguments = arguments[i],
                    RedirectStandardOutput = true,
                    RedirectStandardError  = true,
                    UseShellExecute        = false,
                    CreateNoWindow         = true
                });

                // Drain stderr asynchronously while reading stdout synchronously; reading
                // both redirected streams to the end on the same thread can deadlock once
                // either buffer fills up.
                var errorTask = p.StandardError.ReadToEndAsync();
                string output = p.StandardOutput.ReadToEnd();
                string error = errorTask.Result;
                File.WriteAllText($"{outModelDir}\\{i}.output", $"{startTime}\r\n{arguments[i]}\r\n{output}\r\n{error}");
                p.WaitForExit();

                int numFinished = Interlocked.Increment(ref numFinishedProcessing);

                Console.WriteLine($"Finished: {numFinished} / {arguments.Count}");
            },
            new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = numProcs,
                BoundedCapacity = numProcs
            });

            inputBlock.LinkTo(processBlock, new DataflowLinkOptions { PropagateCompletion = true });

            var input = inputBlock.AsObserver();

            for (int i = 0; i < arguments.Count; i++)
            {
                input.OnNext(i);
            }
            input.OnCompleted();

            processBlock.Completion.Wait();
        }
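The sweep above feeds a bounded ActionBlock through inputBlock.AsObserver(). When posting to a bounded block directly, note that Post returns false as soon as the block is full, while SendAsync returns a task that completes once the item is accepted; a minimal sketch of that backpressure pattern (made-up names, simulated work):

        static async Task BoundedSendSketch()
        {
            var worker = new ActionBlock<int>(async n =>
            {
                await Task.Delay(100); // simulate work
                Console.WriteLine(n);
            }, new ExecutionDataflowBlockOptions { BoundedCapacity = 2 });

            for (var i = 0; i < 20; i++)
                await worker.SendAsync(i); // waits whenever the bounded buffer is full

            worker.Complete();
            await worker.Completion;
        }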
        public void InitializeDataflow()
        {
            _config = GlobalClaimWakeUp.Container.Resolve<ClaimProcessorConfiguration>();
            Logger.Info("======================Initializing Dataflow=================================");
            
            GlobalClaimWakeUp.PostedClaims.Clear();
            if (_config.ReopeningClaims)
            {
                GlobalClaimWakeUp.GeniusXPolicyState.Clear();
                GlobalClaimWakeUp.NameReferences.Clear();
                GlobalClaimWakeUp.PostedClaims.Clear();
            }
            

            _attachClaimToPolicy = new TransformBlock<Claim, Claim>(
                c => AttachClaimToPolicyBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });


            _executeExcessAndDeductibles = new TransformBlock<Claim, Claim>(
                    c => ExcessAndDeductiblesBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            _createTransferNameRequests = new TransformBlock<Claim, Claim>(
                    c => CreateNameTransferRequestsBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            // We don't need to create a review task for the service
            //_createClaimReviewTask = new TransformBlock<Claim, Claim>(
            //        c => CreateReviewTaskBlock.Execute(c),
            //        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            _createTransferClaimRequest = new TransformBlock<Claim, Claim>(
                    c => CreateClaimTransferRequestsBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            _clearLocks = new TransformBlock<Claim, Claim>(
                c => ClearLocksBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            _validateClaim = new TransformBlock<Claim, Claim>(
                    c => ValidateClaimBlock.Execute(c),
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });

            _setCustomCode18 = new TransformBlock<Claim, Claim>(
                    c =>
                        {
                            SetCustomCode18Block.Execute(c);
                            UpdateMigrationStatus(c);
                            return c;
                        },
                    new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 });

            _logClaimState = new ActionBlock<Claim>(
                c =>
                    {
                        Logger.InfoFormat("Claim status is\r\n{0}", JObject.FromObject(c));
                    });
            
            //_attachClaimToPolicy.LinkTo(_createClaimReviewTask, new DataflowLinkOptions { PropagateCompletion = true });
            //_createClaimReviewTask.LinkTo(_executeExcessAndDeductibles, new DataflowLinkOptions { PropagateCompletion = true });
            // Created next line to avoid the review task creation.
            _attachClaimToPolicy.LinkTo(_executeExcessAndDeductibles, new DataflowLinkOptions { PropagateCompletion = true });
            _executeExcessAndDeductibles.LinkTo(_createTransferClaimRequest, new DataflowLinkOptions { PropagateCompletion = true });
            _createTransferClaimRequest.LinkTo(_createTransferNameRequests, new DataflowLinkOptions { PropagateCompletion = true });
            _createTransferNameRequests.LinkTo(_validateClaim, new DataflowLinkOptions { PropagateCompletion = true });
            _validateClaim.LinkTo(_clearLocks, new DataflowLinkOptions { PropagateCompletion = true });
            _clearLocks.LinkTo(_setCustomCode18, new DataflowLinkOptions { PropagateCompletion = true });
            _setCustomCode18.LinkTo(_logClaimState, new DataflowLinkOptions {PropagateCompletion = true});
        }
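Most blocks above are created with MaxDegreeOfParallelism = 1, which makes each block process one claim at a time and lets its delegate touch non-thread-safe state without extra locking. A minimal sketch of that idea with a hypothetical running total:

        static ActionBlock<int> CreateSerialCounter()
        {
            var total = 0;
            return new ActionBlock<int>(n =>
            {
                total += n; // safe only because the block runs its delegate serially
                Console.WriteLine($"running total: {total}");
            }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1 });
        }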
Example #51
        private static void LinearizeDatabase(Args arguments)
        {
            // and a couple of checks here
            Contract.Requires(arguments.InputDirectory != null, "You must specify an input directory");
            Contract.Requires(Directory.Exists(arguments.InputDirectory), "The input directory must exist");
            var collectedArtifacts = new MultiValueDictionary<int, MLArtifact>();
            var currentTicks = Environment.TickCount;
            var linearFile = Path.Combine(arguments.InputDirectory, $"{currentTicks}.csv");
            var linearOutput = TextWriter.Synchronized(new StreamWriter(linearFile));

            s_logger.Info($"Linearizing to [{linearFile}]");
            // write the headers
            MLArtifact.WriteColumnsToStream(linearOutput);
            // so now we are ready to linearize
            var linearizeBlock = new TransformBlock<LinearizeArtifactsInput, TimedActionResult<LinearizeArtifactsOutput>>(i =>
            {
                var action = new LinearizeArtifacts(linearOutput);
                return action.PerformAction(i);
            },
            new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = arguments.AppConfig.ConcurrencyConfig.MaxArtifactLinearizationTasks
            });
            var collectLinearResultsBlock = new ActionBlock<TimedActionResult<LinearizeArtifactsOutput>>(i =>
            {
                if (i.ExecutionStatus)
                {
                    collectedArtifacts.Add(i.Result.NumQueues, i.Result.Linear);
                }
            },
            // enforce serial
            new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1
            });

            // connect
            linearizeBlock.LinkTo(collectLinearResultsBlock, new DataflowLinkOptions { PropagateCompletion = true });
            // and post the tasks
            var posted = 0;

            foreach (var hashDir in Directory.EnumerateDirectories(arguments.InputDirectory))
            {
                linearizeBlock.Post(new LinearizeArtifactsInput(hashDir));
                ++posted;
            }
            s_logger.Info($"Posted {posted} linearizing tasks, waiting...");
            linearizeBlock.Complete();
            // and wait
            collectLinearResultsBlock.Completion.Wait();
            // and close...
            linearOutput.Close();
            // now, scale to create the samples...
            s_logger.Info($"Creating {arguments.NumSamples} samples of size {arguments.SampleSize}");
            var scale = new Dictionary <int, int>();

            foreach (var entry in collectedArtifacts)
            {
                var queueCount = entry.Key;
                var entryCount = entry.Value.Count;
                var proportion = 1.0 * Math.BigMul(entryCount, arguments.SampleSize) / (1.0 * posted);
                scale[queueCount] = (int)Math.Ceiling(proportion);
            }
            // we have the scale, lets post tasks here
            var createSampleBlocks = new ActionBlock<SampleArtifactsInput>(i =>
            {
                var action = new SampleArtifacts();
                action.PerformAction(i);
            },
            // one per each sample
            new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = arguments.NumSamples
            });

            // post some tasks in here
            for (var i = 0; i < arguments.NumSamples; ++i)
            {
                createSampleBlocks.Post(new SampleArtifactsInput(Path.Combine(arguments.InputDirectory, $"{currentTicks}-sample{i}.csv"), scale, collectedArtifacts));
            }
            // and wait...
            createSampleBlocks.Complete();
            createSampleBlocks.Completion.Wait();
            // done...
        }
Example #52
        public static void Start(List <string> urls)
        {
            StringComparison   comparison = StringComparison.InvariantCultureIgnoreCase;
            Predicate <string> linkFilter = link =>
                                            link.IndexOf(".aspx", comparison) != -1 ||
                                            link.IndexOf(".php", comparison) != -1 ||
                                            link.IndexOf(".htm", comparison) != -1 ||
                                            link.IndexOf(".html", comparison) != -1 ||
                                            link.EndsWith(".com", comparison) ||
                                            link.EndsWith(".net", comparison);
            Predicate <string> imgFilter = url =>
                                           url.EndsWith(".jpg", comparison) ||
                                           url.EndsWith(".png", comparison) ||
                                           url.EndsWith(".gif", comparison);

            var downloader = new TransformBlock<string, string>(
                async url =>
            {
                // Because the HTTP call is awaited (IOCP-based), the thread-pool worker
                // thread returns to the pool while the request is in flight.
                var client = new HttpClient();
                string result = await client.GetStringAsync(url);
                return result;
            }, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 2
            });

            var contentBroadcaster = new BroadcastBlock <string>(s => s);

            var linkParser = new TransformManyBlock <string, string>(
                (html) =>
            {
                var output = new List <string>();
                var links  = _linkRegexHRef.Matches(html);
                foreach (Match item in links)
                {
                    var value = item.Groups["LINK"].Value;
                    output.Add(value);
                }
                return(output);
            });


            var imgParser = new TransformManyBlock <string, string>(
                (html) =>
            {
                var output = new List <string>();
                var images = _imgRegex.Matches(html);
                foreach (Match item in images)
                {
                    var value = item.Groups["IMG"].Value;
                    output.Add(value);
                }
                return(output);
            });

            var writer = new ActionBlock<string>(async url =>
            {
                var client = new HttpClient();
                // Because the download is awaited (IOCP-based), the thread-pool worker
                // thread returns to the pool while the request is in flight.
                byte[] buffer = await client.GetByteArrayAsync(url);
                string fileName = Path.GetFileName(url);

                Directory.CreateDirectory(AppDomain.CurrentDomain.BaseDirectory + @"\Images\");
                string name = AppDomain.CurrentDomain.BaseDirectory + @"\Images\" + fileName;

                using (Stream srm = File.OpenWrite(name))
                {
                    await srm.WriteAsync(buffer, 0, buffer.Length);
                }
            });

            var linkBroadcaster = new BroadcastBlock <string>(s => s);

            IDisposable disposeAll = new CompositeDisposable(
                // from [downloader] to [contentBroadcaster]
                downloader.LinkTo(contentBroadcaster),
                // from [contentBroadcaster] to [imgParser]
                contentBroadcaster.LinkTo(imgParser),
                // from [contentBroadcaster] to [linkParser]
                contentBroadcaster.LinkTo(linkParser),
                // from [linkParser] to [linkBroadcaster]
                linkParser.LinkTo(linkBroadcaster),
                // conditional link from [linkBroadcaster] to [downloader]
                linkBroadcaster.LinkTo(downloader, linkFilter),
                // from [linkBroadcaster] to [writer]
                linkBroadcaster.LinkTo(writer, imgFilter),
                // from [imgParser] to [writer]
                imgParser.LinkTo(writer));

            foreach (var url in urls)
            {
                downloader.Post(url);
            }

            Console.WriteLine("Press any key to exit...");
            Console.ReadLine();
            downloader.Complete();
            disposeAll.Dispose();
        }
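The crawler above fans its content out through BroadcastBlock instances. Unlike a BufferBlock, a BroadcastBlock offers every message to all of its linked targets but only holds the most recent value, so a target that is not ready to accept can miss items; a minimal standalone sketch with made-up names:

        static async Task BroadcastSketch()
        {
            var broadcast = new BroadcastBlock<int>(n => n); // clone function: identity
            var evens = new ActionBlock<int>(n => Console.WriteLine($"even: {n}"));
            var all = new ActionBlock<int>(n => Console.WriteLine($"any:  {n}"));

            var opts = new DataflowLinkOptions { PropagateCompletion = true };
            broadcast.LinkTo(evens, opts, n => n % 2 == 0); // per-link filter
            broadcast.LinkTo(all, opts);

            for (var i = 0; i < 5; i++)
                broadcast.Post(i);
            broadcast.Complete();

            await Task.WhenAll(evens.Completion, all.Completion);
        }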
Example #53
        /// <summary>
        /// Initializes a new instance of the <see cref="MultiplexerClientChannel"/> class.
        /// </summary>
        /// <param name="builder">The channel builder.</param>
        /// <param name="count">The number of channels to create.</param>
        /// <param name="inputBufferSize">The input buffer bounded capacity.</param>
        /// <param name="outputBufferSize">The output buffer bounded capacity.</param>
        /// <param name="channelCommandProcessor">The workflow for processing commands.</param>
        /// <exception cref="System.ArgumentNullException"></exception>
        /// <exception cref="System.ArgumentOutOfRangeException"></exception>
        public MultiplexerClientChannel(
            IEstablishedClientChannelBuilder builder,
            int count            = 5,
            int inputBufferSize  = 1,
            int outputBufferSize = 1,
            IChannelCommandProcessor channelCommandProcessor = null)
        {
            if (builder == null)
            {
                throw new ArgumentNullException(nameof(builder));
            }
            if (count <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(count));
            }

            // Create observable collections to allow synchronization
            var channelCreatedHandlers         = new ObservableCollection <Func <ChannelInformation, Task> >();
            var channelDiscardedHandlers       = new ObservableCollection <Func <ChannelInformation, Task> >();
            var channelCreationFailedHandlers  = new ObservableCollection <Func <FailedChannelInformation, Task <bool> > >();
            var channelOperationFailedHandlers = new ObservableCollection <Func <FailedChannelInformation, Task <bool> > >();

            ChannelCreatedHandlers         = channelCreatedHandlers;
            ChannelDiscardedHandlers       = channelDiscardedHandlers;
            ChannelCreationFailedHandlers  = channelCreationFailedHandlers;
            ChannelOperationFailedHandlers = channelOperationFailedHandlers;

            // Global input buffers
            var inputOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity        = inputBufferSize,
                MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded
            };

            _inputMessageBufferBlock      = new BufferBlock <Message>(inputOptions);
            _inputNotificationBufferBlock = new BufferBlock <Notification>(inputOptions);
            _channelCommandProcessor      = channelCommandProcessor ?? new ChannelCommandProcessor();
            // Uses the same channel command processor for all instances
            // to avoid problems with commands responses being received on different channels.
            builder.ChannelBuilder.WithChannelCommandProcessor(_channelCommandProcessor);
            _processCommandTransformBlock = new TransformBlock<Command, Command>(c =>
            {
                if (_channelCommandProcessor.TrySubmitCommandResult(c))
                {
                    return null;
                }
                return c;
            },
            inputOptions);
            _inputCommandBufferBlock = new BufferBlock <Command>(inputOptions);
            _processCommandTransformBlock.LinkTo(_inputCommandBufferBlock, c => c != null);
            _processCommandTransformBlock.LinkTo(DataflowBlock.NullTarget <Command>(), c => c == null);

            // The global output buffer
            _outputBufferBlock = new BufferBlock <Envelope>(new DataflowBlockOptions()
            {
                BoundedCapacity = outputBufferSize
            });

            // An output action block per channel
            _outputActionBlocks = new ActionBlock <Envelope> [count];
            _channels           = new IOnDemandClientChannel[count];
            _listeners          = new IChannelListener[count];

            for (var i = 0; i < _channels.Length; i++)
            {
                // Add an instance suffix to the builder
                var currentBuilder = builder
                                     .Copy()
                                     .WithInstance($"{builder.Instance}-{i+1}");
                var channel = new OnDemandClientChannel(currentBuilder);

                // Synchronize the handlers
                AttachCollection(channelCreatedHandlers, channel.ChannelCreatedHandlers);
                AttachCollection(channelDiscardedHandlers, channel.ChannelDiscardedHandlers);
                AttachCollection(channelCreationFailedHandlers, channel.ChannelCreationFailedHandlers);
                AttachCollection(channelOperationFailedHandlers, channel.ChannelOperationFailedHandlers);

                // Setup the listener for the channel
                _listeners[i] = new DataflowChannelListener(
                    _inputMessageBufferBlock,
                    _inputNotificationBufferBlock,
                    _processCommandTransformBlock);

                // Create a single bounded action block for each channel
                _outputActionBlocks[i] = new ActionBlock <Envelope>(async e => await SendToChannelAsync(channel, e).ConfigureAwait(false),
                                                                    new ExecutionDataflowBlockOptions()
                {
                    BoundedCapacity = 1, MaxDegreeOfParallelism = 1
                });
                _outputBufferBlock.LinkTo(_outputActionBlocks[i], new DataflowLinkOptions()
                {
                    PropagateCompletion = true
                });

                _channels[i] = channel;
            }

            _semaphore = new SemaphoreSlim(1, 1);
        }
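The constructor above load-balances its output buffer across several per-channel ActionBlocks with BoundedCapacity = 1: a busy worker postpones the offer, so the buffer hands the message to the next linked target instead. A minimal standalone sketch of the same pattern with made-up names:

        static async Task LoadBalanceSketch()
        {
            var buffer = new BufferBlock<int>();
            var workers = Enumerable.Range(0, 3)
                .Select(id => new ActionBlock<int>(async n =>
                {
                    await Task.Delay(100); // simulate I/O
                    Console.WriteLine($"worker {id} handled {n}");
                }, new ExecutionDataflowBlockOptions { BoundedCapacity = 1 }))
                .ToArray();

            foreach (var worker in workers)
                buffer.LinkTo(worker, new DataflowLinkOptions { PropagateCompletion = true });

            for (var i = 0; i < 12; i++)
                buffer.Post(i);
            buffer.Complete();

            await Task.WhenAll(workers.Select(w => w.Completion));
        }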
        public async Task TestEncapsulate_EncapsulateBoundedTarget()
        {
            // source->||BoundedTransform->buffer||->sink

            int messagesReceived = 0;
            var transform = new TransformBlock<int, int>(x => {
                messagesReceived++;
                return x;
            }, new ExecutionDataflowBlockOptions() { BoundedCapacity = 1 });

            var buffer = new BufferBlock<int>();
            transform.LinkTo(buffer);
            var ignored = transform.Completion.ContinueWith(completion => buffer.Complete(), TaskScheduler.Default);

            IPropagatorBlock<int, int> encapsulated = DataflowBlock.Encapsulate(transform, buffer);
            encapsulated.LinkTo(new ActionBlock<int>(x => { }));

            var source = new BufferBlock<int>();
            source.LinkTo(encapsulated);
            ignored = source.Completion.ContinueWith(completion => encapsulated.Complete(), TaskScheduler.Default);

            // Feed
            const int messagesSent = 10;
            source.PostRange(0, messagesSent);
            source.Complete();

            await encapsulated.Completion;
            Assert.Equal(expected: messagesSent, actual: messagesReceived);
        }
Example #55
        public async Task DoNetwork()
        {
            OpenCL.IsEnabled = false;
            var cacheFile = System.Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData) + "\\stitch_cache.json";

            State state = new State();

            try
            {
                state = JsonConvert.DeserializeObject<State>(File.ReadAllText(cacheFile));
                Console.WriteLine($"Using state from {cacheFile}");
            }
            catch
            {
                Console.WriteLine("Couldn't load cache");
            }
            //state = new State();
            AppState = state;


            var workerPool = new LimitedConcurrencyLevelTaskScheduler(Math.Max(Environment.ProcessorCount - 2, 1)); // / 2, 1));
            //var workerPool = new LimitedConcurrencyLevelTaskScheduler(1);
            var snapshotState = new ActionBlock <State>((s) =>
            {
                var content = "";
                s.Lock(lockedState => content = JsonConvert.SerializeObject(lockedState));
                WriteAllTextWithBackup(cacheFile, content);
                this.Dispatcher.Invoke(() => Joins.ItemsSource = AppState.Joins);
            }, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1
            });

            state.ChangeListener = snapshotState;

            var blockOptions = new ExecutionDataflowBlockOptions
            {
                TaskScheduler          = workerPool,
                MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded // Handled by underlying scheduler
            };

            //var sourceDir = "C:/Users/Xavier/Source/ori-tracker/MapStitcher/Screenshots";
            var sourceDir = "C:/Users/Xavier/Source/ori-tracker/MapStitcher/4KScreenshots";

            /*
             * IMagickImage image1 = new MagickImage(System.IO.Path.GetFullPath($"{sourceDir}/../Temp/forlorn-1.png"));
             * image1 = image1.Clone();
             * var settings = new MorphologySettings
             * {
             *  Channels = Channels.Alpha,
             *  Method = MorphologyMethod.Distance,
             *  Kernel = Kernel.Euclidean,
             *  KernelArguments = "1,50!"
             * };
             *
             * image1.Alpha(AlphaOption.Set);
             * image1.VirtualPixelMethod = VirtualPixelMethod.Transparent;
             * image1.Morphology(settings);
             * image1.Write(System.IO.Path.GetFullPath($"{sourceDir}/../Temp/forlorn-test.png"));
             */

            /*
             * MagickImage image1 = new MagickImage($"{sourceDir}/sorrow-1.png");
             * MagickImage image2 = new MagickImage($"{sourceDir}/sorrow-2.png");
             */

            /*
             * var sourceFiles = new List<string>
             * {
             *  $"{sourceDir}\\387290_20180314160604_1.png",
             * };
             */
            var sourceFiles = Directory.GetFiles(sourceDir, "*.png");

            /*
             * var sourceFiles = new List<string>
             * {
             *  $"{sourceDir}\\forlorn-1.png",
             *  $"{sourceDir}\\forlorn-2.png",
             *  $"{sourceDir}\\forlorn-3.png",
             * };
             */
            //state.ClearJoins();

            /*
             * state.ClearNeedle(new NeedleKey { Key = $"{sourceDir}/forlorn-3.png", Gravity = Gravity.West });
             * state.ClearJoin($"{sourceDir}/forlorn-2.png", $"{sourceDir}/forlorn-3.png");
             * state.ClearJoin($"{sourceDir}/forlorn-1.png", $"{sourceDir}/forlorn-3.png");
             * state.ClearJoin($"{sourceDir}/forlorn-2.png", $"{sourceDir}/forlorn-1.png");
             */
            this.Dispatcher.Invoke(() => SourceImages.ItemsSource = SourceImages2.ItemsSource = sourceFiles);
            this.Dispatcher.Invoke(() => Joins.ItemsSource        = AppState.Joins);
            UpdateUI();

            var loadFromDiskBlock = new TransformBlock <string, string>(path =>
            {
                // TODO: Make this a load and crop task
                var task = new StitchTask($"Load and crop {System.IO.Path.GetFileName(path)}");
                this.Dispatcher.Invoke(() => Tasks.Add(task));
                state.GetOrAddImage(path, () =>
                {
                    var image        = new MagickImage(path);
                    var originalSize = new Size(image.Width, image.Height);
                    int sideMargin   = (int)(image.Width * 0.15); // The sides have a subtle animated mask over them. 280px wide on 1920px resolution. Crop them out.
                    int topMargin    = (int)(image.Height * 0.17);
                    int bottomMargin = (int)(image.Height * 0.15);
                    var bounds       = new MagickGeometry(sideMargin, topMargin, image.Width - sideMargin * 2, image.Height - bottomMargin - topMargin);
                    image.Crop(bounds);
                    image.RePage();
                    //image.Write("C:\\Users\\Xavier\\Source\\ori-tracker\\MapStitcher\\Temp\\" + System.IO.Path.GetFileName(path));
                    return(image);
                });
                task.Complete("Done", false);
                return(path);
            }, blockOptions);

            var gravities = new TransformManyBlock <string, NeedleKey>(path =>
            {
                return(allGravities.Select(g => new NeedleKey()
                {
                    Key = path, Gravity = g
                }));
            }, blockOptions);

            var findNeedleBlock = new TransformBlock <NeedleKey, NeedleKey>(needle =>
            {
                var task = new FindNeedleTask(state, needle);
                this.Dispatcher.Invoke(() => Tasks.Add(task));
                task.Run();
                return(needle);
            }, blockOptions);

            var findJoinBlock = new TransformBlock <SearchKey, string>(t =>
            {
                var haystack = t.Item1;
                var needle   = t.Item2;

                var task = new SearchTask(state, haystack, needle);
                this.Dispatcher.Invoke(() => Tasks.Add(task));

                task.Run();

                Interlocked.Increment(ref completedSearchTasks);
                return haystack; // TODO: Figure out best thing to propagate. Maybe when match found?
            }, blockOptions);

            var broadcaster = new BroadcastBlock <string>(null);
            var cartesian   = new CartesianProductBlock <string, NeedleKey>();

            var propagate = new DataflowLinkOptions {
                PropagateCompletion = true
            };
            var headBlock = loadFromDiskBlock;

            headBlock.LinkTo(broadcaster, propagate);
            broadcaster.LinkTo(gravities, propagate);
            gravities.LinkTo(findNeedleBlock, propagate);

            // Don't propagate completion from left/right sources for the cartesian join. It should
            // complete when _both_ are done (which is its default behaviour).
            broadcaster.LinkTo(cartesian.Left, propagate);
            findNeedleBlock.LinkTo(cartesian.Right, propagate);

            var countTotals = new TransformManyBlock <Tuple <string, NeedleKey>, SearchKey>(t =>
            {
                var haystack = t.Item1;
                var needle   = t.Item2;
                var none     = Enumerable.Empty <SearchKey>();

                if (haystack == needle.Key || !state.GetNeedle(needle).MeetsThreshold())
                {
                    return(none);
                }

                var existingJoins  = state.Joins;
                var connectedJoins = new HashSet <HashSet <string> >();

                foreach (var join in existingJoins)
                {
                    var found = false;
                    foreach (var connectedSubset in connectedJoins)
                    {
                        if (connectedSubset.Contains(join.Image1) || connectedSubset.Contains(join.Image2))
                        {
                            connectedSubset.Add(join.Image1);
                            connectedSubset.Add(join.Image2);
                            found = true;
                            break;
                        }
                    }

                    if (!found)
                    {
                        var newSubset = new HashSet <string>();
                        newSubset.Add(join.Image1);
                        newSubset.Add(join.Image2);
                        connectedJoins.Add(newSubset);
                    }
                }
                connectedJoins.Aggregate(new HashSet <HashSet <string> >(), (acc, x) => {
                    var found = false;
                    foreach (var connectedSubset in acc)
                    {
                        if (connectedSubset.Overlaps(x))
                        {
                            connectedSubset.UnionWith(x);
                            found = true;
                            break;
                        }
                    }
                    if (!found)
                    {
                        acc.Add(x);
                    }
                    return(acc);
                });

                if (connectedJoins.Any(x => x.Contains(haystack) && x.Contains(needle.Key)))
                {
                    Console.WriteLine("Two images already connected via transitive joins, skipping");
                    return(none);
                }
                totalSearchTasks++;
                return(Enumerable.Repeat(SearchKey.Create(t.Item1, t.Item2), 1));
            }, new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = 1
            });

            cartesian.LinkTo(countTotals, propagate);
            countTotals.LinkTo(findJoinBlock, propagate);

            var sink = new ActionBlock <string>(s => { });

            findJoinBlock.LinkTo(sink, propagate);

            foreach (var file in sourceFiles)
            {
                headBlock.Post(file);
            }
            headBlock.Complete();

            await sink.Completion.ContinueWith(_ =>
            {
                Console.WriteLine("Pipeline Finished");

                /*
                 * this.Dispatcher.Invoke(() => TaskGrid.ItemsSource = Tasks.Where(x =>
                 * {
                 *  if (x is SearchTask)
                 *  {
                 *      var task = (SearchTask)x;
                 *      return task.Name.Contains("Found");
                 *      return task.searchResult.MeetsThreshold();
                 *      return task.searchResult.Distance < 2500;
                 *  }
                 *  return false;
                 * }));
                 */
                Dictionary <string, Point> completedJoins = new Dictionary <string, Point>();

                var remainingJoins = new List <State.Join>(state.Joins);
                var rejects        = new List <State.Join>();
                var images         = new MagickImageCollection();
                var lastCycleCount = 0;

                var morphologySettings = new MorphologySettings
                {
                    Channels        = Channels.Alpha,
                    Method          = MorphologyMethod.Distance,
                    Kernel          = Kernel.Euclidean,
                    KernelArguments = "1,50!"
                };

                while (remainingJoins.Count > 0 && remainingJoins.Count != lastCycleCount)
                {
                    lastCycleCount = remainingJoins.Count;
                    foreach (var join in remainingJoins)
                    {
                        if (completedJoins.Count == 0)
                        {
                            var tempPath  = System.IO.Path.GetTempFileName();
                            var tempPath2 = System.IO.Path.GetTempFileName();
                            // Initial seed
                            var i1 = state.Image(join.Image1).Clone();
                            var i2 = state.Image(join.Image2).Clone();

                            i1.Alpha(AlphaOption.Set);
                            i1.VirtualPixelMethod = VirtualPixelMethod.Transparent;
                            i1.Morphology(morphologySettings);
                            i1.Write(tempPath);
                            i1.Dispose();
                            i1 = new MagickImage(tempPath);
                            i1.BackgroundColor = new MagickColor(18, 18, 18);

                            i2.Alpha(AlphaOption.Set);
                            i2.VirtualPixelMethod = VirtualPixelMethod.Transparent;
                            i2.Morphology(morphologySettings);
                            i2.Write(tempPath2);
                            i2.Dispose();
                            i2 = new MagickImage(tempPath2);

                            i2.Page = new MagickGeometry($"{ToOffset(join.JoinPoint.X)}{ToOffset(join.JoinPoint.Y)}");
                            images.Add(i1);
                            images.Add(i2);

                            completedJoins.Add(join.Image1, new Point(0, 0));
                            completedJoins.Add(join.Image2, join.JoinPoint);
                            File.Delete(tempPath);
                            File.Delete(tempPath2);
                        }
                        else
                        {
                            Point offset = join.JoinPoint;
                            if (completedJoins.ContainsKey(join.Image1) && completedJoins.ContainsKey(join.Image2))
                            {
                                // NOOP
                            }
                            else if (completedJoins.ContainsKey(join.Image1))
                            {
                                completedJoins.TryGetValue(join.Image1, out offset);

                                var tempPath  = System.IO.Path.GetTempFileName();
                                var i2        = state.Image(join.Image2).Clone();
                                var joinPoint = new Point(join.JoinPoint.X + offset.X, join.JoinPoint.Y + offset.Y);
                                i2.Alpha(AlphaOption.Set);
                                i2.VirtualPixelMethod = VirtualPixelMethod.Transparent;
                                i2.Morphology(morphologySettings);
                                i2.Write(tempPath);
                                //i2.Dispose();
                                //i2 = new MagickImage(tempPath);
                                i2.Page = new MagickGeometry($"{ToOffset(joinPoint.X)}{ToOffset(joinPoint.Y)}");
                                images.Add(i2);
                                File.Delete(tempPath);
                                completedJoins.Add(join.Image2, joinPoint);
                            }
                            else if (completedJoins.ContainsKey(join.Image2))
                            {
                                completedJoins.TryGetValue(join.Image2, out offset);

                                var tempPath  = System.IO.Path.GetTempFileName();
                                var i1        = state.Image(join.Image1).Clone();
                                var joinPoint = new Point(offset.X - join.JoinPoint.X, offset.Y - join.JoinPoint.Y);

                                i1.Alpha(AlphaOption.Set);
                                i1.VirtualPixelMethod = VirtualPixelMethod.Transparent;
                                i1.Morphology(morphologySettings);
                                i1.Write(tempPath);
                                //i1.Dispose();
                                //i1 = new MagickImage(tempPath);

                                i1.Page = new MagickGeometry($"{ToOffset(joinPoint.X)}{ToOffset(joinPoint.Y)}");

                                images.Add(i1);
                                File.Delete(tempPath);
                                completedJoins.Add(join.Image1, joinPoint);
                            }
                            else
                            {
                                rejects.Add(join);
                            }
                        }
                    }
                    remainingJoins = rejects.ToList();
                    rejects.Clear();
                }
                if (images.Any())
                {
                    var merged = images.Merge();

                    //merged.BackgroundColor = new MagickColor(0, 0, 0);
                    //merged.Alpha(AlphaOption.Remove);
                    //merged.Alpha(AlphaOption.Off);
                    merged.Write("C:\\Users\\Xavier\\Source\\ori-tracker\\MapStitcher\\Temp\\map.png");
                    DisplayImage(Viewer, merged);
                    Console.WriteLine("Done Compositing");
                }
            });
        }