/// <summary>
/// Exercises WriteOnceBlock.LinkTo across every combination of consume-to-accept,
/// PropagateCompletion, Append, and MaxMessages link options, verifying the single
/// posted value reaches every linked target (including targets linked after the post).
/// </summary>
public async Task TestLinkToOptions()
{
    foreach (bool consumeToAccept in DataflowTestHelpers.BooleanValues)
    foreach (bool propagateCompletion in DataflowTestHelpers.BooleanValues)
    foreach (bool append in DataflowTestHelpers.BooleanValues)
    foreach (int maxMessages in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
    {
        // A cloning function forces the block down the consume-to-accept path.
        var wob = consumeToAccept ? new WriteOnceBlock<int>(i => i) : new WriteOnceBlock<int>(null);
        int result = 0;
        const int Count = 10;
        Assert.True(Count % 2 == 0); // half the targets are linked before the post, half after
        var targets = Enumerable.Range(0, Count).Select(i => new ActionBlock<int>(_ => Interlocked.Increment(ref result))).ToArray();
        var options = new DataflowLinkOptions { MaxMessages = maxMessages, Append = append, PropagateCompletion = propagateCompletion };

        // Link the first half before posting; the always-false predicate link should never pass anything.
        for (int i = 0; i < Count / 2; i++)
        {
            wob.LinkTo(targets[i], options, f => false);
            wob.LinkTo(targets[i], options);
        }
        wob.Post(1);
        // Link the second half after the post; WriteOnceBlock replays its value to late links.
        for (int i = Count / 2; i < Count; i++)
        {
            wob.LinkTo(targets[i], options);
        }
        await wob.Completion;

        if (propagateCompletion)
        {
            await Task.WhenAll(from target in targets select target.Completion);
            Assert.Equal(expected: Count, actual: result);
        }
        else
        {
            // This should never fail, but there is a race such that it won't always
            // be testing what we want it to test. Doing so would mean waiting
            // for an arbitrary period of time to ensure something hasn't happened.
            Assert.All(targets, t => Assert.False(t.Completion.IsCompleted));
        }
    }
}
/// <summary>
/// Proxy to link <see cref="L7Pdus"/> to target.
/// <br/><inheritdoc/>
/// </summary>
/// <inheritdoc/>
public IDisposable LinkTo(ITargetBlock<L7PDU> target, DataflowLinkOptions linkOptions) =>
    this.L7Pdus.LinkTo(target, linkOptions);
/// <summary>
/// Proxy to link <see cref="L4Conversations"/> to target.
/// <br/><inheritdoc/>
/// </summary>
/// <inheritdoc/>
public IDisposable LinkTo(ITargetBlock<L4Conversation> target, DataflowLinkOptions linkOptions) =>
    this.L4Conversations.LinkTo(target, linkOptions);
/// <summary>
/// Builds the diagnostics dataflow pipeline: input buffer -> batcher -> optional global
/// filter -> (optional broadcaster + loss reporting) -> per-sink filter -> sink output,
/// then subscribes every input and starts the periodic batch-flush timer.
/// </summary>
public DiagnosticPipeline(
    IHealthReporter healthReporter,
    IReadOnlyCollection<IObservable<EventData>> inputs,
    IReadOnlyCollection<IFilter> globalFilters,
    IReadOnlyCollection<EventSink> sinks,
    DiagnosticPipelineConfiguration pipelineConfiguration = null,
    bool disposeDependencies = false,
    TaskScheduler taskScheduler = null)
{
    Requires.NotNull(healthReporter, nameof(healthReporter));
    Requires.NotNull(inputs, nameof(inputs));
    Requires.Argument(inputs.Count > 0, nameof(inputs), "There must be at least one input");
    Requires.NotNull(sinks, nameof(sinks));
    Requires.Argument(sinks.Count > 0, nameof(sinks), "There must be at least one sink");

    this.batcherTimerDisposalLock = new object();
    this.pipelineConfiguration = pipelineConfiguration ?? new DiagnosticPipelineConfiguration();
    taskScheduler = taskScheduler ?? TaskScheduler.Current;

    // An estimate of how many batches of events to allow inside the pipeline.
    // We want to be able to process a full buffer of events, but also have enough batches in play in case of high concurrency.
    int MaxNumberOfBatchesInProgress = Math.Max(
        5 * this.pipelineConfiguration.MaxConcurrency,
        this.pipelineConfiguration.PipelineBufferSize / this.pipelineConfiguration.MaxEventBatchSize);

    this.Inputs = inputs;
    this.Sinks = sinks;
    // Just play nice and make sure there is always something to enumerate on
    this.GlobalFilters = globalFilters ?? new IFilter[0];
    this.HealthReporter = healthReporter;
    this.cancellationTokenSource = new CancellationTokenSource();
    var propagateCompletion = new DataflowLinkOptions() { PropagateCompletion = true };

    // One disposable for each input subscription.
    this.inputSubscriptions = new List<IDisposable>(inputs.Count);

    // Head of the pipeline: bounded buffer that every input observable pushes into.
    var inputBuffer = new BufferBlock<EventData>(
        new DataflowBlockOptions()
        {
            BoundedCapacity = this.pipelineConfiguration.PipelineBufferSize,
            CancellationToken = this.cancellationTokenSource.Token,
            TaskScheduler = taskScheduler
        });
    this.pipelineHead = inputBuffer;

    // Groups individual events into arrays of up to MaxEventBatchSize.
    var batcher = new BatchBlock<EventData>(
        this.pipelineConfiguration.MaxEventBatchSize,
        new GroupingDataflowBlockOptions()
        {
            BoundedCapacity = this.pipelineConfiguration.PipelineBufferSize,
            CancellationToken = this.cancellationTokenSource.Token,
            TaskScheduler = taskScheduler
        }
    );
    inputBuffer.LinkTo(batcher, propagateCompletion);

    ISourceBlock<EventData[]> sinkSource;
    FilterAction filterTransform;

    // Apply global filters (if any) before fanning out to sinks.
    if (this.GlobalFilters.Count > 0)
    {
        filterTransform = new FilterAction(
            this.GlobalFilters,
            this.cancellationTokenSource.Token,
            MaxNumberOfBatchesInProgress,
            this.pipelineConfiguration.MaxConcurrency,
            healthReporter,
            taskScheduler);
        var globalFiltersBlock = filterTransform.GetFilterBlock();
        batcher.LinkTo(globalFiltersBlock, propagateCompletion);
        sinkSource = globalFiltersBlock;
    }
    else
    {
        sinkSource = batcher;
    }

    // With more than one sink each batch must be broadcast; batches are deep-cloned
    // so sinks cannot observe each other's mutations.
    bool usingBroadcastBlock = sinks.Count > 1;
    if (usingBroadcastBlock)
    {
        var broadcaster = new BroadcastBlock<EventData[]>(
            (events) => events?.Select((e) => e.DeepClone()).ToArray(),
            new DataflowBlockOptions()
            {
                BoundedCapacity = MaxNumberOfBatchesInProgress,
                CancellationToken = this.cancellationTokenSource.Token,
                TaskScheduler = taskScheduler
            });
        sinkSource.LinkTo(broadcaster, propagateCompletion);
        sinkSource = broadcaster;
    }

    this.outputCompletionTasks = new List<Task>(sinks.Count);
    foreach (var sink in sinks)
    {
        ISourceBlock<EventData[]> outputSource = sinkSource;

        if (sink.Filters != null && sink.Filters.Count > 0)
        {
            // Per-sink filter stage.
            filterTransform = new FilterAction(
                sink.Filters,
                this.cancellationTokenSource.Token,
                MaxNumberOfBatchesInProgress,
                this.pipelineConfiguration.MaxConcurrency,
                healthReporter,
                taskScheduler);
            var filterBlock = filterTransform.GetFilterBlock();

            if (usingBroadcastBlock)
            {
                // The loss-reporting propagator surfaces messages dropped between the
                // broadcaster and a slow sink branch to the health reporter.
                var lossReportingPropagator = new LossReportingPropagatorBlock<EventData[]>(this.HealthReporter);
                sinkSource.LinkTo(lossReportingPropagator, propagateCompletion);
                lossReportingPropagator.LinkTo(filterBlock, propagateCompletion);
            }
            else
            {
                sinkSource.LinkTo(filterBlock, propagateCompletion);
            }
            outputSource = filterBlock;
        }
        else if (usingBroadcastBlock)
        {
            var lossReportingPropagator = new LossReportingPropagatorBlock<EventData[]>(this.HealthReporter);
            sinkSource.LinkTo(lossReportingPropagator, propagateCompletion);
            outputSource = lossReportingPropagator;
        }

        OutputAction outputAction = new OutputAction(
            sink.Output,
            this.cancellationTokenSource.Token,
            MaxNumberOfBatchesInProgress,
            this.pipelineConfiguration.MaxConcurrency,
            healthReporter,
            taskScheduler);
        var outputBlock = outputAction.GetOutputBlock();
        outputSource.LinkTo(outputBlock, propagateCompletion);
        this.outputCompletionTasks.Add(outputBlock.Completion);
    }

    // Bridge the IObservable inputs into the pipeline head.
    IObserver<EventData> inputBufferObserver = new TargetBlockObserver<EventData>(inputBuffer, this.HealthReporter);
    foreach (var input in inputs)
    {
        this.inputSubscriptions.Add(input.Subscribe(inputBufferObserver));
    }

    this.disposed = false;
    this.disposeDependencies = disposeDependencies;

    // One-shot timer that periodically forces a (possibly partial) batch out of the
    // batcher; it re-arms itself under the disposal lock so it cannot fire after disposal.
    this.batcherTimer = new Timer(
        (_) =>
        {
            try
            {
                lock (this.batcherTimerDisposalLock)
                {
                    if (!this.disposed)
                    {
                        batcher.TriggerBatch();
                        this.batcherTimer.Change(dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec), period: Timeout.InfiniteTimeSpan);
                    }
                }
            }
            catch { } // best-effort: timer callbacks must never throw (would crash the process)
        },
        state: null,
        dueTime: TimeSpan.FromMilliseconds(this.pipelineConfiguration.MaxBatchDelayMsec),
        period: Timeout.InfiniteTimeSpan);
}
/// <summary>Links the underlying block's <see cref="TileRange"/> output to the given target.</summary>
public IDisposable LinkTo(ITargetBlock<TileRange> target, DataflowLinkOptions linkOptions) =>
    block.LinkTo(target, linkOptions);
/// <summary>
/// Runs the TLS-testing pipeline until <paramref name="cancellationToken"/> stops the
/// queue poller, then drains and shuts the pipeline down in dependency order.
/// </summary>
public async Task Process(CancellationToken cancellationToken)
{
    DataflowLinkOptions linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    // Non-propagating links are used where a block has multiple upstream feeders,
    // so its completion can be triggered manually during shutdown.
    DataflowLinkOptions nonPropagatingLinkOptions = new DataflowLinkOptions();
    ExecutionDataflowBlockOptions singleItemBlockOptions = new ExecutionDataflowBlockOptions { BoundedCapacity = 1, EnsureOrdered = false };
    ExecutionDataflowBlockOptions bufferBlockOptions = new ExecutionDataflowBlockOptions { BoundedCapacity = 100, EnsureOrdered = false };
    ExecutionDataflowBlockOptions unboundedBlockOptions = new ExecutionDataflowBlockOptions { EnsureOrdered = false };
    GroupingDataflowBlockOptions batchingBlockOptions = new GroupingDataflowBlockOptions { EnsureOrdered = false };

    // Polls the work queue for MX hosts to test.
    TransformManyBlock<object, MxHostTestDetails> queuePoller = new TransformManyBlock<object, MxHostTestDetails>(_ => GetMxHostToProcess(_), singleItemBlockOptions);
    // Marks hosts tested too recently so they bypass testing.
    TransformBlock<MxHostTestDetails, MxHostTestDetails> retestPeriodFilter = new TransformBlock<MxHostTestDetails, MxHostTestDetails>((Func<MxHostTestDetails, MxHostTestDetails>)MarkTestToSkip, singleItemBlockOptions);
    BufferBlock<MxHostTestDetails> buffer = new BufferBlock<MxHostTestDetails>(bufferBlockOptions);
    TransformManyBlock<MxHostTestDetails, MxHostTestDetails> duplicateFilter = new TransformManyBlock<MxHostTestDetails, MxHostTestDetails>(_ => FilterHosts(_), singleItemBlockOptions);
    // One tester block per configured thread; each gets its own id.
    List<TransformBlock<MxHostTestDetails, MxHostTestDetails>> mxTestProcessors = Enumerable
        .Range(1, _config.TlsTesterThreadCount)
        .Select(index => new TransformBlock<MxHostTestDetails, MxHostTestDetails>(CreateTlsTester(Guid.NewGuid()), singleItemBlockOptions))
        .ToList();
    BatchBlock<MxHostTestDetails> resultBatcher = new BatchBlock<MxHostTestDetails>(_config.PublishBatchSize, batchingBlockOptions);
    // Flushes partially filled batches; (re)armed by batchFlusher via ResetTimer.
    Timer timer = new Timer(_ =>
    {
        resultBatcher.TriggerBatch();
        _log.LogDebug("Batch triggered.");
    });
    TransformBlock<MxHostTestDetails, MxHostTestDetails> batchFlusher = new TransformBlock<MxHostTestDetails, MxHostTestDetails>(ResetTimer(timer), unboundedBlockOptions);
    TransformBlock<MxHostTestDetails[], MxHostTestDetails[]> resultPublisher = new TransformBlock<MxHostTestDetails[], MxHostTestDetails[]>(_ => PublishResults(_), unboundedBlockOptions);
    ActionBlock<MxHostTestDetails[]> deleteFromQueue = new ActionBlock<MxHostTestDetails[]>(_ => RemoveFromQueue(_), unboundedBlockOptions);

    // Wiring: poller -> retest filter -> (skip: straight to flusher | test: buffer -> dedupe -> testers -> flusher)
    //         -> batcher -> publisher -> queue delete.
    queuePoller.LinkTo(retestPeriodFilter, linkOptions);
    retestPeriodFilter.LinkTo(batchFlusher, nonPropagatingLinkOptions, x => x.SkipTesting);
    retestPeriodFilter.LinkTo(buffer, linkOptions, x => !x.SkipTesting);
    buffer.LinkTo(duplicateFilter, linkOptions);
    mxTestProcessors.ForEach(processor =>
    {
        duplicateFilter.LinkTo(processor, linkOptions);
        processor.LinkTo(batchFlusher, nonPropagatingLinkOptions);
    });
    batchFlusher.LinkTo(resultBatcher, linkOptions);
    resultBatcher.LinkTo(resultPublisher, linkOptions);
    resultPublisher.LinkTo(deleteFromQueue, linkOptions);

    // Gauges for the periodic stats printer.
    var blocks = new Dictionary<string, Func<int>>
    {
        ["Queued"] = () => queuePoller.OutputCount + buffer.Count,
        ["Processing"] = () => _processingFilter.HostCount,
    };
    var processorTasks = mxTestProcessors.Select(processor => processor.Completion).ToArray();

    // Start the stats print loop
    var statsTask = PrintStats(blocks, cancellationToken);

    await RunPipeline(queuePoller, cancellationToken);

    _log.LogInformation("Shutting down TLS Tester");
    queuePoller.Complete();
    _log.LogInformation("Waiting for test processors to complete...");
    await Task.WhenAll(processorTasks);
    _log.LogInformation("Test processors complete. Flushing results...");
    // batchFlusher has only non-propagating inbound links, so it must be completed
    // manually once all of its feeders have finished.
    batchFlusher.Complete();
    _log.LogInformation("Waiting for results flush and final shutdown...");
    await Task.WhenAll(
        statsTask,
        deleteFromQueue.Completion
    );
    _log.LogInformation("TLS tester shut down. Exiting.");
}
/// <summary>
/// Registers <paramref name="target"/> as a link of this source, honoring
/// <see cref="DataflowLinkOptions.Append"/> ordering, immediately offers the current
/// message to it, and returns a disposable that severs the link.
/// </summary>
// TODO: add support for MaxMessages.
public IDisposable LinkTo(ITargetBlock<List<ResourceEvent<TResource>>> target, DataflowLinkOptions linkOptions)
{
    lock (_lock)
    {
        var newLink = new TargetLink
        {
            Target = target,
            LinkOptions = linkOptions,
            LastOfferedMessageReply = DataflowMessageStatus.NotAvailable
        };

        // Append puts the link at the tail of the offer order; otherwise it goes first.
        int position = linkOptions.Append ? _targets.Count : 0;
        _targets.Insert(position, newLink);

        OfferMessageToLink(newLink);

        return Disposable.Create(() => _targets.Remove(newLink));
    }
}
/// <summary>Proxy that links the internal capture buffer block to <paramref name="target"/>.</summary>
public IDisposable LinkTo(ITargetBlock<PmCaptureBase> target, DataflowLinkOptions linkOptions) =>
    this._pmCapturesBufferBlock.LinkTo(target, linkOptions);
/// <summary>
/// Links the builder's final source block to <paramref name="targetBlock"/> using the
/// supplied link options and filter predicate, and returns a builder positioned at the target.
/// </summary>
/// <param name="targetBlock">Block to receive the filtered output; must not be null.</param>
/// <param name="linkOptions">Options controlling the link (completion propagation, ordering, etc.).</param>
/// <param name="predicate">Filter deciding which messages flow across the link.</param>
/// <exception cref="ArgumentNullException"><paramref name="targetBlock"/> is null.</exception>
public ITargetDataflowBuilder<TInput> LinkToTarget(ITargetBlock<TOutput> targetBlock, DataflowLinkOptions linkOptions, Predicate<TOutput> predicate)
{
    if (targetBlock == null)
    {
        // nameof keeps the exception's parameter name rename-safe (was a raw string literal).
        throw new ArgumentNullException(nameof(targetBlock));
    }

    LinkHelper.Link(_finalSourceBlock, targetBlock, linkOptions, predicate);
    return new TargetDataflowBuilder<TInput>(_originalTargetBlock, targetBlock);
}
/// <summary>Links the test-class generator block's output to <paramref name="target"/>.</summary>
public IDisposable LinkTo(ITargetBlock<FormatFile> target, DataflowLinkOptions linkOptions) =>
    GenerateTestClassBlock.LinkTo(target, linkOptions);
/// <include file='..\XmlDocs/CommonXmlDocComments.xml' path='CommonXmlDocComments/Sources/Member[@name="LinkTo"]/*' />
// Explicit interface implementation: this block does not support external linking,
// so the member intentionally throws rather than exposing a real link.
IDisposable ISourceBlock<T>.LinkTo(ITargetBlock<T> target, DataflowLinkOptions linkOptions)
{
    throw new NotSupportedException(SR.NotSupported_MemberNotNeeded);
}
/// <summary>
/// Extension point invoked when a link named <paramref name="name"/> is created from this
/// block to <paramref name="target"/>; the base implementation intentionally does nothing.
/// </summary>
protected virtual void OnLinkTo(string name, ITargetBlock<TOutput> target, DataflowLinkOptions linkOptions) { }
// DataFlow baseline. Similar CPU & throughput results, but memory keeps growing.
/// <summary>
/// Builds and runs a TPL Dataflow benchmark pipeline: timer-driven sources feed parallel
/// transform branches, branches are pairwise joined, and the final extracted value is
/// validated against the expected accumulated result.
/// </summary>
public void RunDataFlowPipeline<T>(Func<int, T> create, Func<int, T, T> initialize, Func<T, T, T> increment, Func<T, T, T, T> add, Func<T, int> extract, bool validateNoLoss, bool validateSync)
{
    int resultCount = 0;
    var dfo = new DataflowLinkOptions();
    dfo.Append = true;
    // Keeps blocks and delegates reachable so they are not collected while the pipeline runs.
    List<object> saved = new List<object>();

    // create several parallel branches of components
    var branches = new ISourceBlock<Wrap<T>>[ParallelBranchCount];
    var sources = new Time.TimerDelegate[SourceCount];
    for (int i = 0; i < SourceCount; i++)
    {
        // make a timer for each source
        var timerSeqId = 0;
        var timer = new TransformBlock<int, int>(ts => timerSeqId++);
        sources[i] = new Time.TimerDelegate((uint timerID, uint msg, UIntPtr userCtx, UIntPtr dw1, UIntPtr dw2) => timer.Post(i));
        saved.Add(timer);

        // branch and generate data
        for (int k = 0; k < ParallelBranchMultiplier; k++)
        {
            int b = (i * ParallelBranchMultiplier) + k;
            // NOTE: the closures below mutate captured state (initInst/incInst) per message;
            // correctness relies on each block processing messages one at a time.
            var initInst = new Wrap<T>(create(b), 0);
            var init = new TransformBlock<int, Wrap<T>>(seqId => initInst = new Wrap<T>(initialize(seqId, initInst.Inner), seqId).DeepClone());
            timer.LinkTo(init, dfo);
            branches[b] = init;
            saved.Add(init);

            // apply a sequence of transforms
            for (int j = 0; j < TransformCount; j++)
            {
                var incInst = new Wrap<T>(create(b), 0);
                var inc = new TransformBlock<Wrap<T>, Wrap<T>>(src => incInst = new Wrap<T>(increment(incInst.Inner, src.Inner), src.ExpectedResult + 1).DeepClone());
                branches[b].LinkTo(inc, dfo);
                branches[b] = inc;
                saved.Add(inc);
            }
            // make sure we didn't lose messages
            // branches[b] = branches[b].DoT(m => CheckMessageId(m.SequenceId + TransformCount, m.Data.ExpectedResult, validateNoLoss), true, true);
        }
    }

    // join all branches pairwise into a single stream
    var fullJoin = branches[0];
    for (int i = 1; i < ParallelBranchCount; i++)
    {
        var joinGo = new GroupingDataflowBlockOptions();
        joinGo.Greedy = false; // non-greedy: only take items when both join inputs are available
        var join = new JoinBlock<Wrap<T>, Wrap<T>>(joinGo);
        fullJoin.LinkTo(join.Target1, dfo);
        branches[i].LinkTo(join.Target2, dfo);
        var addInst = new Wrap<T>(create(i), 0);
        var select = new TransformBlock<Tuple<Wrap<T>, Wrap<T>>, Wrap<T>>(tpl => addInst = new Wrap<T>(add(addInst.Inner, tpl.Item1.Inner, tpl.Item2.Inner), tpl.Item1.ExpectedResult + tpl.Item2.ExpectedResult).DeepClone());
        join.LinkTo(select, dfo);
        fullJoin = select;
        saved.Add(join);
        saved.Add(select);
    }

    // extract final result
    var result = new TransformBlock<Wrap<T>, Wrap<long>>(w => new Wrap<long>(extract(w.Inner), w.ExpectedResult));
    fullJoin.LinkTo(result, dfo);
    saved.Add(result);

    // validate result
    int actionSeqId = 0;
    var final = new ActionBlock<Wrap<long>>(w =>
    {
        resultCount++;
        this.CheckMessageId(++actionSeqId, resultCount, validateNoLoss);
        if (w.Inner != w.ExpectedResult)
        {
            throw new Exception("Unexpected computation result.");
        }
    });
    result.LinkTo(final, dfo);
    saved.Add(final);

    // run the pipeline
    for (int i = 0; i < SourceCount; i++)
    {
        Platform.Specific.TimerStart(1000 / this.frequency, sources[i]);
    }
    // Poll completion once a second, printing progress; 'final' only completes on fault.
    while (!final.Completion.Wait(1000))
    {
        Console.WriteLine(resultCount);
        if (sources.Length == 0)
        {
            throw new Exception("This was here just to keep source alive in release mode, why did it hit?");
        }
    }
    Console.WriteLine("Stopped");
    Assert.AreNotEqual(0, resultCount);
}
/// <inheritdoc/>
public IDisposable LinkTo(ITargetBlock<PublishResponse> target, DataflowLinkOptions linkOptions) =>
    this.publishResponses.LinkTo(target, linkOptions);
/// <summary>
/// Verifies DataflowLinkOptions.MaxMessages: invalid values throw, and a link capped at
/// MaxMessages passes exactly that many messages before auto-unlinking, letting the
/// remainder flow to a later, uncapped link.
/// </summary>
public async Task TestLinkTo_MaxMessages()
{
    // MaxMessages must be positive (or Unbounded, i.e. -1).
    Assert.Throws<ArgumentOutOfRangeException>(() => new DataflowLinkOptions { MaxMessages = -2 });
    Assert.Throws<ArgumentOutOfRangeException>(() => new DataflowLinkOptions { MaxMessages = 0 });

    const int MaxMessages = 3, ExtraMessages = 2;
    // Each mode exercises a different internal propagator implementation.
    for (int mode = 0; mode < 3; mode++)
    {
        int consumedMessages = 0, remainingMessages = 0;
        var options = new DataflowLinkOptions() { MaxMessages = MaxMessages };
        var source = new BufferBlock<int>();
        var target = new ActionBlock<int>(x => consumedMessages++);
        var otherTarget = new ActionBlock<int>(x => remainingMessages++);
        switch (mode)
        {
            case 0:
                source.LinkTo(target, options);
                break;
            case 1:
                source.LinkTo(target, options, x => true); // Injects FilteredLinkPropagator
                break;
            case 2:
                using (source.LinkTo(target)) source.LinkTo(target, options); // Injects NopLinkPropagator
                break;
        }
        // Uncapped fallback link: receives whatever the capped link no longer accepts.
        source.LinkTo(otherTarget);

        source.PostRange(0, MaxMessages + ExtraMessages);
        source.Complete();
        await source.Completion;

        target.Complete();
        otherTarget.Complete();
        await Task.WhenAll(target.Completion, otherTarget.Completion);

        Assert.Equal(expected: MaxMessages, actual: consumedMessages);
        Assert.Equal(expected: ExtraMessages, actual: remainingMessages);
    }
}
/// <summary>Forwards link requests to the wrapped source core.</summary>
IDisposable ISourceBlock<T>.LinkTo(ITargetBlock<T> target, DataflowLinkOptions linkOptions)
{
    return this._core.LinkTo(target, linkOptions);
}
/// <summary>
/// Links the builder's final source block to <paramref name="propagatorBlock"/> with the
/// given options and predicate, returning a builder whose output side is the propagator.
/// </summary>
/// <param name="propagatorBlock">Propagator to receive the filtered output; must not be null.</param>
/// <param name="linkOptions">Options controlling the link (completion propagation, ordering, etc.).</param>
/// <param name="predicate">Filter deciding which messages flow across the link.</param>
/// <exception cref="ArgumentNullException"><paramref name="propagatorBlock"/> is null.</exception>
public IPropagatorDataflowBuilder<TInput, TOutput2> LinkToPropagator<TOutput2>(IPropagatorBlock<TOutput, TOutput2> propagatorBlock, DataflowLinkOptions linkOptions, Predicate<TOutput> predicate)
{
    if (propagatorBlock == null)
    {
        // nameof keeps the exception's parameter name rename-safe (was a raw string literal).
        throw new ArgumentNullException(nameof(propagatorBlock));
    }

    LinkHelper.Link(_finalSourceBlock, propagatorBlock, linkOptions, predicate);
    return new PropagatorDataflowBuilder<TInput, TOutput2>(_originalTargetBlock, _finalSourceBlock, propagatorBlock, _propagateCompletion);
}
/// <summary>
/// Downloads each URI, deserializes the JSON payload into <see cref="DummyData"/>, and
/// returns all results. Download parallelism is capped at _maxParallelism; deserialization
/// runs with unbounded parallelism.
/// </summary>
// NOTE(review): a new HttpClient per call risks socket exhaustion if this is invoked
// frequently; consider a shared instance or IHttpClientFactory — confirm call frequency.
public async Task<IEnumerable<DummyData>> LoadAsync(IEnumerable<Uri> uris)
{
    IList<DummyData> result;
    using (var client = new HttpClient())
    {
        // downloader block with parallelism limit
        var downloader = new TransformBlock<Uri, HttpResponseMessage>(
            async u => await client.GetAsync(u),
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = _maxParallelism, EnsureOrdered = false, SingleProducerConstrained = true });

        // deserializer, unbound parallelism
        var deserializer = new TransformBlock<HttpResponseMessage, DummyData>(
            async r =>
            {
                // Stream the response body straight into the JSON reader (no string copy).
                using (Stream s = await r.Content.ReadAsStreamAsync())
                using (var sr = new StreamReader(s))
                using (JsonReader reader = new JsonTextReader(sr))
                {
                    return (Serializer.Deserialize<DummyData>(reader));
                }
            },
            new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false, SingleProducerConstrained = true });

        // buffer to access result
        var buffer = new BufferBlock<DummyData>(
            new ExecutionDataflowBlockOptions { EnsureOrdered = false });

        // link blocks together
        var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
        downloader.LinkTo(deserializer, linkOptions);
        deserializer.LinkTo(buffer, linkOptions);

        // start sending input; SendAsync applies backpressure if the downloader is full
        foreach (Uri uri in uris)
        {
            await downloader.SendAsync(uri);
        }
        // flag input completed
        downloader.Complete();

        // await whole pipeline, get result
        await deserializer.Completion;
        buffer.TryReceiveAll(out result);
    }
    return (result);
}
/// <summary>
/// Scratchpad entry point: a long sequence of independent demos covering Rx (observables,
/// subjects, schedulers), TPL Dataflow linking, PLINQ, Parallel loops, and classic
/// threading primitives. Each region is a self-contained experiment driven by console input.
/// </summary>
static async Task Main(string[] args)
{
    Form1 f = new Form1();
    f.ShowDialog();
    //TaskScheduler.Current
    // In a WinForms app the UI thread's synchronization context could be captured here:
    //var uiContext = SynchronizationContext.Current;
    WriteLine($"UI线程ID:{CurrentThread.ManagedThreadId}");
    Observable.Interval(TimeSpan.FromSeconds(1))
        .ObserveOn(Scheduler.Default)
        .Subscribe(x => WriteLine($"Interval {x} On Thread:{Environment.CurrentManagedThreadId}"));
    WriteLine("----------分割线----------");
    ReadLine();

    var timer = new System.Timers.Timer(1000) { Enabled = true };
    // Approach 1: explicit conversion plus subscribe/unsubscribe delegates.
    //var ticks = Observable.FromEventPattern<ElapsedEventHandler, ElapsedEventArgs>(
    //    // converter: ElapsedEventHandler -> ElapsedEventArgs
    //    handler => (s, a) => handler(s, a),
    //    // subscribe / unsubscribe
    //    handler => timer.Elapsed += handler,
    //    handler => timer.Elapsed -= handler
    //    );
    // Approach 2: use reflection to convert and (un)subscribe the Timer.Elapsed event.
    var ticks = Observable.FromEventPattern(timer, "Elapsed");
    // data.EventArgs is still strongly typed.
    ticks.Subscribe(data => WriteLine($"On Next:{((ElapsedEventArgs)data.EventArgs).SignalTime}"));
    //ticks.Subscribe(data => WriteLine($"On Next:{data.EventArgs.SignalTime}"));

    var progress = new Progress<int>();
    // Subscribe and unsubscribe delegates for the ProgressChanged event.
    var progressSupports = Observable.FromEventPattern<int>(
        handler => progress.ProgressChanged += handler,
        handler => progress.ProgressChanged -= handler
        );
    // data.EventArgs is a strongly typed int.
    progressSupports.Subscribe(data => WriteLine($"On Next:{data.EventArgs}"));

    var mulitplyBlock = new TransformBlock<int, int>(
        item => item * 2,
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded }
        );
    var substract = new TransformBlock<int, int>(item => item - 2);
    // mulitplyBlock stands in for the computation-heavy block, so parallel processing is
    // enabled; the options above allow any number of concurrent tasks.
    mulitplyBlock.LinkTo(substract);

    var sourceBuffer = new BufferBlock<int>();
    var operation = new DataflowBlockOptions { BoundedCapacity = 1 };
    var targetA = new BufferBlock<int>(operation);
    var targetB = new BufferBlock<int>(operation);
    // When A's buffer is full, data flows to B instead.
    sourceBuffer.LinkTo(targetA);
    sourceBuffer.LinkTo(targetB);

    var mutliplayBlock = new TransformBlock<int, int>(item => item * 2);
    var subtractBlock = new TransformBlock<int, int>(item => item - 2);
    // Once linked, data leaving mutliplayBlock flows into subtractBlock.
    mutliplayBlock.LinkTo(subtractBlock);
    var options = new DataflowLinkOptions { PropagateCompletion = true };
    mutliplayBlock.LinkTo(subtractBlock, options);
    //....
    // Completion of the first block is automatically propagated to the second one.
    mutliplayBlock.Complete();
    await subtractBlock.Completion;

    int[] demoInts = new int[] { 1, 2, 3 };
    int intResult = ParallelSum(demoInts);
    int pinqresult = PLINQSum(demoInts); // result: 6

    Task<int> TaskA = DelayAndReturnAsync(3);
    Task<int> TaskB = DelayAndReturnAsync(2);
    Task<int> TaskC = DelayAndReturnAsync(1);
    var taskArray = new[] { TaskA, TaskB, TaskC };
    foreach (var item in taskArray.OrderByCompletion())
    {
        var rr = await item;
        Trace.Write($"Task Order Result:{rr}");
    }
    WriteLine("-------------------------");
    ReadLine();

    taskArray = new[] { TaskA, TaskB, TaskC };
    var processTasks = (from t in taskArray select AwaitAndProcessAsync(t)).ToArray();
    await Task.WhenAll(processTasks);
    WriteLine("-------------------------");
    ReadLine();

    foreach (var t in taskArray)
    {
        var rrTask = await t;
        Trace.Write($"Trace:{rrTask}");
    }
    ReadLine();

    // await — wait for the async method to finish and fetch its result.
    int Mathresult = await MathTaskDemo();
    WriteLine(Mathresult.ToString());

    #region ReplaySubject <T>缓存值的功能,然后为任何延迟订阅重播它们
    var sub2 = new ReplaySubject<string>();
    sub2.OnNext("a");
    WriteSequenceToConsole(sub2);
    sub2.OnNext("b");
    sub2.OnNext("c");
    Console.ReadKey();
    var sub1 = new Subject<string>();
    sub1.OnNext("a");
    // The first value was pushed before our subscription, so a plain Subject drops it.
    WriteSequenceToConsole(sub1);
    sub1.OnNext("b");
    sub1.OnNext("c");
    Console.ReadKey();
    #endregion

    #region 0628Rx响应式编程学习
    WriteLine("IObserable的抽象实现----------");
    // A concrete implementation of the IObservable abstraction.
    var subject = new Subject<string>();
    WriteSequenceToConsole(subject);
    subject.OnNext("a");
    subject.OnNext("b");
    subject.OnNext("c");
    Console.ReadKey();
    #endregion

    #region 0627Rx响应式编程学习
    var numbers = new MySequenceOfNumbers();
    var observer = new MyConsoleObserver<int>();
    numbers.Subscribe(observer);
    Console.ReadLine();
    #endregion

    #region 生产者消费者模拟
    Console.WriteLine("生产者消费者模拟......");
    for (int i = 1; i < 9; i++)
    {
        Thread T1 = new Thread(new ThreadStart(Producer));
        Thread T2 = new Thread(new ThreadStart(Customer));
        T1.Name = String.Format("生产者线程{0}", i);
        T2.Name = String.Format("消费者线程{0}", i);
        T1.Start();
        T2.Start();
    }
    Console.ReadKey();
    #endregion

    #region 进程互斥
    for (int i = 0; i <= numThreads; i++)
    {
        Thread myThread = new Thread(new ThreadStart(UseResource));
        myThread.Name = String.Format("线程{0}", i + 1);
        myThread.Start();
    }
    Console.ReadKey();
    #endregion

    #region 进程同步(同一进程不同线程处理同一个任务)
    Console.WriteLine("进程1执行完了进程2才能执行.......");
    Thread Thread1 = new Thread(new ThreadStart(Proc1));
    Thread Thread2 = new Thread(new ThreadStart(Proc2));
    Thread1.Start();
    Thread2.Start();
    Console.ReadKey();
    #endregion

    #region 0613Linq使用探索
    foreach (int item in YieldGetList(5))
    {
        Console.WriteLine(item);
    }
    foreach (int item in GetList(5))
    {
        Console.WriteLine(item);
    }
    Console.ReadKey();
    #endregion

    //.SubscribeOn(NewThreadScheduler.Default)
    #region 使用SubscribeOn控制订阅(subscribing)的上下文
    WriteLine("Starting on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
    var source = Observable.Create<int>(
        s =>
        {
            WriteLine("Invoked on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
            s.OnNext(1);
            s.OnNext(2);
            s.OnNext(3);
            s.OnCompleted();
            WriteLine("Finished on threadId:{0}", CurrentThread.ManagedThreadId);
            return (Disposable.Empty);
        });
    source
        .SubscribeOn(CurrentThreadScheduler.Instance)
        .Subscribe(
            s => WriteLine("Received {1} on threadId:{0}", CurrentThread.ManagedThreadId, s),
            () => WriteLine("OnCompleted on threadId:{0}", CurrentThread.ManagedThreadId));
    WriteLine("Subscribed on threadId:{0}", Thread.CurrentThread.ManagedThreadId);
    Read();
    #endregion

    #region Rx默认工作模式探索
    WriteLine("Starting on threadId:{0}", CurrentThread.ManagedThreadId);
    var sub = new Subject<Object>();
    // Subscribe an element handler to the observable sequence.
    sub.Subscribe(oo => WriteLine("Received {1} on threadId:{0}", // the handler logs its own thread id
        CurrentThread.ManagedThreadId, oo));
    ParameterizedThreadStart notify = obj => // delegate that logs the thread it runs on
    {
        // Print the value being pushed into the sequence.
        WriteLine("OnNext({1}) on threadId:{0}", CurrentThread.ManagedThreadId, obj);
        // Notify all subscribed observers about the arrival of the element;
        // obj is the new value pushed into the sequence.
        sub.OnNext(obj);
    };
    notify(1);
    new Thread(notify).Start(2);
    new Thread(notify).Start(3);
    WriteLine("Enter to add new value to queue");
    ReadLine();
    new Thread(notify).Start(5);
    ReadLine();
    #endregion

    #region 0610使用TPL数据流库来实现并行管道
    var TPLDemp = ProcessAsyncChronous();
    // Block until the dataflow demo has run to completion.
    TPLDemp.GetAwaiter().GetResult();
    WriteLine($"使用TPL数据流库来实现并行管道运行完成!");
    ReadLine();
    #endregion

    #region 使用BlockingCollection实现并行管道
    var cts = new CancellationTokenSource();
    Task.Run(() =>
    {
        if (ReadKey().KeyChar == 'c')
        {
            cts.Cancel();
        }
    }, cts.Token);
    var sourceArrays = new BlockingCollection<int>[CollectionNumber];
    for (int i = 0; i < sourceArrays.Length; i++)
    {
        sourceArrays[i] = new BlockingCollection<int>(Count);
    }
    // Func: a delegate with parameters and a return value.
    var convertToDecimal = new PiplineWorker<int, decimal>(sourceArrays, n => Convert.ToDecimal(n * 100), cts.Token, "Decimal Converter");
    // Action: a delegate with parameters and no return value.
    var stringifyNumber = new PiplineWorker<decimal, string>(convertToDecimal.Output, s => WriteLine($"--{s.ToString("C", CultureInfo.GetCultureInfo("en-us"))}"), cts.Token, "String Formatter");
    var outputResultToConsole = new PiplineWorker<string, string>(stringifyNumber.Output, s => WriteLine($"The final result is {s} on thread" + $"{CurrentThread.ManagedThreadId}"), cts.Token, "Console Output");
    try
    {
        Parallel.Invoke(
            () => CreateInitialValues(sourceArrays, cts),
            () => convertToDecimal.Run(),
            () => stringifyNumber.Run(),
            () => outputResultToConsole.Run()
            );
    }
    catch (AggregateException ae)
    {
        foreach (var ex in ae.InnerExceptions)
        {
            WriteLine(ex.Message + ex.StackTrace);
        }
    }
    if (cts.Token.IsCancellationRequested)
    {
        WriteLine("Operation has been canceled! Press Enter to exit.");
    }
    else
    {
        WriteLine("Press Enter to exit.");
    }
    WriteLine("使用BlockingCollection实现并行管道Demo执行完成........");
    ReadLine();
    #endregion

    #region 普通遍历与推送订阅模式(基于Rx库)对比
    // ----- plain enumeration vs push-based subscription -----
    foreach (var e in EnumerableEventSequence())
    {
        Write(e);
    }
    WriteLine();
    WriteLine("IEnumberable");
    IObservable<int> o = EnumerableEventSequence().ToObservable();
    using (IDisposable d = o.Subscribe(Write))
    {
        WriteLine();
        WriteLine("IObservable");
    }
    o = EnumerableEventSequence().ToObservable().SubscribeOn(TaskPoolScheduler.Default);
    using (IDisposable subscription = o.Subscribe(Write))
    {
        WriteLine();
        WriteLine("IObservable async");
        ReadLine();
    }
    WriteLine("-------------");
    WriteLine("Rx库使用示例-----1");
    #endregion

    #region PLinq并行查询--Parallel并行执行函数 05-28之前
    ReadLine();
    var PLinqDemo = from t in GetTypes().AsParallel()
                    select EmulateProcessing(t);
    var ppcts = new CancellationTokenSource();
    ppcts.CancelAfter(TimeSpan.FromSeconds(3));
    try
    {
        // Tune the parallel query's execution parameters.
        PLinqDemo.WithDegreeOfParallelism(Environment.ProcessorCount)
            .WithExecutionMode(ParallelExecutionMode.ForceParallelism)
            .WithMergeOptions(ParallelMergeOptions.Default)
            .WithCancellation(ppcts.Token)
            .ForAll(WriteLine);
    }
    catch (OperationCanceledException e)
    {
        WriteLine("-------------");
        WriteLine("Operation has been canceled.");
    }
    //-------------------------------------------------------------
    WriteLine("-------------");
    WriteLine("Unordered PLINQ query execution.");
    // Unordered parallel query.
    var unOrderderQuery = from t in ParallelEnumerable.Range(1, 30)
                          select t;
    foreach (var item in unOrderderQuery)
    {
        WriteLine(item);
    }
    //-------------------------------------------------------------
    WriteLine("-------------");
    WriteLine("ordered PLINQ query execution.");
    // Ordered parallel query.
    var orderedQuery = from t in ParallelEnumerable.Range(1, 30).AsOrdered()
                       select t;
    foreach (var item in orderedQuery)
    {
        WriteLine(item);
    }
    WriteLine("-------------调整Parallel并行查询参数到此结束!-----------------");
    ReadLine();
    //-------------------------------------------------------------
    Stopwatch sw = new Stopwatch();
    sw.Start();
    // Way 1: plain sequential query — slowest; sequential enumeration.
    var Query = from t in GetTypes()
                select EmulateProcessing(t);
    foreach (string typeName in Query)
    {
        PrintInfo(typeName);
    }
    sw.Stop();
    WriteLine("-------------");
    WriteLine("Sequential Linq Query.");
    WriteLine($"Time elspsed:{sw.Elapsed}");
    WriteLine("Please Enter to continue...");
    ReadLine();
    sw.Reset();
    //-----------------------------------------------------------------------------
    sw.Start();
    // Way 2: parallelized query, results enumerated on a single thread.
    var parallelQuery = from t in GetTypes().AsParallel() // enable query parallelization
                        select EmulateProcessing(t);
    foreach (string typeName in parallelQuery)
    {
        PrintInfo(typeName);
    }
    sw.Stop();
    WriteLine("-------------");
    WriteLine($"Paralle Linq Query.The results are being merged on a single thread");
    WriteLine($"Time elspsed:{sw.Elapsed}");
    WriteLine("Please Enter to continue...");
    ReadLine();
    sw.Reset();
    //-----------------------------------------------------------------------------
    sw.Start();
    // Way 3: parallel query AND parallel enumeration.
    parallelQuery = from t in GetTypes().AsParallel()
                    select EmulateProcessing(t);
    // Invoke the given action for each element of the source, in parallel.
    parallelQuery.ForAll(PrintInfo);
    sw.Stop();
    WriteLine("-------------");
    WriteLine("Parallel Linq Query.The result are being processed in parallel");
    WriteLine($"Time elspsed:{sw.Elapsed}");
    WriteLine("Please Enter to continue...");
    ReadLine();
    sw.Reset();
    //-----------------------------------------------------------------------------
    sw.Start();
    // Way 4: parallel query forced back to sequential, sequential enumeration — least efficient.
    Query = from t in GetTypes().AsParallel().AsSequential()
            select EmulateProcessing(t);
    foreach (string typeName in Query)
    {
        PrintInfo(typeName);
    }
    sw.Stop();
    WriteLine("-------------");
    WriteLine("Parallel Linq Query.transformed into sequential");
    WriteLine($"Time elspsed:{sw.Elapsed}");
    WriteLine("Please Enter to continue...");
    ReadLine();
    WriteLine("-------------Parallel并行查询到此结束!-----------------");
    ReadLine();

    // Execute each of the provided actions, in parallel where possible.
    // Namespace: System.Threading.Tasks
    Parallel.Invoke(() => EmulateProcessing("Task1"),
        () => EmulateProcessing("Task2"),
        () => EmulateProcessing("Task3")
        );
    var pcts = new CancellationTokenSource();
    // Parallel iteration.
    var result = Parallel.ForEach(Enumerable.Range(1, 30),
        new ParallelOptions()
        {
            CancellationToken = pcts.Token,
            MaxDegreeOfParallelism = Environment.ProcessorCount,
            TaskScheduler = TaskScheduler.Default
        }, (i, state) =>
        {
            WriteLine(i);
            if (i == 20)
            {
                // Tell the Parallel loop to stop executing iterations beyond the
                // current one at the system's earliest convenience.
                state.Break();
                WriteLine($"Loop is Stop:{state.IsStopped}");
            }
        });
    WriteLine("-------------");
    // True when all iterations ran to completion and no early exit was requested.
    WriteLine($"Is Completed:{result.IsCompleted}");
    // The lowest iteration index from which ParallelLoopState.Break was called.
    WriteLine($"Lowest break iteration:{result.LowestBreakIteration}");
    WriteLine("-------------||-----------------");
    ReadLine();

    Task tq = MyAsyncWithAwaitQuickly();
    tq.Wait();
    //Task tl = MyAsyncWithAwaitLow();
    //tl.Wait();
    WriteLine("------------------------------");
    ReadLine();
    Task t1 = AsyncLambda();
    t1.Wait();
    #endregion
}
/// <summary>
/// Entry point: runs a three-stage TPL Dataflow pipeline that reads source
/// files into memory, generates test classes from each file's text, and
/// writes every generated class to disk, then prints the elapsed time.
/// </summary>
static void Main()
{
    var sw = new Stopwatch();
    sw.Start();

    // Stage 1: async file I/O — read each source file fully into memory.
    var loadSourceFileToMemory = new TransformBlock <string, string>(async path =>
    {
        Console.WriteLine("Loading to memory '{0}'...", path);
        using (StreamReader SourceReader = File.OpenText(path))
        {
            return (await SourceReader.ReadToEndAsync());
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = MaxDegreeOfParallelismLoad });

    // Stage 2: CPU-bound generation. The delegate performs no awaits, so it is
    // a plain synchronous lambda — the original 'async' keyword only added an
    // unnecessary state machine and a CS1998 "async without await" warning.
    var generateTestClass = new TransformManyBlock <string, TestClass>(text =>
    {
        Console.WriteLine("Generating test classes...");
        TestClassGenerator classGenerator = new TestClassGenerator();
        return (classGenerator.GenerateTestClasses(text));
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = MaxDegreeOfParallelismGenerate });

    // Stage 3: async file I/O — persist each generated class to its own file.
    var saveTestClassToFile = new ActionBlock <TestClass>(async testClass =>
    {
        using (StreamWriter DestinationWriter = File.CreateText(DestPath + testClass.FileName))
        {
            Console.WriteLine("Saving '{0}' on disk...", testClass.FileName);
            await DestinationWriter.WriteAsync(testClass.Source);
            Console.WriteLine("Saved '{0}'!", testClass.FileName);
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = MaxDegreeOfParallelismSave });

    // Completion flows downstream automatically once each stage drains.
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    loadSourceFileToMemory.LinkTo(generateTestClass, linkOptions);
    generateTestClass.LinkTo(saveTestClassToFile, linkOptions);

    foreach (var path in _input)
    {
        loadSourceFileToMemory.Post(path);
    }

    // No more input; wait for the last stage to finish all in-flight work.
    loadSourceFileToMemory.Complete();
    saveTestClassToFile.Completion.Wait();

    sw.Stop();
    Console.WriteLine(sw.ElapsedMilliseconds);
}
/// <summary>
/// Builds and runs a web-crawler Dataflow mesh: a downloader fetches pages,
/// a content broadcaster fans each page out to an image parser and a link
/// parser, discovered links feed back into the downloader, and a writer
/// downloads and saves qualifying images under "Images". The crawl runs for
/// COMPLETE_AFTER_SEC seconds, then the mesh is completed and drained while
/// progress is reported to the console.
/// NOTE(review): 'async void' means callers cannot await this method or
/// observe its exceptions; acceptable only as a top-level handler — confirm.
/// </summary>
static async void StartCrawling() {
    // Ensure the output folder for saved images exists.
    if (!Directory.Exists("Images")) {
        Directory.CreateDirectory("Images");
    }
    try {
        #region Dataflow block Options
        var downloaderOptions = new ExecutionDataflowBlockOptions {
            // enforce fairness, after handling n messages
            // the block's task will be re-schedule.
            // this will give the opportunity for other block
            // to actively process there messages (to avoid over subscription
            // the Tpl dataflow does not schedule all task at once if the machine
            // does not have enough cores)
            MaxMessagesPerTask = DOWNLOADER_MAX_MESSAGE_PER_TASK,
            // by default Tpl dataflow assign a single task per block,
            // but you can control it by using the MaxDegreeOfParallelism
            MaxDegreeOfParallelism = 2, //DOWNLOADER_MAX_DEGREE_OF_PARALLELISM,
            // the size of the block input buffer
            BoundedCapacity = DOWNLOADER_BOUNDED_CAPACITY
        };
        var transformerOptions = new ExecutionDataflowBlockOptions {
            MaxMessagesPerTask = MAX_MESSAGE_PER_TASK,
        };
        var writerOptions = new ExecutionDataflowBlockOptions {
            // NOTE(review): MaxMessagesPerTask is assigned from a
            // *_MAX_DEGREE_OF_PARALLELISM constant and MaxDegreeOfParallelism
            // from a DOWNLOADER_* constant — these look transposed; confirm
            // against the constants' intended meaning.
            MaxMessagesPerTask = WRITER_MAX_DEGREE_OF_PARALLELISM,
            MaxDegreeOfParallelism = DOWNLOADER_MAX_DEGREE_OF_PARALLELISM
        };
        // Completion propagates along every link created with this option.
        var linkOption = new DataflowLinkOptions { PropagateCompletion = true };
        #endregion // Dataflow block Options

        #region Downloader
        // url -> parsed XHTML, or null when the URL is skipped, timed out, or failed.
        var downloader = new TransformBlock <string, XElement>(
            async(url) => {
                try {
                    #region Validation
                    // _urls is the "already crawled" set; TryAdd is atomic,
                    // unlike the commented-out ContainsKey check.
                    //if (_urls.ContainsKey(url))
                    //    return null;
                    if (!_urls.TryAdd(url, true)) {
                        return(null);
                    }
                    #endregion // Validation
                    // using IOCP the thread pool worker thread does return to the pool
                    WebClient wc = new WebClient();
                    // Race the download against a fixed timeout.
                    Task <string> download = wc.DownloadStringTaskAsync(url);
                    Task cancel = Task.Delay(DOWNLOAD_TIMEOUT_SEC * 1000);
                    Task any = await Task.WhenAny(download, cancel).ConfigureAwait(false);
                    #region Timeout validation
                    if (any == cancel) {
                        wc.CancelAsync();
                        WriteToConsole("Cancel: [{0}]", ConsoleColor.Gray, url);
                        return(null);
                    }
                    #endregion // Timeout validation
                    // Safe here: the download task is known to have completed.
                    string html = download.Result;
                    WriteToConsole("Downloaded: {0}", ConsoleColor.White, url);
                    XElement e = HtmlToXElement(html);
                    return(e);
                }
                #region Exception Handling
                catch (WebException ex) {
                    WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, url, ex.Message);
                }
                catch (AggregateException ex) {
                    foreach (var exc in ex.Flatten().InnerExceptions) {
                        WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, url, exc.Message);
                    }
                }
                catch (Exception ex) {
                    WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
                }
                #endregion // Exception Handling
                // Failures are logged and surfaced as null so the mesh keeps flowing.
                return(null);
            }, downloaderOptions);
        #endregion // Downloader

        #region Link Parser
        // page -> every anchor href value found in it.
        var linkParser = new TransformManyBlock <XElement, string>(
            (html) => {
                #region Validation
                if (html == null) {
                    return(Enumerable.Empty <string>());
                }
                #endregion // Validation
                var linkes = from item in html.Descendants()
                             where item.Name.LocalName == "a"
                             let href = item.Attribute("href")
                             where href != null
                             select href.Value;
                try {
                    // Materialize now so enumeration errors are caught here.
                    var result = linkes.ToArray();
                    return(result);
                }
                #region Exception Handling
                catch (Exception ex) {
                    WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
                    return(Enumerable.Empty <string>());
                }
                #endregion // Exception Handling
            }, transformerOptions);
        #endregion // Link Parser

        #region Image Parser
        // page -> every img src value found in it. No null check needed:
        // the downloader link below filters nulls before the broadcaster.
        var imgParser = new TransformManyBlock <XElement, string>(
            (html) => {
                var images = from item in html.Descendants()
                             where item.Name.LocalName == "img"
                             let src = item.Attribute("src")
                             where src != null
                             select src.Value;
                try {
                    var result = images.ToArray();
                    return(result);
                }
                #region Exception Handling
                catch (Exception ex) {
                    WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
                    return(Enumerable.Empty <string>());
                }
                #endregion // Exception Handling
            }, transformerOptions);
        #endregion // Image Parser

        #region Writer
        // image url -> download the bytes and save to disk when large enough.
        var writer = new ActionBlock <string>(async url => {
            try {
                if (string.IsNullOrEmpty(url)) {
                    return;
                }
                WebClient wc = new WebClient();
                // using IOCP the thread pool worker thread does return to the pool
                byte[] buffer = await wc.DownloadDataTaskAsync(url).ConfigureAwait(false);
                string fileName = Path.GetFileName(url);
                #region Validation
                // _images is the "already saved" set, keyed by file name.
                if (!_images.TryAdd(fileName, true)) {
                    return;
                }
                #endregion // Validation
                string name = @"Images\" + fileName;
                using (var image = Image.FromStream(new MemoryStream(buffer))) {
                    // Only keep images above the configured minimum dimensions.
                    if (image.Width > MIN_SIZE.Width && image.Height > MIN_SIZE.Height) {
                        using (Stream srm = OpenWriteAsync(name)) {
                            await srm.WriteAsync(buffer, 0, buffer.Length).ConfigureAwait(false);
                            WriteToConsole("{0}: Width:{1}, Height:{2}", ConsoleColor.Yellow, fileName, image.Width, image.Height);
                        }
                    }
                }
            }
            #region Exception Handling
            catch (WebException ex) {
                WriteToConsole("Error: [{0}]\r\n\t{1}", ConsoleColor.Red, url, ex.Message);
            }
            catch (Exception ex) {
                WriteToConsole("Unexpected error: {0}", ConsoleColor.Red, ex.Message);
            }
            #endregion // Exception Handling
        }, writerOptions);
        #endregion // Writer

        // Sinks for messages that should simply be discarded.
        var garbageA = DataflowBlock.NullTarget <XElement>();
        var garbageB = DataflowBlock.NullTarget <string>();
        // use to broadcast the downloader output to the image and link parsers
        var contentBroadcaster = new BroadcastBlock <XElement>(s => s);
        var linkBroadcaster = new BroadcastBlock <string>(s => s);

        #region LinkTo
        ///////////////////////////////////////////////////////////////////////
        //   garbage <------- downloader <-------------------------          //
        //                        |                               |          //
        //                contentBroadcaster                      |          //
        //                    /        \                          |          //
        //             imgParser     linkParsers--->linkBroadcaster          //
        //                 |                               |                 //
        //               writer <---------------------------                 //
        ///////////////////////////////////////////////////////////////////////
        downloader.LinkTo(contentBroadcaster, linkOption, html => html != null);
        downloader.LinkTo(garbageA); //, linkOption /*, html => html == null*/); // fallback (otherwise empty messages will be stack in the block buffer and the block will never complete)
        contentBroadcaster.LinkTo(imgParser, linkOption);
        contentBroadcaster.LinkTo(linkParser, linkOption);
        linkParser.LinkTo(linkBroadcaster, linkOption);
        StringComparison comparison = StringComparison.InvariantCultureIgnoreCase;
        // Only recurse into absolute http links; only save common image types.
        Predicate <string> linkFilter = link => link.StartsWith("http://");
        Predicate <string> imgFilter = url => url.StartsWith("http://") &&
            (url.EndsWith(".jpg", comparison) ||
             url.EndsWith(".png", comparison) ||
             url.EndsWith(".gif", comparison));
        // Predicate<string> imgToGarbageFilter = url => !imgFilter(url);
        imgParser.LinkTo(writer, linkOption, imgFilter);
        imgParser.LinkTo(garbageB);// , imgToGarbageFilter);
        linkBroadcaster.LinkTo(writer, linkOption, imgFilter);
        linkBroadcaster.LinkTo(downloader, linkOption, linkFilter);
        //linkBroadcaster.LinkTo(garbage);
        #endregion // LinkTo

        // Seed the crawl and let it run for a fixed wall-clock period.
        downloader.Post(URL_CRAWL_TARGET);
        Console.WriteLine("Crawling");
        Thread.Sleep(COMPLETE_AFTER_SEC * 1000);

        #region Complete
        // Stop accepting new URLs; completion propagates through the mesh.
        downloader.Complete();
        #region WriteToConsole ("Try to Complete...")
        ConsoleColor color = ConsoleColor.Yellow;
        WriteToConsole(
            @"Try to Complete (items in the buffer = downloader: is completed = {0}, input={1} , output={2} writer: is completed = {3}, input ={4} linkParser: is completed = {5}, input={6} , output={7} imgParser: is completed = {8}, input={9} , output={10} linkBroadcaster: is completed = {11}, contentBroadcaster: is completed = {12}",
            color,
            downloader.Completion.IsCompleted, downloader.InputCount, downloader.OutputCount,
            writer.Completion.IsCompleted, writer.InputCount,
            linkParser.Completion.IsCompleted, linkParser.InputCount, linkParser.OutputCount,
            imgParser.Completion.IsCompleted, imgParser.InputCount, imgParser.OutputCount,
            linkBroadcaster.Completion.IsCompleted,
            contentBroadcaster.Completion.IsCompleted);
        #endregion // WriteToConsole ("Try to Complete...")
        Task completeAll = Task.WhenAll(
            downloader.Completion,
            linkParser.Completion,
            imgParser.Completion,
            contentBroadcaster.Completion,
            writer.Completion);
        // Poll every 2s, reporting drain progress until every block completes.
        await Task.Run(async() => {
            while (!completeAll.IsCompleted) {
                await Task.Delay(2000).ConfigureAwait(false);
                #region WriteToConsole (status)
                // NOTE(review): color is only ever Yellow or White here, so the
                // Magenta comparison never matches and the status color never
                // alternates — likely meant to compare against Yellow.
                color = color == ConsoleColor.Magenta ? ConsoleColor.White : ConsoleColor.Yellow;
                WriteToConsole(
                    @"Complete Status (items in the buffer = downloader: is completed = {0}, input={1} , output={2} writer: is completed = {3}, input ={4} linkParser: is completed = {5}, input={6} , output={7} imgParser: is completed = {8}, input={9} , output={10} linkBroadcaster: is completed = {11}, contentBroadcaster: is completed = {12} ",
                    color,
                    downloader.Completion.IsCompleted, downloader.InputCount, downloader.OutputCount,
                    writer.Completion.IsCompleted, writer.InputCount,
                    linkParser.Completion.IsCompleted, linkParser.InputCount, linkParser.OutputCount,
                    imgParser.Completion.IsCompleted, imgParser.InputCount, imgParser.OutputCount,
                    linkBroadcaster.Completion.IsCompleted,
                    contentBroadcaster.Completion.IsCompleted);
            }
            #endregion // WriteToConsole (status)
        }).ConfigureAwait(false);
        WriteToConsole("Complete (items in the writer input buffer = {0})", ConsoleColor.Green, writer.InputCount);
        #endregion // Complete
    }
    catch (Exception ex) {
        WriteToConsole("EXCEPTION: {0}", ConsoleColor.DarkRed, ex);
    }
}
/// <summary>
/// Builds and runs the stock-report pipeline: each posted symbol is broadcast
/// to three parallel lookup blocks (intervals, dividends, key stats), the
/// three results are re-joined, serialized to XML, and written to a randomly
/// named file. Blocks the calling thread until the final block completes (or
/// the cancellation token fires), then returns an already-completed task.
/// </summary>
public Task StartPipeline() {
    cancellationTokenSource = new CancellationTokenSource();
    ExecutionDataflowBlockOptions executionDataflowBlockOptions = new ExecutionDataflowBlockOptions {
        CancellationToken = cancellationTokenSource.Token,
        //MaxDegreeOfParallelism = MAXPARA
    };
    broadcastSymbol = new BroadcastBlock <string>(symbol => symbol);
    // Greedy = false: the join only consumes when all three inputs are
    // available. NOTE(review): a join pairs items by arrival order, not by
    // key — per-branch ordering is what keeps triples aligned per symbol;
    // verify this holds with the parallelism settings in use.
    var joinblock = new JoinBlock <List <decimal>, List <Dividend>, KeyStats>(new GroupingDataflowBlockOptions { Greedy = false });
    // NOTE(review): GetCompanyInfo is created but never linked into the
    // pipeline below — confirm it is used elsewhere or remove it.
    GetCompanyInfo = new TransformBlock <string, CompanyInfo>(symbol => { return(RetrieveCompanyInfo(symbol)); }, executionDataflowBlockOptions);
    GetDividendReports = new TransformBlock <string, List <Dividend> >(symbol => { return(RetrieveDividendInfo(symbol)); }, executionDataflowBlockOptions);
    GetKeyStatInfo = new TransformBlock <string, KeyStats>(symbol => { return(RetrieveKeyStats(symbol)); }, executionDataflowBlockOptions);
    // 30-day interval history for each symbol.
    GetIntervalReports = new TransformBlock <string, List <Interval> >(symbol => { return(RetrieveIntervals(symbol, 30)); }, executionDataflowBlockOptions);
    GetChangesOverInterval = new TransformBlock <List <Interval>, List <decimal> >(intervals => { return(ConstructIntervalReport(intervals)); }, executionDataflowBlockOptions);
    // Joined triple -> XML string via XmlSerializer.
    GenerateXmlString = new TransformBlock <Tuple <List <decimal>, List <Dividend>, KeyStats>, string>(tup => {
        var ReportObj = new Report { changeIntervals = tup.Item1, dividends = tup.Item2, keyStats = tup.Item3 };
        XmlSerializer ser = new XmlSerializer(typeof(Report));
        var stringWriter = new StringWriter();
        ser.Serialize(stringWriter, ReportObj);
        return(stringWriter.ToString());
    }, executionDataflowBlockOptions);
    // Write each report to a randomly named .xml file in the working directory.
    GenerateCompleteReport = new ActionBlock <string>(xml => {
        var str = Path.GetRandomFileName().Replace(".", "") + ".xml";
        File.WriteAllText(str, xml);
        Console.WriteLine("Finished File");
    }, executionDataflowBlockOptions);
    var options = new DataflowLinkOptions { PropagateCompletion = true };
    var buffer = new BufferBlock <string>();
    buffer.LinkTo(broadcastSymbol, options);
    //Broadcasts the symbol
    broadcastSymbol.LinkTo(GetIntervalReports, options);
    broadcastSymbol.LinkTo(GetDividendReports, options);
    broadcastSymbol.LinkTo(GetKeyStatInfo, options);
    //Second tier parallel
    GetIntervalReports.LinkTo(GetChangesOverInterval, options);
    //Joins the parallel blocks back together
    GetDividendReports.LinkTo(joinblock.Target2, options);
    GetKeyStatInfo.LinkTo(joinblock.Target3, options);
    GetChangesOverInterval.LinkTo(joinblock.Target1, options);
    joinblock.LinkTo(GenerateXmlString, options);
    GenerateXmlString.LinkTo(GenerateCompleteReport, options);
    // Seed the pipeline with the ticker symbols to report on.
    buffer.Post("F");
    buffer.Post("AGFS");
    buffer.Post("BAC");
    buffer.Post("FCF");
    buffer.Complete();
    // Manual completion/fault fan-out to the three parallel branches.
    // NOTE(review): the links above already use PropagateCompletion, so this
    // continuation appears redundant — confirm before removing.
    broadcastSymbol.Completion.ContinueWith(tsk => {
        if (!tsk.IsFaulted) {
            GetIntervalReports.Complete();
            GetDividendReports.Complete();
            GetKeyStatInfo.Complete();
        } else {
            ((IDataflowBlock)GetIntervalReports).Fault(tsk.Exception);
            ((IDataflowBlock)GetDividendReports).Fault(tsk.Exception);
            ((IDataflowBlock)GetKeyStatInfo).Fault(tsk.Exception);
        }
    });
    //TODO need to finish pipeline and find better implementation
    // NOTE(review): this blocks synchronously and then returns a completed
    // task; returning GenerateCompleteReport.Completion would make the method
    // truly asynchronous (behavior change — intentionally left as is).
    GenerateCompleteReport.Completion.Wait(cancellationTokenSource.Token);
    return(Task.CompletedTask);
}
/// <summary>
/// External merge sort driver. Phase 1 partitions the source file and sorts
/// the partitions in parallel through a Dataflow pipeline
/// (read -> sort -> save), writing sorted chunk files to the splits folder.
/// Phase 2 repeatedly merges the chunk files in batches until a single file
/// remains, which is moved to the destination path.
/// </summary>
public async Task Sort() {
    Console.WriteLine($"[{DateTime.Now}] Read file {Path.GetFileName(_sourceFileName)}");
    Stopwatch stopwatch = Stopwatch.StartNew();
    // NOTE(review): fileSize is computed but never used below — confirm intent.
    var fileSize = new FileInfo(_sourceFileName).Length;
    var maxDegreeOfParallelism = _sortConfig.MaxDegreeOfParallelismPerLevel;
    // Partition size chosen so the parallel sorters fit the memory budget.
    var partitionSize = EstimateSizeOfPartition(_sortConfig.ApproximateMemoryLimit, maxDegreeOfParallelism);
    // Start from clean scratch folders for split and merge intermediates.
    if (Directory.Exists(_splitsFolder)) {
        Directory.Delete(_splitsFolder, true);
    }
    Directory.CreateDirectory(_splitsFolder);
    if (Directory.Exists(_mergeFolder)) {
        Directory.Delete(_mergeFolder, true);
    }
    Directory.CreateDirectory(_mergeFolder);
    // Bounded capacities throttle the reader so it cannot outrun the sorters.
    var batchReader = new BufferBlock <BatchInfo>(new DataflowBlockOptions { BoundedCapacity = (int)(maxDegreeOfParallelism * 0.1 + 1) });
    var sorterBlock = new TransformBlock <BatchInfo, BatchInfo>(l => PartitionSorter(l), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, BoundedCapacity = maxDegreeOfParallelism });
    var batchSaverBlock = new TransformBlock <BatchInfo, string>(l => PartitionSaver(l), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, BoundedCapacity = maxDegreeOfParallelism });
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    batchReader.LinkTo(sorterBlock, linkOptions);
    sorterBlock.LinkTo(batchSaverBlock, linkOptions);
    // Producer: reads the source file and posts partitions into batchReader.
    var partitioner = Partitioner(_sourceFileName, partitionSize, batchReader);
    // Collect the names of all sorted chunk files as they are produced.
    var filesForMerge = await batchSaverBlock.ReceiveAllData();
    await partitioner;
    await batchSaverBlock.Completion;
    Console.WriteLine($"[{DateTime.Now}] File splitted. Elapsed: {stopwatch.Elapsed}");
    BatchBlock <string> sortedBufferBlock = null;
    TransformBlock <IEnumerable <string>, string> mergerBlock = null;
    // Merge rounds: each pass groups the current files into batches and merges
    // every batch into one file, until only the final sorted file is left.
    while (filesForMerge.Count > 1) {
        var estimatedCacheSize = EstimateSizeOfCache(_sortConfig.ApproximateMemoryLimit, filesForMerge.Count, _sortConfig.MaxDegreeOfParallelismPerLevel);
        var batchBlockSize = filesForMerge.Count / _sortConfig.MaxDegreeOfParallelismPerLevel;
        // A batch of one file would merge nothing — force at least pairs.
        if (batchBlockSize == 1) {
            batchBlockSize = 2;
        }
        // Fewer files than parallelism levels: merge them in one larger batch.
        if (batchBlockSize == 0) {
            batchBlockSize = _sortConfig.MaxDegreeOfParallelismPerLevel;
        }
        sortedBufferBlock = new BatchBlock <string>(batchBlockSize);
        mergerBlock = new TransformBlock <IEnumerable <string>, string>(l => Merge(l, estimatedCacheSize), new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = maxDegreeOfParallelism, BoundedCapacity = maxDegreeOfParallelism });
        sortedBufferBlock.LinkTo(mergerBlock, linkOptions);
        await sortedBufferBlock.SendAllData(filesForMerge);
        filesForMerge = await mergerBlock.ReceiveAllData();
        await mergerBlock.Completion;
    }
    if (filesForMerge.Count == 0) {
        Console.WriteLine("Somthing went wrong. Result file is not created");
    } else {
        // The single remaining file is the fully sorted result.
        File.Move(filesForMerge.ElementAt(0), _destFileName, true);
        Console.WriteLine($"[{DateTime.Now}] Order complete. Result file: {_destFileName}. Elapsed: {stopwatch.Elapsed}");
    }
}
/// <summary>
/// Provisions and starts simulated IoT devices through a four-stage Dataflow
/// pipeline: add device -> create device twin -> connect to hub -> send event.
/// One pipeline item is produced per device for every simulation item.
/// </summary>
/// <param name="simulationId">Identifier stored in each device's simulation settings.</param>
/// <param name="simulationName">Simulation name; also a prefix of each device id.</param>
/// <param name="simulationItems">Device-type definitions, each with a device count.</param>
/// <param name="simulationIoTHubOptions">IoT Hub connection details for the devices.</param>
public async Task RunSimulationAsync(string simulationId, string simulationName, IEnumerable <SimulationItem> simulationItems, SimulationIoTHubOptions simulationIoTHubOptions) {
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    // Device registration is parallelized (x4); the later stages use the
    // Dataflow default of one concurrent item.
    var addDeviceBlock = new TransformBlock <DeviceBlockInformation, DeviceBlockInformation>(async(deviceBlockInformation) => {
        await deviceBlockInformation.DeviceSimulatorActor.AddDeviceAsync(deviceBlockInformation.DeviceSettings.DeviceServiceSettings, CancellationToken.None);
        return(deviceBlockInformation);
    }, new ExecutionDataflowBlockOptions() { MaxDegreeOfParallelism = 4 });
    var createTwinBlock = new TransformBlock <DeviceBlockInformation, DeviceBlockInformation>(async(deviceBlockInformation) => {
        await deviceBlockInformation.DeviceSimulatorActor.CreateDeviceTwinAsync(deviceBlockInformation.DeviceSettings.DeviceServiceSettings, CancellationToken.None);
        return(deviceBlockInformation);
    });
    var connectToHubBlock = new TransformBlock <DeviceBlockInformation, DeviceBlockInformation>(async(deviceBlockInformation) => {
        await deviceBlockInformation.DeviceSimulatorActor.ConnectToHubAsync(deviceBlockInformation.DeviceSettings, CancellationToken.None);
        return(deviceBlockInformation);
    });
    ActionBlock <DeviceBlockInformation> sendEventBlock = null;
    // NOTE(review): each device sends a single event here followed by one
    // interval delay; presumably the actor keeps the simulation running on its
    // own after SendEventAsync — confirm against the actor implementation.
    sendEventBlock = new ActionBlock <DeviceBlockInformation>(async(deviceBlockInformation) => {
        await deviceBlockInformation.DeviceSimulatorActor.SendEventAsync();
        await Task.Delay(deviceBlockInformation.DeviceSettings.DeviceServiceSettings.DeviceInterval);
    });
    addDeviceBlock.LinkTo(createTwinBlock, linkOptions);
    createTwinBlock.LinkTo(connectToHubBlock, linkOptions);
    connectToHubBlock.LinkTo(sendEventBlock, linkOptions);
    // Enumerate the simulations and begin to stand up device instances for each one
    foreach (var simulationItem in simulationItems) {
        // Begin producing
        var deviceItems = Enumerable.Range(0, simulationItem.NumberOfDevices);
        foreach (var deviceIndex in deviceItems) {
            // Generate a unique id for this device
            var deviceNumber = deviceIndex.ToString("000000");
            var deviceId = $"{simulationName}_{simulationItem.DeviceType}_{deviceNumber}";
            // One actor per device hosts the device's simulation logic.
            var deviceSimulatorActor = ActorProxy.Create <IDeviceSimulator>(new ActorId(deviceId), deviceActorApplicationUri);
            var deviceSettings = new DeviceSettings() {
                InitialStateJson = simulationItem.InitialState,
                Script = simulationItem.Script,
                MessageType = simulationItem.MessageType,
                SimulationSettings = new SimulationSettings() { SimulationId = simulationId, SimulationName = simulationName },
                Properties = simulationItem.Properties,
                DeviceServiceSettings = new DeviceServiceSettings() {
                    DeviceType = simulationItem.DeviceType,
                    DeviceName = deviceId,
                    IoTHubConnectionString = simulationIoTHubOptions.IotHubConnectionString,
                    IoTHubName = simulationIoTHubOptions.IoTHubName,
                    DeviceInterval = simulationItem.Interval,
                }
            };
            // SendAsync respects back-pressure from the pipeline's first block.
            await addDeviceBlock.SendAsync(new DeviceBlockInformation() {
                DeviceSimulatorActor = deviceSimulatorActor,
                DeviceSettings = deviceSettings,
            });
        }
    }
    // Signal that we've completed adding all the devices
    addDeviceBlock.Complete();
    // Wait for all the devices to be running their simulations
    await sendEventBlock.Completion;
}
/// <summary>
/// Links <paramref name="target"/> to the underlying batch block so it
/// receives the batches this component produces.
/// </summary>
/// <param name="target">Block that will consume the emitted arrays.</param>
/// <param name="linkOptions">Options controlling the link's behavior.</param>
/// <returns>A disposable that unlinks <paramref name="target"/> when disposed.</returns>
public IDisposable LinkTo(ITargetBlock <T[]> target, DataflowLinkOptions linkOptions) =>
    _batchBlock.LinkTo(target, linkOptions);
/// <summary>
/// Links <paramref name="target"/> to the underlying transform block so it
/// receives this component's output items.
/// </summary>
/// <param name="target">Block that will consume the produced items.</param>
/// <param name="linkOptions">Options controlling the link's behavior.</param>
/// <returns>A disposable that unlinks <paramref name="target"/> when disposed.</returns>
public IDisposable LinkTo(ITargetBlock <TOutput> target, DataflowLinkOptions linkOptions) =>
    _transformBlock.LinkTo(target, linkOptions);
/// <summary>
/// Registers <paramref name="target"/> as the linked target by completing the
/// pending link promise. If a target was already registered, the call is
/// silently ignored (TrySetResult fails without throwing).
/// </summary>
/// <param name="target">Block to deliver this source's messages to.</param>
/// <param name="linkOptions">Accepted for interface compatibility; not consulted.</param>
/// <returns>A no-op disposable — unlinking is not supported by this source.</returns>
public IDisposable LinkTo(ITargetBlock <T> target, DataflowLinkOptions linkOptions)
{
    // First link wins; later targets are ignored rather than throwing.
    _m_linkedTarget.TrySetResult(target);
    return new NopDisposable();
}
/// <summary>
/// Proxy that links the <see cref="L7ConversationStatistics"/> source block to
/// the supplied target.
/// <br/><inheritdoc/>
/// </summary>
/// <inheritdoc/>
public IDisposable LinkTo(ITargetBlock <L7ConversationStatistics> target, DataflowLinkOptions linkOptions) =>
    this.L7ConversationStatistics.LinkTo(target, linkOptions);
/// <summary>
/// Links <paramref name="target"/> to the underlying broadcast block so it
/// receives the bitmaps this component publishes.
/// </summary>
/// <param name="target">Block that will consume the published bitmaps.</param>
/// <param name="linkOptions">Options controlling the link's behavior.</param>
/// <returns>A disposable that unlinks <paramref name="target"/> when disposed.</returns>
public IDisposable LinkTo(ITargetBlock <BitmapSource> target, DataflowLinkOptions linkOptions) =>
    _broadcast.LinkTo(target, linkOptions);
/// <summary>
/// Proxy that links the <see cref="Frames"/> source block to the supplied
/// target.
/// <br/><inheritdoc/>
/// </summary>
/// <inheritdoc/>
public IDisposable LinkTo(ITargetBlock <PmFrameBase> target, DataflowLinkOptions linkOptions) =>
    this.Frames.LinkTo(target, linkOptions);
/// <summary>
/// Entry point for nfind: parses command-line options, builds a three-stage
/// Dataflow pipeline (read file -> match regex -> print results), feeds it
/// the files that match the given patterns, and waits for all output.
/// </summary>
static void Main(string[] args)
{
    var showHelp = false;
    var insensitive = false;
    var recurse = false;
    var initialDirectory = Environment.CurrentDirectory;
    var options = new Mono.Options.OptionSet {
        { $"nfind.exe, version {ThisAssembly.AssemblyInformationalVersion}" },
        { "" },
        { "Searches text files for strings matching a given regular expression." },
        { "" },
        { "Usage:" },
        { " nfind [<options>] <regex> <file pattern> [<file pattern> ...]" },
        { "" },
        { "Options:" },
        { "h|?|help", "Show help and exit", v => showHelp = v != null },
        { "i|insensitive", "Perform a case-insensitive match", v => insensitive = v != null },
        { "r|recurse", "Recursively search subdirectories", v => recurse = v != null },
        { "d=|directory=", "Directory to search", v => initialDirectory = v },
        { "" },
        { "Further Reading:" },
        { " https://docs.microsoft.com/en-us/dotnet/standard/base-types/regular-expression-language-quick-reference" }
    };
    var positionalArgs = options.Parse(args);
    if (showHelp)
    {
        options.WriteOptionDescriptions(Console.Out);
        return;
    }

    // Guard: a regex argument is required. Previously a missing argument
    // crashed with an out-of-range access on positionalArgs[0]; show usage
    // instead.
    if (positionalArgs.Count == 0)
    {
        options.WriteOptionDescriptions(Console.Out);
        return;
    }

    var regexOptions = RegexOptions.Compiled | RegexOptions.Singleline;
    if (insensitive)
    {
        regexOptions |= RegexOptions.IgnoreCase;
    }

    // First positional arg is the pattern; the rest are file patterns.
    var regex = new Regex(positionalArgs[0], regexOptions);
    var patterns = positionalArgs.Skip(1);
    initialDirectory = Path.GetFullPath(initialDirectory);

    // Create the Dataflow blocks.
    var fileReaderBlock = GetFileReaderBlock();
    var matchingBlock = GetMatchBlock(regex);
    var outputBlock = GetOutputBlock();

    // Link the blocks so completion propagates down the pipeline.
    var linkOptions = new DataflowLinkOptions { PropagateCompletion = true };
    fileReaderBlock.LinkTo(matchingBlock, linkOptions);
    matchingBlock.LinkTo(outputBlock, linkOptions);

    // Find the applicable files and pass them to the file reader.
    // (The previously declared DirectoryInfo local was unused and has been removed.)
    foreach (var matchingFile in GetMatchingFiles(initialDirectory, patterns, recurse))
    {
        fileReaderBlock.Post(matchingFile);
    }

    // Inform the file reader that there will be no more input, and wait for
    // the output block to finish.
    fileReaderBlock.Complete();
    outputBlock.Completion.Wait();
}
/// <summary>
/// Verifies DataflowLinkOptions.Append ordering: targets are linked in a
/// scrambled order, but append/prepend placement reconstructs priority order
/// 0..5. Each target's filter (x &lt;= i) means message i is offered down the
/// list until target i accepts it exactly once, and the temporarily linked
/// (then disposed) unfiltered extra target must never receive anything.
/// </summary>
public async Task TestLinkTo_Append()
{
    // Append = true adds the link at the END of the source's target list;
    // Append = false (prepend) inserts it at the FRONT.
    var append = new DataflowLinkOptions() { Append = true, PropagateCompletion = true };
    var prepend = new DataflowLinkOptions() { Append = false, PropagateCompletion = true };

    var source = new BufferBlock<int>();
    var targets = new ActionBlock<int>[6];
    int[] consumedMessages = new int[targets.Length];
    for (int i = 0; i < targets.Length; i++)
    {
        int localI = i; // per-iteration copy so each closure counts its own slot
        targets[localI] = new ActionBlock<int>(x => consumedMessages[localI]++);
    }
    int lostMessages = 0;
    var extraTarget = new ActionBlock<int>(x => lostMessages++);

    // Link in a different order but use prepend/append to get them into expected/right order
    source.LinkTo(targets[2], prepend, x => x <= 2);
    source.LinkTo(targets[3], append, x => x <= 3);
    using (source.LinkTo(extraTarget, prepend))
    {
        source.LinkTo(targets[4], append, x => x <= 4);
        source.LinkTo(targets[1], prepend, x => x <= 1);
        using (source.LinkTo(extraTarget, append))
        {
            source.LinkTo(targets[0], prepend, x => x <= 0);
            source.LinkTo(targets[5], append, x => x <= 5);
            // Each extraTarget link is disposed (unlinked) before any message
            // is posted, so only targets[0..5] remain, in priority order 0..5.
            using (source.LinkTo(extraTarget, prepend)) { }
        }
    }

    source.PostRange(0, targets.Length); // post messages 0..5, one per target
    source.Complete();
    await source.Completion;
    await Task.WhenAll(from target in targets select target.Completion);

    Assert.All(consumedMessages, i => Assert.Equal(expected: 1, actual: i));
    Assert.Equal(expected: 0, actual: lostMessages);
}
/// <summary>
/// Counts word occurrences in the input file using a Dataflow pipeline
/// (line buffer -> word splitter -> batcher -> counter) and returns the
/// TopCount most frequent valid words, ordered by descending frequency.
/// </summary>
private static IDictionary <string, uint> GetTopWordsDataFlow()
{
    Console.WriteLine(nameof(GetTopWordsDataFlow) + "...");
    const int WorkerCount = 12;
    const int BoundedCapacity = 10000;

    // Case-insensitive tally shared by the counting workers.
    var wordCounts = new ConcurrentDictionary <string, uint>(StringComparer.InvariantCultureIgnoreCase);

    // Bounded buffer applies back-pressure to the file-reading producer.
    var lineBuffer = new BufferBlock <string>(
        new DataflowBlockOptions { BoundedCapacity = BoundedCapacity });

    // Single-threaded split of each line into its words.
    var lineSplitter = new TransformManyBlock <string, string>(
        line => line.Split(Separators, StringSplitOptions.RemoveEmptyEntries),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 1, BoundedCapacity = BoundedCapacity });

    // Group words into arrays of 5000 to amortize per-message overhead.
    var wordBatcher = new BatchBlock <string>(5000);

    // Parallel workers fold each batch into the shared tally.
    var wordCounter = new ActionBlock <string[]>(batch =>
    {
        foreach (var candidate in batch)
        {
            if (IsValidWord(candidate))
            {
                wordCounts.AddOrUpdate(candidate, 1, (key, oldVal) => oldVal + 1);
            }
        }
    }, new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = WorkerCount });

    var propagate = new DataflowLinkOptions { PropagateCompletion = true };
    lineBuffer.LinkTo(lineSplitter, propagate);
    lineSplitter.LinkTo(wordBatcher, propagate);
    wordBatcher.LinkTo(wordCounter, propagate);

    // Begin producing
    foreach (var line in File.ReadLines(InputFile.FullName))
    {
        lineBuffer.SendAsync(line).Wait();
    }
    lineBuffer.Complete();
    // End of producing

    // Wait for workers to finish their work
    wordCounter.Completion.Wait();

    var ranked = wordCounts
        .OrderByDescending(kv => kv.Value)
        .Take((int)TopCount);
    return ranked.ToDictionary(kv => kv.Key, kv => kv.Value);
}