Example #1
        public void RunBoundingTests()
        {
            var options = new DataflowBlockOptions() { BoundedCapacity = ITargetBlockTestHelper.BOUNDED_CAPACITY };
            var executionOptions = new ExecutionDataflowBlockOptions() { BoundedCapacity = ITargetBlockTestHelper.BOUNDED_CAPACITY };
            var greedyOptions = new GroupingDataflowBlockOptions() { BoundedCapacity = ITargetBlockTestHelper.BOUNDED_CAPACITY, Greedy = true };
            var nonGreedyOptions = new GroupingDataflowBlockOptions() { BoundedCapacity = ITargetBlockTestHelper.BOUNDED_CAPACITY, Greedy = false };

            // "Normal" target blocks
            Assert.True(ITargetBlockTestHelper.TestBoundingTarget<int, int>(new ActionBlock<int>((Action<int>)ITargetBlockTestHelper.BoundingAction, executionOptions), greedy: true));

            // BatchBlock
            Assert.True(ITargetBlockTestHelper.TestBoundingTarget<int, int[]>(new BatchBlock<int>(ITargetBlockTestHelper.BOUNDED_CAPACITY, greedyOptions), greedy: true));
            Assert.True(ITargetBlockTestHelper.TestBoundingTarget<int, int[]>(new BatchBlock<int>(ITargetBlockTestHelper.BOUNDED_CAPACITY, nonGreedyOptions), greedy: false));

            // JoinBlock
            Assert.True(ITargetBlockTestHelper.TestBoundingJoin2<int>(new JoinBlock<int, int>(greedyOptions), greedy: true));
            Assert.True(ITargetBlockTestHelper.TestBoundingJoin3<int>(new JoinBlock<int, int, int>(nonGreedyOptions), greedy: false));

            // JoinBlock.Target
            Assert.True(ITargetBlockTestHelper.TestBoundingGreedyJoinTarget2<int>(new JoinBlock<int, int>(greedyOptions), testedTargetIndex: 1));
            Assert.True(ITargetBlockTestHelper.TestBoundingGreedyJoinTarget3<int>(new JoinBlock<int, int, int>(greedyOptions), testedTargetIndex: 2));
        }
Example #2
        public async Task CompressAndEncrypt(
            Stream streamSource, Stream streamDestination,
            CancellationTokenSource cts = null)
        {
            cts = cts ?? new CancellationTokenSource();

            var compressorOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = Environment.ProcessorCount,
                BoundedCapacity        = _boundedCapacity,
                CancellationToken      = cts.Token
            };

            var inputBuffer = new BufferBlock <CompressingDetails>(
                new DataflowBlockOptions
            {
                CancellationToken = cts.Token,
                BoundedCapacity   = _boundedCapacity
            });

            var compressor = new TransformBlock <CompressingDetails, CompressedDetails>(
                async details =>
            {
                Chunk compressedData = await Compress(details.Bytes);
                await Pool.Send(details.Bytes);

                return(new CompressedDetails
                {
                    Bytes = compressedData,
                    ChunkSize = details.ChunkSize,
                    Sequence = details.Sequence,
                    CompressedDataSize = new Chunk(BitConverter.GetBytes(compressedData.Length))
                });
            }, compressorOptions);

            var encryptor = new TransformBlock <CompressedDetails, EncryptDetails>(
                async details =>
            {
                var data = await CombineByteArrays(details.CompressedDataSize, details.ChunkSize, details.Bytes);
                await Pool.Send(details.Bytes);

                var encryptedData = await Encrypt(data);
                await Pool.Send(data);

                return(new EncryptDetails
                {
                    Bytes = encryptedData,
                    Sequence = details.Sequence,
                    EncryptedDataSize = new Chunk(BitConverter.GetBytes(encryptedData.Length))
                });
            }, compressorOptions);

            var asOrderedAgent = Agent.Start((new Dictionary <int, EncryptDetails>(), 0),
                                             async((Dictionary <int, EncryptDetails>, int)state, EncryptDetails msg) =>
            {
                (Dictionary <int, EncryptDetails> details, int lastIndexProc) = state;
                details.Add(msg.Sequence, msg);
                while (details.ContainsKey(lastIndexProc + 1))
                {
                    msg = details[lastIndexProc + 1];
                    await streamDestination.WriteAsync(msg.EncryptedDataSize.Bytes, 0, msg.EncryptedDataSize.Length);
                    await streamDestination.WriteAsync(msg.Bytes.Bytes, 0, msg.Bytes.Length);
                    await Pool.Send(msg.Bytes);
                    lastIndexProc = msg.Sequence;
                    details.Remove(lastIndexProc);
                }
                return(details, lastIndexProc);
            }, cts);
Example #3
 private IPropagatorBlock <LoadedScene, RenderedSceneTile <Rgba32> > CreateTileRenderer(ExecutionDataflowBlockOptions dataflowOptions) =>
Example #4
        /// <summary>
        /// Parallelize I/O with the same semantics as the single-threaded copy method above.
        /// ResolveAssemblyReferences tends to generate longer and longer lists of files to send
        /// to CopyTask as we get further and further down the dependency graph.
        /// The OS can handle a lot of parallel I/O so let's minimize wall clock time to get
        /// it all done.
        /// </summary>
        private bool CopyParallel(
            CopyFileWithState copyFile,
            int parallelism,
            out List <ITaskItem> destinationFilesSuccessfullyCopied)
        {
            bool success = true;

            // We must supply the same semantics as the single-threaded version above:
            //
            // - For copy operations in the list that have the same destination, we must
            //   provide for in-order copy attempts that allow re-copying different files
            //   and avoiding copies for later files that match SkipUnchangedFiles semantics.
            //   We must also add a destination file copy item for each attempt.
            // - The order of entries in destinationFilesSuccessfullyCopied must match
            //   the order of entries passed in, along with copied metadata.
            // - Metadata must not be copied to destination item if the copy operation failed.
            //
            // We split the work into different Tasks:
            //
            // - Entries with unique destination file paths each get their own parallel operation.
            // - Each subset of copies into the same destination get their own Task to run
            //   the single-threaded logic in order.
            //
            // At the end we reassemble the result list in the same order as was passed in.

            // Map: destination path -> indices into the SourceFiles/DestinationFiles arrays (ordered low->high).
            var partitionsByDestination = new Dictionary <string, List <int> >(
                DestinationFiles.Length, // Set length to common case of 1:1 source->dest.
                StringComparer.OrdinalIgnoreCase);

            for (int i = 0; i < SourceFiles.Length && !_cancellationTokenSource.IsCancellationRequested; ++i)
            {
                ITaskItem destItem = DestinationFiles[i];
                string    destPath = destItem.ItemSpec;
                if (!partitionsByDestination.TryGetValue(destPath, out List <int> sourceIndices))
                {
                    // Use 1 for list length - common case is for no destination overlap.
                    sourceIndices = new List <int>(1);
                    partitionsByDestination[destPath] = sourceIndices;
                }
                sourceIndices.Add(i);
            }

            // Lockless flags updated from each thread - each needs to be a processor word for atomicity.
            var successFlags       = new IntPtr[DestinationFiles.Length];
            var actionBlockOptions = new ExecutionDataflowBlockOptions
            {
                MaxDegreeOfParallelism = parallelism,
                CancellationToken      = _cancellationTokenSource.Token
            };
            var partitionCopyActionBlock = new ActionBlock <List <int> >(
                async(List <int> partition) =>
            {
                // Break from synchronous thread context of caller to get onto thread pool thread.
                await System.Threading.Tasks.Task.Yield();

                for (int partitionIndex = 0; partitionIndex < partition.Count && !_cancellationTokenSource.IsCancellationRequested; partitionIndex++)
                {
                    int fileIndex        = partition[partitionIndex];
                    ITaskItem sourceItem = SourceFiles[fileIndex];
                    ITaskItem destItem   = DestinationFiles[fileIndex];
                    string sourcePath    = sourceItem.ItemSpec;

                    // Check whether we just copied from this source to this destination; if so, don't copy again.
                    bool copyComplete = partitionIndex > 0 &&
                                        String.Equals(
                        sourcePath,
                        SourceFiles[partition[partitionIndex - 1]].ItemSpec,
                        StringComparison.OrdinalIgnoreCase);

                    if (!copyComplete)
                    {
                        if (DoCopyIfNecessary(
                                new FileState(sourceItem.ItemSpec),
                                new FileState(destItem.ItemSpec),
                                copyFile))
                        {
                            copyComplete = true;
                        }
                        else
                        {
                            // Threads race to set the outer variable, but they all set the same (false) value.
                            success = false;
                        }
                    }

                    if (copyComplete)
                    {
                        sourceItem.CopyMetadataTo(destItem);
                        successFlags[fileIndex] = (IntPtr)1;
                    }
                }
            },
                actionBlockOptions);

            foreach (List <int> partition in partitionsByDestination.Values)
            {
                bool partitionAccepted = partitionCopyActionBlock.Post(partition);
                if (!partitionAccepted)
                {
                    // Retail assert...
                    ErrorUtilities.VerifyThrow(false,
                                               "Failed posting a file copy to an ActionBlock. Should not happen with block at max int capacity.");
                }
            }

            partitionCopyActionBlock.Complete();
            partitionCopyActionBlock.Completion.GetAwaiter().GetResult();

            // Assemble an in-order list of destination items that succeeded.
            destinationFilesSuccessfullyCopied = new List <ITaskItem>(DestinationFiles.Length);
            for (int i = 0; i < successFlags.Length; i++)
            {
                if (successFlags[i] != (IntPtr)0)
                {
                    destinationFilesSuccessfullyCopied.Add(DestinationFiles[i]);
                }
            }

            return(success);
        }
Example #5
        public GetDataBlock(World world, ImmutableDictionary <LevelDbWorldKey2, KeyAndCrc> renderedSubChunks, ExecutionDataflowBlockOptions options, bool forceOverwrite)
        {
            this.forceOverwrite = forceOverwrite;
            Block = new TransformManyBlock <IEnumerable <GroupedChunkSubKeys>, IEnumerable <ChunkData> >(
                groupedChunkSubKeys =>
            {
                var outerList         = new List <List <ChunkData> >();
                var chunkList         = new List <ChunkData>();
                bool renderThisChunks = false;
                foreach (var chunkSubKeys in groupedChunkSubKeys)
                {
                    var data = world.GetChunkData(chunkSubKeys);

                    foreach (var subKey in data.SubChunks)
                    {
                        if (renderedSubChunks.Count > 0 && renderedSubChunks.TryGetValue(new LevelDbWorldKey2(subKey.Key), out KeyAndCrc crc32))
                        {
                            subKey.FoundInDb   = true;
                            subKey.ForeignDbId = crc32.DbId;

                            if (forceOverwrite || (crc32.Crc32 != subKey.Crc32))
                            {
                                renderThisChunks = true;
                            }
                        }
                        else
                        {
                            renderThisChunks = true;
                        }
                    }

                    chunkList.Add(data);
                }
                if (renderThisChunks)
                {
                    outerList.Add(chunkList);
                }

                ProcessedCount++;
                return(outerList);
            }, options);
        }
Example #6
        [InlineData(2, 1, false)] // not forcing ordering, but dop == 1, so it doesn't matter
        public async Task TestOrdering_Async_OrderedEnabled(int mmpt, int dop, bool? EnsureOrdered)
        {
            const int iters = 1000;

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
            if (EnsureOrdered == null)
            {
                Assert.True(options.EnsureOrdered);
            }
            else
            {
                options.EnsureOrdered = EnsureOrdered.Value;
            }

            var tb = new TransformBlock<int, int>(i => Task.FromResult(i), options);
            tb.PostRange(0, iters);
            for (int i = 0; i < iters; i++)
            {
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }
            tb.Complete();
            await tb.Completion;
        }
Example #7
 public async Task TestOrdering()
 {
     const int iters = 1000;
     foreach (int mmpt in new[] { DataflowBlockOptions.Unbounded, 1 })
     foreach (int dop in new[] { 1, 2, DataflowBlockOptions.Unbounded })
     {
         var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, MaxMessagesPerTask = mmpt };
         var tb = new TransformBlock<int, int>(i => i, options);
         tb.PostRange(0, iters);
         for (int i = 0; i < iters; i++)
         {
             Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
         }
         tb.Complete();
         await tb.Completion;
     }
 }
Example #8
        public async Task TestPrecanceledToken()
        {
            var options = new ExecutionDataflowBlockOptions { CancellationToken = new CancellationToken(true) };
            var blocks = new []
            {
                new ActionBlock<int>(i => { }, options),
                new ActionBlock<int>(i => Task.FromResult(0), options)
            };

            foreach (ActionBlock<int> ab in blocks)
            {
                Assert.False(ab.Post(42));
                Assert.Equal(expected: 0, actual: ab.InputCount);
                Assert.NotNull(ab.Completion);

                ab.Complete();
                ((IDataflowBlock)ab).Fault(new Exception());

                await Assert.ThrowsAnyAsync<OperationCanceledException>(() => ab.Completion);
            }
        }
Example #9
        // C# example
        public static async Task RunFuzzyMatchAgentCSharp(string[] wordsLookup, IList <string> files)
        {
            var cts = new CancellationTokenSource();
            var opt = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity        = 10,
                MaxDegreeOfParallelism = 4,
                CancellationToken      = cts.Token
            };

            var inputBlock = new BufferBlock <string>(opt);

            var readLinesBlock =
                new TransformBlock <string, string>(
                    async file => await File.ReadAllTextAsync(file, cts.Token), opt);

            var splitWordsBlock =
                new TransformBlock <string, string[]>(
                    text => WordRegex.Value.Split(text).Where(w => !IgnoreWords.Contains(w)).AsSet().ToArray(), opt);

            var foundMatchesBlock =
                new TransformBlock <string[], WordDistanceStruct[]>(async wordSet =>
            {
                var matches =
                    await wordsLookup.Traverse(wl => JaroWinklerModule.bestMatchTask(wordSet, wl, threshold));
                return(matches.Flatten().ToArray());
            }, opt);


            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            // TODO (7) (for C#)
            // Implement a stateful agent using TPL Dataflow.
            // The Agent should have an internal state protected from external access.
            // The function passed in the constructor applies a projection/reduction to the incoming messages
            // and the current state to return a new state
            // (see AgentAggregator.cs)
            var agent = Agent.Start(new Dictionary <string, HashSet <string> >(),
                                    (Dictionary <string, HashSet <string> > state, WordDistanceStruct[] matches) =>
            {
                var matchesDic = matches
                                 .GroupBy(w => w.Word)
                                 .ToDictionary(
                    k => k.Key,
                    v => v.Select(w => w.Match).AsSet());

                var newState = Clone(state);
                foreach (var match in matchesDic)
                {
                    if (newState.TryGetValue(match.Key, out HashSet <string> values))
                    {
                        values.AddRange(match.Value);
                        newState[match.Key] = values;
                    }
                    else
                    {
                        newState.Add(match.Key, match.Value);
                    }
                }

                return(newState);
            });

            IDisposable disposeAll = new CompositeDisposable(
                inputBlock.LinkTo(readLinesBlock, linkOptions),
                readLinesBlock.LinkTo(splitWordsBlock, linkOptions),
                splitWordsBlock.LinkTo(foundMatchesBlock, linkOptions),
                foundMatchesBlock.LinkTo(agent),
                agent.AsObservable()
                .Subscribe(
                    summaryMatches => PrintSummary(summaryMatches))
                );

            cts.Token.Register(disposeAll.Dispose);

            foreach (var file in files)
            {
                await inputBlock.SendAsync(file, cts.Token);
            }

            //  inputBlock.Complete();
            //  await foundMatchesBlock.Completion.ContinueWith(_ =>
            //      disposeAll.Dispose());
        }
Example #10
        public static async Task RunFuzzyMatchDataFlow(string[] wordsLookup, IList <string> files)
        {
            var cts = new CancellationTokenSource();
            var opt = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity = 10,
                // TODO, change this value and check what is happening
                MaxDegreeOfParallelism = 1,
                CancellationToken      = cts.Token
            };

            int fileCount = files.Count;

            var inputBlock = new BufferBlock <string>(opt);

            var readLinesBlock =
                new TransformBlock <string, string>(
                    async file => await File.ReadAllTextAsync(file, cts.Token), opt);

            var splitWordsBlock =
                new TransformBlock <string, HashSet <string> >(
                    text => WordRegex.Value.Split(text).Where(w => !IgnoreWords.Contains(w)).AsSet(), opt);

            var batch =
                new BatchBlock <HashSet <string> >(fileCount);

            var foundMatchesBlock =
                new TransformBlock <HashSet <string>[], WordDistanceStruct[]>(
                    async wordSet =>
            {
                var wordSetFlatten = wordSet.Flatten().AsSet();
                var matches        =
                    await wordsLookup.Traverse(wl =>
                                               JaroWinklerModule.bestMatchTask(wordSetFlatten, wl, threshold));
                return(matches.Flatten().ToArray());
            }, opt);


            // TODO (5)
            // Implement a block named "printBlock", which prints the output of
            // the foundMatchesBlock using the "PrintSummary" method
            // Then link the block to the "foundMatchesBlock" block
            // var printBlock = // missing code
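            // A possible sketch (an illustrative assumption, not the exercise's official solution):
            // an ActionBlock that consumes the WordDistanceStruct[] batches and prints them.
            // var printBlock = new ActionBlock<WordDistanceStruct[]>(
            //     matches => PrintSummary(matches), opt);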

            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            IDisposable disposeAll = new CompositeDisposable(
                inputBlock.LinkTo(readLinesBlock, linkOptions),
                readLinesBlock.LinkTo(splitWordsBlock, linkOptions),
                splitWordsBlock.LinkTo(batch, linkOptions),
                batch.LinkTo(foundMatchesBlock, linkOptions)
                // TODO: uncomment this code after
                // implementing TODO (5)
                // foundMatchesBlock.LinkTo(printBlock)
                );

            cts.Token.Register(disposeAll.Dispose);

            // TODO (6)
            // After completing TODO (5), remove or unlink the printBlock, and instead consume the output of the "foundMatchesBlock" block
            // with Reactive Extensions' "AsObservable", maintaining the call to the "PrintSummary" method
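            // A possible sketch (an illustrative assumption, mirroring the agent examples above):
            // var subscription = foundMatchesBlock.AsObservable()
            //     .Subscribe(matches => PrintSummary(matches));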


            foreach (var file in files)
            {
                await inputBlock.SendAsync(file, cts.Token);
            }

            inputBlock.Complete();
            await foundMatchesBlock.Completion.ContinueWith(_ => disposeAll.Dispose());
        }
Example #11
        // F# example
        public static async Task RunFuzzyMatchAgentFSharp(string[] wordsLookup, IList <string> files)
        {
            var cts = new CancellationTokenSource();
            var opt = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity        = 10,
                MaxDegreeOfParallelism = 4,
                CancellationToken      = cts.Token
            };

            var inputBlock = new BufferBlock <string>(opt);

            var readLinesBlock =
                new TransformBlock <string, string>(
                    file => File.ReadAllTextAsync(file, cts.Token), opt);

            var splitWordsBlock = new TransformBlock <string, string[]>(
                text => WordRegex.Value.Split(text).Where(w => !IgnoreWords.Contains(w)).AsSet().ToArray(), opt);

            var foundMatchesBlock =
                new TransformBlock <string[], WordDistanceStruct[]>(async wordSet =>
            {
                var matches =
                    await wordsLookup.Traverse(wl => JaroWinklerModule.bestMatchTask(wordSet, wl, threshold));
                return(matches.Flatten().ToArray());
            }, opt);


            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            // TODO (7) (for F#)
            // Implement a Reactive MailboxProcessor in F#.
            // Go to the Fsharp project, Module 3 and follow the instructions (7.a)
            // then, uncomment the following code and remove the previous code that uses
            // the Agent based on TPL Dataflow

            var agent =
                new ReactiveAgent.AgentObservable <WordDistanceStruct[], Dictionary <string, HashSet <string> > >
                    (new Dictionary <string, HashSet <string> >(),
                    (state, matches) =>
            {
                var matchesDic = matches
                                 .GroupBy(w => w.Word).ToDictionary(k => k.Key,
                                                                    v => v.Select(w => w.Match).AsSet());

                // Cloning is important to stay free of race conditions
                // (alternatively, use an immutable collection)
                var newState = Clone(state);
                foreach (var match in matchesDic)
                {
                    if (newState.TryGetValue(match.Key, out HashSet <string> values))
                    {
                        values.AddRange(match.Value);
                        newState[match.Key] = values;
                    }
                    else
                    {
                        newState.Add(match.Key, match.Value);
                    }
                }

                return(newState);
            });



            IDisposable disposeAll = new CompositeDisposable(
                inputBlock.LinkTo(readLinesBlock, linkOptions),
                readLinesBlock.LinkTo(splitWordsBlock, linkOptions),
                splitWordsBlock.LinkTo(foundMatchesBlock, linkOptions),
                foundMatchesBlock.LinkTo(agent),
                agent.AsObservable().Subscribe(
                    summaryMatches => PrintSummary(summaryMatches))
                );

            cts.Token.Register(disposeAll.Dispose);

            foreach (var file in files)
            {
                await inputBlock.SendAsync(file, cts.Token);
            }

            // inputBlock.Complete();
            // await foundMatchesBlock.Completion.ContinueWith(_ => disposeAll.Dispose());
        }
Example #12
        public async Task TestArrayListReusePossibleForDop1()
        {
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 2 })
            {
                foreach (bool sync in DataflowTestHelpers.BooleanValues)
                {
                    foreach (int dop in new[] { 1, Environment.ProcessorCount })
                    {
                        var dbo = new ExecutionDataflowBlockOptions {
                            BoundedCapacity = boundedCapacity, MaxDegreeOfParallelism = dop
                        };
                        foreach (IList <int> list in new IList <int>[] { new int[1], new List <int> {
                                                                             0
                                                                         }, new Collection <int> {
                                                                             0
                                                                         } })
                        {
                            int nextExpectedValue = 1;

                            TransformManyBlock <int, int>  transform = null;
                            Func <int, IEnumerable <int> > body      = i => {
                                if (i == 100) // we're done iterating
                                {
                                    transform.Complete();
                                    return((IEnumerable <int>)null);
                                }
                                else if (dop == 1)
                                {
                                    list[0] = i + 1; // reuse the list over and over, but only at dop == 1
                                    return((IEnumerable <int>)list);
                                }
                                else if (list is int[])
                                {
                                    return(new int[1] {
                                        i + 1
                                    });
                                }
                                else if (list is List <int> )
                                {
                                    return(new List <int>()
                                    {
                                        i + 1
                                    });
                                }
                                else
                                {
                                    return(new Collection <int>()
                                    {
                                        i + 1
                                    });
                                }
                            };

                            transform = sync ?
                                        new TransformManyBlock <int, int>(body, dbo) :
                                        new TransformManyBlock <int, int>(i => Task.Run(() => body(i)), dbo);

                            TransformBlock <int, int> verifier = new TransformBlock <int, int>(i => {
                                Assert.Equal(expected: nextExpectedValue, actual: i);
                                nextExpectedValue++;
                                return(i);
                            });

                            transform.LinkTo(verifier);
                            verifier.LinkTo(transform);

                            await transform.SendAsync(0);

                            await transform.Completion;
                        }
                    }
                }
            }
        }
Example #13
        public override async Task <HttpHandlerResult> GetAsync(CancellationToken token)
        {
            var _names = GetQuery("names");

            try {
                // Get Agent Names
                string[] agentNames = null;

                if (!string.IsNullOrEmpty(_names))
                {
                    agentNames = ParseNames(_names).ToArray();
                }

                var agents = PhotonServer.Instance.Agents.All
                             .Where(x => IncludesAgent(agentNames, x)).ToArray();

                if (!agents.Any())
                {
                    throw new ApplicationException("No agents were found!");
                }

                // Get Agent Versions
                var versionMap = new ConcurrentDictionary <string, AgentVersionInfo>();

                var blockOptions = new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = Configuration.Parallelism,
                    CancellationToken      = token,
                };

                var block = new ActionBlock <ServerAgent>(async agent => {
                    var result = new AgentVersionInfo {
                        Name = agent.Name,
                    };

                    try {
                        result.Version = await GetAgentVersion(agent, token);
                    }
                    catch (Exception error) {
                        result.Exception = error.UnfoldMessages();
                    }

                    versionMap[agent.Id] = result;
                }, blockOptions);

                foreach (var agent in agents)
                {
                    block.Post(agent);
                }

                block.Complete();
                await block.Completion;

                // Send Response
                var response = new HttpAgentVersionListResponse {
                    VersionList = versionMap.Select(x => new AgentVersionResponse {
                        AgentId      = x.Key,
                        AgentName    = x.Value.Name,
                        AgentVersion = x.Value.Version,
                        Exception    = x.Value.Exception,
                    }).ToArray(),
                };

                return(Response.Json(response));
            }
            catch (Exception error) {
                Log.Error("Failed to run Update-Task!", error);
                return(Response.Exception(error));
            }
        }
Example #14
        void InitializePipeline(int channels, int sampleRate)
        {
            Contract.Requires(channels > 0);
            Contract.Requires(sampleRate > 0);

            var boundedExecutionBlockOptions = new ExecutionDataflowBlockOptions
            {
                BoundedCapacity           = _boundedCapacity,
                SingleProducerConstrained = true
            };
            var unboundedExecutionBlockOptions = new ExecutionDataflowBlockOptions {
                SingleProducerConstrained = true
            };
            var propogateLinkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            // First, resize the sample count collections to the desired window size:
            var sampleCountFilter = new SampleCountFilter(channels, (int)Math.Round(sampleRate * _rmsWindowTime));

            _filterSampleCountBlock =
                new TransformManyBlock <SampleCollection, SampleCollection>(input => sampleCountFilter.Process(input),
                                                                            boundedExecutionBlockOptions);

            // Calculate the track peaks:
            var peakDetector           = new PeakDetector();
            var analyzeTrackPeaksBlock = new TransformBlock <SampleCollection, Tuple <SampleCollection, float> >(input =>
            {
                peakDetector.Submit(input);
                return(Tuple.Create(input, peakDetector.Peak));
            }, boundedExecutionBlockOptions);

            _filterSampleCountBlock.LinkTo(analyzeTrackPeaksBlock, propogateLinkOptions);

            // Down-convert certain sample rates (easy multiples) that aren't directly supported by ReplayGain:
            var sampleRateConverter    = new SampleRateConverter(sampleRate);
            var convertSampleRateBlock =
                new TransformBlock <Tuple <SampleCollection, float>, Tuple <SampleCollection, float> >(input =>
            {
                SampleCollection result = sampleRateConverter.Convert(input.Item1);
                return(Tuple.Create(result, input.Item2));
            }, boundedExecutionBlockOptions);

            analyzeTrackPeaksBlock.LinkTo(convertSampleRateBlock, propogateLinkOptions);

            // Filter the samples:
            var butterworthFilter      = new ButterworthFilter(sampleRate);
            var butterworthFilterBlock =
                new TransformBlock <Tuple <SampleCollection, float>, Tuple <SampleCollection, float> >(input =>
            {
                butterworthFilter.Process(input.Item1);
                return(input);
            }, boundedExecutionBlockOptions);

            convertSampleRateBlock.LinkTo(butterworthFilterBlock, propogateLinkOptions);

            var yuleWalkFilter      = new YuleWalkFilter(sampleRate);
            var yuleWalkFilterBlock =
                new TransformBlock <Tuple <SampleCollection, float>, Tuple <SampleCollection, float> >(input =>
            {
                yuleWalkFilter.Process(input.Item1);
                return(input);
            }, boundedExecutionBlockOptions);

            butterworthFilterBlock.LinkTo(yuleWalkFilterBlock, propogateLinkOptions);

            // Calculate the root mean square for each filtered window:
            var calculateRmsBlock =
                new TransformBlock <Tuple <SampleCollection, float>, Tuple <SampleCollection, float, float> >(input =>
                                                                                                              Tuple.Create(input.Item1, input.Item2, input.Item1.IsLast
                        ? float.NaN
                        : CalculateRms(input.Item1)), boundedExecutionBlockOptions);

            yuleWalkFilterBlock.LinkTo(calculateRmsBlock, propogateLinkOptions);

            // Free the sample collections once they are no longer needed:
            var freeSampleCollectionsBlock =
                new TransformBlock <Tuple <SampleCollection, float, float>, Tuple <float, float> >(input =>
            {
                SampleCollectionFactory.Instance.Free(input.Item1);
                return(Tuple.Create(input.Item2, input.Item3));
            }, boundedExecutionBlockOptions);

            calculateRmsBlock.LinkTo(freeSampleCollectionsBlock, propogateLinkOptions);

            // Broadcast the RMS values:
            var broadcastRmsBlock =
                new BroadcastBlock <Tuple <float, float> >(input => Tuple.Create(input.Item1, input.Item2));

            freeSampleCollectionsBlock.LinkTo(broadcastRmsBlock, propogateLinkOptions);

            // Calculate the album gain:
            broadcastRmsBlock.LinkTo(_albumComponent.InputBlock);

            // Calculate the track gain:
            var windowSelector        = new WindowSelector();
            var analyzeTrackGainBlock = new TransformBlock <Tuple <float, float>, Tuple <float, float> >(input =>
            {
                if (float.IsNaN(input.Item2))
                {
                    return(Tuple.Create(input.Item1, windowSelector.GetResult()));
                }
                windowSelector.Submit(input.Item2);
                return(Tuple.Create(input.Item1, float.NaN));
            }, unboundedExecutionBlockOptions);

            broadcastRmsBlock.LinkTo(analyzeTrackGainBlock, propogateLinkOptions);

            // Join the track and album peak and gain values all together:
            var joinResultsBlock = new JoinBlock <Tuple <float, float>, Tuple <float, float> >();

            analyzeTrackGainBlock.LinkTo(DataflowBlock.NullTarget <Tuple <float, float> >(),
                                         result => float.IsNaN(result.Item2));
            analyzeTrackGainBlock.LinkTo(joinResultsBlock.Target1, propogateLinkOptions,
                                         result => !float.IsNaN(result.Item2));
            _albumComponent.OutputBlock.LinkTo(joinResultsBlock.Target2, propogateLinkOptions);

            // Convert the results:
            var convertToMetadataBlock =
                new TransformBlock <Tuple <Tuple <float, float>, Tuple <float, float> >, MetadataDictionary>(input =>
            {
                var result = new MetadataDictionary
                {
                    ["TrackPeak"] = ConvertPeakToString(input.Item1.Item1),
                    ["TrackGain"] = ConvertGainToString(input.Item1.Item2),
                    ["AlbumPeak"] = ConvertPeakToString(input.Item2.Item1),
                    ["AlbumGain"] = ConvertGainToString(input.Item2.Item2)
                };
                return(result);
            }, unboundedExecutionBlockOptions);

            joinResultsBlock.LinkTo(convertToMetadataBlock, propogateLinkOptions);

            // Buffer the results:
            _bufferResultsBlock = new BufferBlock <MetadataDictionary>();
            convertToMetadataBlock.LinkTo(_bufferResultsBlock, propogateLinkOptions);
        }
Example #15
        public async Task TestInputCount()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                Barrier barrier1 = new Barrier(2), barrier2 = new Barrier(2);
                var options = new ExecutionDataflowBlockOptions { SingleProducerConstrained = singleProducerConstrained };
                Action<int> body = _ => {
                    barrier1.SignalAndWait();
                    // will test InputCount here
                    barrier2.SignalAndWait();
                };

                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(body, options) :
                    new ActionBlock<int>(i => Task.Run(() => body(i)), options);

                for (int iter = 0; iter < 2; iter++)
                {
                    ab.PostItems(1, 2);
                    for (int i = 1; i >= 0; i--)
                    {
                        barrier1.SignalAndWait();
                        Assert.Equal(expected: i, actual: ab.InputCount);
                        barrier2.SignalAndWait();
                    }
                }

                ab.Complete();
                await ab.Completion;
            }
        }
Example #16
        public async Task TestNonGreedy()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                var barrier1 = new Barrier(2);
                Action<int> body = _ => barrier1.SignalAndWait();
                var options = new ExecutionDataflowBlockOptions { BoundedCapacity = 1 };

                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(body, options) :
                    new ActionBlock<int>(i => Task.Run(() => body(i)), options);

                Task<bool>[] sends = Enumerable.Range(0, 10).Select(i => ab.SendAsync(i)).ToArray();
                for (int i = 0; i < sends.Length; i++)
                {
                    Assert.True(sends[i].Result); // Next send should have completed, with the value successfully accepted
                    for (int j = i + 1; j < sends.Length; j++) // No further sends should have completed yet
                    {
                        Assert.False(sends[j].IsCompleted);
                    }
                    barrier1.SignalAndWait();
                }

                ab.Complete();
                await ab.Completion;
            }
        }
Example #17
        private static async Task MainAsync(string[] args)
        {
            var db = new DatabaseConnection();

            var queue = new BufferBlock <IList <RockSong> >(new DataflowBlockOptions {
                BoundedCapacity = 5
            });

            var consumerOptions = new ExecutionDataflowBlockOptions {
                BoundedCapacity = 1
            };

            var consumer1 = new ActionBlock <IList <RockSong> >((songList) =>
            {
                var options = new CqlQueryOptions().SetConsistencyLevel(ConsistencyLevel.LocalOne);
                foreach (var rockSong in songList)
                {
                    db.Mapper.Insert <RockSong>(rockSong, options);
                }

                Console.WriteLine($"Consumer 1 | pagedList count: {songList.Count}");
            }, consumerOptions);

            var consumer2 = new ActionBlock <IList <RockSong> >((songList) =>
            {
                var options = new CqlQueryOptions().SetConsistencyLevel(ConsistencyLevel.LocalOne);
                foreach (var rockSong in songList)
                {
                    db.Mapper.Insert <RockSong>(rockSong, options);
                }

                Console.WriteLine($"Consumer 2 | pagedList count: {songList.Count}");
            }, consumerOptions);

            var consumer3 = new ActionBlock <IList <RockSong> >((songList) =>
            {
                var options = new CqlQueryOptions().SetConsistencyLevel(ConsistencyLevel.LocalOne);
                foreach (var rockSong in songList)
                {
                    db.Mapper.Insert <RockSong>(rockSong, options);
                }

                Console.WriteLine($"Consumer 3 | pagedList count: {songList.Count}");
            }, consumerOptions);

            var linkoptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };

            queue.LinkTo(consumer1, linkoptions);
            queue.LinkTo(consumer2, linkoptions);
            queue.LinkTo(consumer3, linkoptions);

            Console.WriteLine("Start Producing");
            var start = DateTime.UtcNow;

            await ProduceAsync(queue).ConfigureAwait(false);

            await Task.WhenAll(consumer1.Completion, consumer2.Completion, consumer3.Completion);

            Console.WriteLine($"Duration: {DateTime.UtcNow - start}");

            Console.ReadLine();
        }
Example #18
        public async Task TestParallelExecution()
        {
            int dop = 2;
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                Barrier barrier = new Barrier(dop);
                var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = dop, SingleProducerConstrained = singleProducerConstrained };
                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(_ => barrier.SignalAndWait(), options) :
                    new ActionBlock<int>(_ => Task.Run(() => barrier.SignalAndWait()), options);

                int iters = dop * 4;
                ab.PostRange(0, iters);
                ab.Complete();
                await ab.Completion;
            }
        }
Example #19
        CreateTransformBlock <TParent, TInputItem, TOutputItem>
            ([NotNull] Func <TParent, TInputItem, TOutputItem> process,
            ExecutionDataflowBlockOptions options = null,
            Action <Exception, SplitJoinItem <TParent, TInputItem> > defaultExceptionLogger = null)
        {
            if (process == null)
            {
                throw new ArgumentNullException(nameof(process));
            }

            var block = new TransformBlock <SplitJoinItem <TParent, TInputItem>, SplitJoinItem <TParent, TOutputItem> >
                            (splitJoinItem =>
            {
                if (splitJoinItem == null)
                {
                    throw new ArgumentNullException(nameof(splitJoinItem));
                }

                if (splitJoinItem.Result == SplitJoinItemResult.Failure)
                {
                    var new_split_join_item = new SplitJoinItem <TParent, TOutputItem> (splitJoinItem.Parent,
                                                                                        default(TOutputItem),
                                                                                        splitJoinItem.TotalItemsCount);

                    new_split_join_item.Failed(splitJoinItem.Exception);

                    return(new_split_join_item);
                }

                try
                {
                    var item = process(splitJoinItem.Parent, splitJoinItem.Item);

                    var new_split_join_item = new SplitJoinItem <TParent, TOutputItem> (splitJoinItem.Parent,
                                                                                        item,
                                                                                        splitJoinItem.TotalItemsCount);

                    new_split_join_item.CompletedSuccessfully();

                    return(new_split_join_item);
                }
                catch (Exception ex)
                {
                    var logger = splitJoinItem.Item as IDataflowErrorLogger;
                    if (logger != null)
                    {
                        logger.OnException(ex);
                    }
                    else if (defaultExceptionLogger != null)
                    {
                        defaultExceptionLogger(ex, splitJoinItem);
                    }

                    var new_split_join_item = new SplitJoinItem <TParent, TOutputItem> (splitJoinItem.Parent,
                                                                                        default(TOutputItem),
                                                                                        splitJoinItem.TotalItemsCount);

                    new_split_join_item.Failed(ex);

                    return(new_split_join_item);
                }
            },
                            options ?? new ExecutionDataflowBlockOptions());

            return(block);
        }
Example #20
        public async Task TestProducerConsumer()
        {
            foreach (TaskScheduler scheduler in new[] { TaskScheduler.Default, new ConcurrentExclusiveSchedulerPair().ConcurrentScheduler })
            foreach (int maxMessagesPerTask in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 1, 2 })
            foreach (int dop in new[] { 1, 2 })
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                const int Messages = 100;
                var options = new ExecutionDataflowBlockOptions
                {
                    BoundedCapacity = boundedCapacity,
                    MaxDegreeOfParallelism = dop,
                    MaxMessagesPerTask = maxMessagesPerTask,
                    TaskScheduler = scheduler
                };
                TransformBlock<int, int> tb = sync ?
                    new TransformBlock<int, int>(i => i, options) :
                    new TransformBlock<int, int>(i => Task.Run(() => i), options);

                await Task.WhenAll(
                    Task.Run(async delegate { // consumer
                        int i = 0;
                        while (await tb.OutputAvailableAsync())
                        {
                            Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
                            i++;
                        }
                    }),
                    Task.Run(async delegate { // producer
                        for (int i = 0; i < Messages; i++)
                        {
                            await tb.SendAsync(i);
                        }
                        tb.Complete();
                    }));
            }
        }
Example #21
        /// <summary>
        /// Performs a specified asynchronous transform on each element of a sequence in parallel
        /// </summary>
        /// <typeparam name="TSource">The type of the elements in the sequence</typeparam>
        /// <typeparam name="TResult">The type rendered by the asynchronous transform</typeparam>
        /// <param name="source">The sequence</param>
        /// <param name="asyncSelector">The asynchronous transform</param>
        /// <param name="options">Manual Dataflow options</param>
        /// <returns>The results of the asynchronous transform on each element in no particular order</returns>
        public static async Task <IEnumerable <TResult> > DataflowSelectAsync <TSource, TResult>(this IEnumerable <TSource> source, Func <TSource, Task <TResult> > asyncSelector, ExecutionDataflowBlockOptions options)
        {
            if (source is null)
            {
                throw new ArgumentNullException(nameof(source));
            }
            var results        = new BlockingCollection <TResult>();
            var transformBlock = new TransformBlock <TSource, TResult>(asyncSelector, options);
            var actionBlock    = new ActionBlock <TResult>(result => results.Add(result), singleThreadBlock);

            transformBlock.LinkTo(actionBlock, propagateLink);
            foreach (var element in source)
            {
                transformBlock.Post(element);
            }
            transformBlock.Complete();
            await actionBlock.Completion.ConfigureAwait(false);

            return(results);
        }
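
        // Hypothetical usage sketch (not part of the original source); "fetchPageAsync" is an
        // illustrative assumption for the async selector, and the block options are arbitrary.
        private static async Task<int> TotalPageLengthAsync(
            IEnumerable<string> urls, Func<string, Task<string>> fetchPageAsync)
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = Environment.ProcessorCount };
            IEnumerable<string> pages = await urls.DataflowSelectAsync(fetchPageAsync, options);
            return pages.Sum(p => p.Length);
        }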
Example #22
        public async Task TestOrdering_Sync_OrderedDisabled()
        {
            // If ordering were enabled, this test would hang.

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2, EnsureOrdered = false };

            var mres = new ManualResetEventSlim();
            var tb = new TransformBlock<int, int>(i =>
            {
                if (i == 0) mres.Wait();
                return i;
            }, options);
            tb.Post(0);
            tb.Post(1);

            Assert.Equal(1, await tb.ReceiveAsync());
            mres.Set();
            Assert.Equal(0, await tb.ReceiveAsync());

            tb.Complete();
            await tb.Completion;
        }
Example #23
        /// <summary>
        /// Performs a specified asynchronous action for each element of a sequence in parallel
        /// </summary>
        /// <typeparam name="TSource">The type of the elements in the sequence</typeparam>
        /// <param name="source">The sequence</param>
        /// <param name="asyncAction">The asynchronous action</param>
        /// <param name="options">Manual Dataflow options</param>
        public static Task DataflowForAllAsync <TSource>(this IEnumerable <TSource> source, Func <TSource, Task> asyncAction, ExecutionDataflowBlockOptions options)
        {
            if (source is null)
            {
                throw new ArgumentNullException(nameof(source));
            }
            var block = new ActionBlock <TSource>(asyncAction, options);

            foreach (var element in source)
            {
                block.Post(element);
            }
            block.Complete();
            return(block.Completion);
        }
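
        // Hypothetical usage sketch (not part of the original source); "processFileAsync" is an
        // illustrative assumption for the async action, and the block options are arbitrary.
        private static Task ProcessAllFilesAsync(
            IEnumerable<string> files, Func<string, Task> processFileAsync)
        {
            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 };
            return files.DataflowForAllAsync(processFileAsync, options);
        }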
Example #24
        public void TestDebuggerDisplaysAndTypeProxies()
        {
            // Test both canceled and non-canceled
            foreach (var ct in new[] { new CancellationToken(false), new CancellationToken(true) })
            {
                // Some blocks have different code paths for whether they're greedy or not.
                // This helps with code-coverage.
                var dboBuffering   = new DataflowBlockOptions();
                var dboNoBuffering = new DataflowBlockOptions()
                {
                    BoundedCapacity = 1
                };
                var dboExBuffering = new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = 2, CancellationToken = ct
                };
                var dboExSpsc = new ExecutionDataflowBlockOptions {
                    SingleProducerConstrained = true
                };
                var dboExNoBuffering = new ExecutionDataflowBlockOptions {
                    MaxDegreeOfParallelism = 2, BoundedCapacity = 1, CancellationToken = ct
                };
                var dboGroupGreedy    = new GroupingDataflowBlockOptions();
                var dboGroupNonGreedy = new GroupingDataflowBlockOptions {
                    Greedy = false
                };

                // Item1 == test DebuggerDisplay, Item2 == test DebuggerTypeProxy, Item3 == object
                var objectsToTest = new Tuple <bool, bool, object>[]
                {
                    // Primary Blocks
                    // (Don't test DebuggerTypeProxy on instances that may internally have async operations in progress)
                    Tuple.Create <bool, bool, object>(true, true, new ActionBlock <int>(i => {})),
                    Tuple.Create <bool, bool, object>(true, true, new ActionBlock <int>(i => {}, dboExBuffering)),
                    Tuple.Create <bool, bool, object>(true, true, new ActionBlock <int>(i => {}, dboExSpsc)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new ActionBlock <int>(i => {}, dboExNoBuffering), 2)),
                    Tuple.Create <bool, bool, object>(true, true, new TransformBlock <int, int>(i => i)),
                    Tuple.Create <bool, bool, object>(true, true, new TransformBlock <int, int>(i => i, dboExBuffering)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new TransformBlock <int, int>(i => i, dboExNoBuffering), 2)),
                    Tuple.Create <bool, bool, object>(true, true, new TransformManyBlock <int, int>(i => new [] { i })),
                    Tuple.Create <bool, bool, object>(true, true, new TransformManyBlock <int, int>(i => new [] { i }, dboExBuffering)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new TransformManyBlock <int, int>(i => new [] { i }, dboExNoBuffering), 2)),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>()),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>(new DataflowBlockOptions()
                    {
                        NameFormat = "none"
                    })),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>(new DataflowBlockOptions()
                    {
                        NameFormat = "foo={0}, bar={1}"
                    })),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>(new DataflowBlockOptions()
                    {
                        NameFormat = "foo={0}, bar={1}, kaboom={2}"
                    })),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>(dboBuffering)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new BufferBlock <int>(new DataflowBlockOptions {
                        BoundedCapacity = 10
                    }), 20)),
                    Tuple.Create <bool, bool, object>(true, true, new BroadcastBlock <int>(i => i)),
                    Tuple.Create <bool, bool, object>(true, true, new BroadcastBlock <int>(i => i, dboBuffering)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new BroadcastBlock <int>(i => i, dboNoBuffering), 20)),
                    Tuple.Create <bool, bool, object>(true, true, new WriteOnceBlock <int>(i => i)),
                    Tuple.Create <bool, bool, object>(true, false, SendAsyncMessages(new WriteOnceBlock <int>(i => i), 1)),
                    Tuple.Create <bool, bool, object>(true, true, new WriteOnceBlock <int>(i => i, dboBuffering)),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>()),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>(dboGroupGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>(dboGroupNonGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int, int>()),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int, int>(dboGroupGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int, int>(dboGroupNonGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchedJoinBlock <int, int>(42)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchedJoinBlock <int, int>(42, dboGroupGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchedJoinBlock <int, int, int>(42, dboGroupGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchBlock <int>(42)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchBlock <int>(42, dboGroupGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, new BatchBlock <int>(42, dboGroupNonGreedy)),
                    Tuple.Create <bool, bool, object>(true, true, DataflowBlock.Encapsulate <int, int>(new BufferBlock <int>(), new BufferBlock <int>())),
                    Tuple.Create <bool, bool, object>(true, true, new BufferBlock <int>().AsObservable()),

                    // Supporting and Internal Types
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new ActionBlock <int>(i => {}, dboExBuffering), "_defaultTarget")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new ActionBlock <int>(i => {}, dboExNoBuffering), "_defaultTarget")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(DebuggerAttributes.GetFieldValue(new ActionBlock <int>(i => {}), "_defaultTarget"), "_messages")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new ActionBlock <int>(i => {}, dboExSpsc), "_spscTarget")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(DebuggerAttributes.GetFieldValue(new ActionBlock <int>(i => {}, dboExSpsc), "_spscTarget"), "_messages")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BufferBlock <int>(), "_source")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BufferBlock <int>(new DataflowBlockOptions {
                        BoundedCapacity = 10
                    }), "_source")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new TransformBlock <int, int>(i => i, dboExBuffering), "_source")),
                    Tuple.Create <bool, bool, object>(true, true, DebuggerAttributes.GetFieldValue(new TransformBlock <int, int>(i => i, dboExNoBuffering), "_reorderingBuffer")),
                    Tuple.Create <bool, bool, object>(true, true, DebuggerAttributes.GetFieldValue(DebuggerAttributes.GetFieldValue(new TransformBlock <int, int>(i => i, dboExBuffering), "_source"), "_targetRegistry")),
                    Tuple.Create <bool, bool, object>(true, true, DebuggerAttributes.GetFieldValue(DebuggerAttributes.GetFieldValue(WithLinkedTarget <TransformBlock <int, int>, int>(new TransformBlock <int, int>(i => i, dboExNoBuffering)), "_source"), "_targetRegistry")),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>().Target1),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>(dboGroupGreedy).Target1),
                    Tuple.Create <bool, bool, object>(true, true, new JoinBlock <int, int>(dboGroupNonGreedy).Target1),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new JoinBlock <int, int>().Target1, "_sharedResources")),
                    Tuple.Create <bool, bool, object>(true, true, new BatchedJoinBlock <int, int>(42).Target1),
                    Tuple.Create <bool, bool, object>(true, true, new BatchedJoinBlock <int, int>(42, dboGroupGreedy).Target1),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BatchBlock <int>(42), "_target")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BatchBlock <int>(42, dboGroupGreedy), "_target")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BatchBlock <int>(42, dboGroupNonGreedy), "_target")),
                    Tuple.Create <bool, bool, object>(true, false, new BufferBlock <int>().LinkTo(new ActionBlock <int>(i => {}))), // ActionOnDispose
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BroadcastBlock <int>(i => i), "_source")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BroadcastBlock <int>(i => i, dboGroupGreedy), "_source")),
                    Tuple.Create <bool, bool, object>(true, false, DebuggerAttributes.GetFieldValue(new BroadcastBlock <int>(i => i, dboGroupNonGreedy), "_source")),
                    Tuple.Create <bool, bool, object>(true, true, CreateNopLinkSource <int>()),
                    Tuple.Create <bool, bool, object>(true, true, CreateFilteringSource <int>()),
                    Tuple.Create <bool, bool, object>(true, true, CreateSendSource <int>()),
                    Tuple.Create <bool, bool, object>(true, false, CreateReceiveTarget <int>()),
                    Tuple.Create <bool, bool, object>(true, false, CreateOutputAvailableTarget()),
                    Tuple.Create <bool, bool, object>(true, false, CreateChooseTarget <int>()),
                    Tuple.Create <bool, bool, object>(true, false, new BufferBlock <int>().AsObservable().Subscribe(DataflowBlock.NullTarget <int>().AsObserver())),

                    // Other
                    Tuple.Create <bool, bool, object>(true, false, new DataflowMessageHeader(1)),
                };

                // Test all DDAs and DTPAs
                foreach (var obj in objectsToTest)
                {
                    if (obj.Item1)
                    {
                        DebuggerAttributes.ValidateDebuggerDisplayReferences(obj.Item3);
                    }
                    if (obj.Item2)
                    {
                        DebuggerAttributes.ValidateDebuggerTypeProxyProperties(obj.Item3);
                    }
                }
            }
        }
Exemplo n.º 25
0
        public static async Task ParallelForEach <T>(IEnumerable <T> resources, Func <T, Task> action, ExecutionDataflowBlockOptions options, Action <int, int> progress = null)
        {
            var done    = 0;
            var total   = resources.Count();
            var actions = new ActionBlock <T>(Run, options);

            foreach (var resource in resources)
            {
                await actions.SendAsync(resource);
            }

            actions.Complete();
            await actions.Completion;

            async Task Run(T item)
            {
                await action(item);

                progress?.Invoke(Interlocked.Increment(ref done), total);
            }
        }
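A brief usage sketch for the helper above, meant to run inside an async method; the resource list, the per-item delay, and the progress callback are illustrative placeholders, not part of the original sample:

        var options = new ExecutionDataflowBlockOptions
        {
            MaxDegreeOfParallelism = 4,   // process at most four resources at a time
            BoundedCapacity        = 8    // SendAsync then provides back-pressure on the producer
        };

        var ids = Enumerable.Range(1, 100).ToList();

        await ParallelForEach(
            ids,
            async id => await Task.Delay(10),       // placeholder for real per-resource work
            options,
            (done, total) => Console.WriteLine($"{done}/{total} completed"));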
Exemplo n.º 26
0
        void InitializeBlocks()
        {
            var linkOptions = new DataflowLinkOptions {
                PropagateCompletion = true
            };
            var executeOptions = new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = maxDegreeOfParallelism
            };
            var moveBlock = new TransformBlock <string, string>(sourceFilePath =>
            {
                var destinationFilePath = Path.Combine(processingFolderPath, Path.GetFileName(sourceFilePath));

                using (var from = File.OpenRead(sourceFilePath))
                    using (var to = File.OpenWrite(destinationFilePath))
                    {
                        from.CopyTo(to);
                    }

                return(destinationFilePath);
            }, new ExecutionDataflowBlockOptions {
                MaxDegreeOfParallelism = maxDegreeOfParallelism, CancellationToken = token
            });


            var broadcastTempFileBlock = new BroadcastBlock <string>(s => s, executeOptions);

            var readBlock = new TransformBlock <string, string>(async path => await File.ReadAllTextAsync(path), executeOptions);

            var aggregateBlock = new TransformBlock <string, int>(text => text.Where(symbol => Char.IsLetter(symbol)).Count(), executeOptions);

            var joinBlock = new JoinBlock <string, int>();

            var deleteTempFileBlock = new TransformBlock <Tuple <string, int>, Tuple <string, int> >(data => {
                File.Delete(data.Item1);
                return(data);
            });

            var saveBlock = new TransformBlock <Tuple <string, int>, string>(async data =>
            {
                var path = Path.Combine(resultFolderPath, Path.GetFileName(data.Item1));
                await File.WriteAllTextAsync(path, data.Item2.ToString());

                return(path);
            }, executeOptions);

            var raiseNotificationBlock = new ActionBlock <string>(s => OnFinished?.Invoke(s), executeOptions);

            moveBlock.LinkTo(broadcastTempFileBlock, linkOptions);
            broadcastTempFileBlock.LinkTo(readBlock, linkOptions);
            broadcastTempFileBlock.LinkTo(joinBlock.Target1, linkOptions);
            readBlock.LinkTo(aggregateBlock, linkOptions);
            aggregateBlock.LinkTo(joinBlock.Target2, linkOptions);
            joinBlock.LinkTo(deleteTempFileBlock, linkOptions);
            deleteTempFileBlock.LinkTo(saveBlock, linkOptions);
            saveBlock.LinkTo(raiseNotificationBlock, linkOptions);

            startBlock  = moveBlock;
            finishBlock = raiseNotificationBlock;
        }
Exemplo n.º 27
0
 /// <summary>
 /// Initializes a new instance of the <see cref="DataflowWebHookSender"/> class with a given collection of <paramref name="retryDelays"/> and
 /// <paramref name="options"/> for how to manage the queuing policy for each transmission. The transmission model is as follows: each try
 /// and each subsequent retry is managed by a separate <see cref="ActionBlock{T}"/>, which controls the level of concurrency used to
 /// send out WebHooks. The <paramref name="options"/> parameter can be used to control all <see cref="ActionBlock{T}"/> instances
 /// by setting the maximum level of concurrency, length of queue, and more.
 /// </summary>
 /// <param name="logger">The current <see cref="ILogger"/>.</param>
 /// <param name="retryDelays">A collection of <see cref="TimeSpan"/> instances indicating the delay between each retry. If <c>null</c>,
 /// then a default retry policy is used of one retry after 1 minute and another after 4 minutes. A retry is attempted if the
 /// delivery fails or does not result in a 2xx HTTP status code. If the status code is 410 then no retry is attempted. If the collection
 /// is empty then no retries are attempted.</param>
 /// <param name="options">An <see cref="ExecutionDataflowBlockOptions"/> used to control the <see cref="ActionBlock{T}"/> instances.
 /// The default setting uses a maximum of 8 concurrent transmitters for each try or retry.</param>
 public DataflowWebHookSender(ILogger logger, IEnumerable <TimeSpan> retryDelays, ExecutionDataflowBlockOptions options)
     : this(logger, retryDelays, options, httpClient : null)
 {
 }
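The XML remarks above describe a launch-per-attempt model. The following is a minimal sketch of that idea, not the actual DataflowWebHookSender implementation: one ActionBlock per try/retry, each bounded by the same ExecutionDataflowBlockOptions, with failed deliveries handed to the next block after its delay. The HttpClient call, the payload, and the delay values are assumptions for illustration only.

        var http        = new HttpClient();
        var retryDelays = new[] { TimeSpan.FromMinutes(1), TimeSpan.FromMinutes(4) };
        var options     = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 8 };

        ActionBlock<Uri>[] launchers = null;
        launchers = Enumerable.Range(0, retryDelays.Length + 1).Select(attempt =>
            new ActionBlock<Uri>(async uri =>
            {
                if (attempt > 0)
                {
                    await Task.Delay(retryDelays[attempt - 1]);     // back off before retrying
                }

                var response = await http.PostAsync(uri, new StringContent("{}"));
                bool retry = !response.IsSuccessStatusCode
                             && response.StatusCode != HttpStatusCode.Gone   // 410: give up
                             && attempt < retryDelays.Length;
                if (retry)
                {
                    launchers[attempt + 1].Post(uri);               // hand off to the next stage
                }
            }, options)).ToArray();

        // webhooks enter the pipeline through the first launcher
        launchers[0].Post(new Uri("https://example.com/webhook"));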
Exemplo n.º 28
0
        private static DataflowEndPoints <Message, Message> CreatePipeline()
        {
            var buffer = new BufferBlock <Message>();

            var executionOptions = new ExecutionDataflowBlockOptions()
            {
                BoundedCapacity        = 10,
                MaxDegreeOfParallelism = Environment.ProcessorCount,
                CancellationToken      = cancellationTokenSource.Token
            };

            var scaleInputData = new TransformBlock <Message, Message>(msg => {
                msg.Data *= 10;
                return(msg);
            }, executionOptions);

            processNegativeData = new TransformBlock <Message, Message>(msg => {
                if (msg.Data >= 0)
                {
                    throw new InvalidOperationException($"Invalid data. Data should be negative but was {msg.Data}.");
                }
                Task.Delay(rnd.Next(100, 500)).Wait();
                msg.Data *= 5;
                return(msg);
            }, executionOptions);
            NegativeCompletion = processNegativeData.Completion;

            processPositiveData = new TransformBlock <Message, Message>(msg => {
                if (msg.Data < 0)
                {
                    throw new InvalidOperationException($"Invalid data. Data should be positive but was {msg.Data}.");
                }
                Task.Delay(rnd.Next(100, 500)).Wait();
                msg.Data *= 5;
                return(msg);
            }, executionOptions);
            PositiveCompletion = processPositiveData.Completion;

            var outputBuffer = new BufferBlock <Message>();

            var linkOptions = new DataflowLinkOptions()
            {
                PropagateCompletion = true
            };

            buffer.LinkTo(scaleInputData, linkOptions);
            scaleInputData.LinkTo(processNegativeData, msg => msg.Data < 0);
            scaleInputData.LinkTo(processPositiveData, msg => msg.Data >= 0);
            processNegativeData.LinkTo(outputBuffer);
            processPositiveData.LinkTo(outputBuffer);
            scaleInputData.Completion.ContinueWith(_ => {
                processNegativeData.Complete();
                processPositiveData.Complete();
            });

            // complete the output buffer only after both branches have finished (even if one faults)
            Task.WhenAll(processNegativeData.Completion, processPositiveData.Completion)
                .ContinueWith(_ => outputBuffer.Complete());

            return(new DataflowEndPoints <Message, Message>(buffer, outputBuffer));
        }
Exemplo n.º 29
0
 public static TransformBlock <TState, TState> GetWrappedTransformBlock <TState>(Func <TState, Task <TState> > action, ExecutionDataflowBlockOptions options) where TState : class, IWorkState, new()
 {
     return(new TransformBlock <TState, TState>(
                async(state) =>
     {
         try
         {
             return await action(state);
         }
         catch (Exception ex)
         {
             state.IsFaulted = true;
             state.EDI = ExceptionDispatchInfo.Capture(ex);
             return state;
         }
     }, options));
 }
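A possible way to consume the wrapper above, assuming a hypothetical WorkState class that satisfies the IWorkState constraint: because exceptions are captured into state.EDI instead of faulting the block, faulted states can be routed to a dead-letter block while healthy states continue down the pipeline.

        var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4 };

        var step = GetWrappedTransformBlock<WorkState>(async state =>
        {
            await Task.Delay(10);      // placeholder for real work that may throw
            return state;
        }, options);

        var deadLetter = new ActionBlock<WorkState>(
            state => Console.WriteLine(state.EDI?.SourceException?.Message));
        var nextStep = new ActionBlock<WorkState>(
            state => Console.WriteLine("ok"));

        // faulted states go to the dead-letter block, healthy ones continue
        step.LinkTo(deadLetter, state => state.IsFaulted);
        step.LinkTo(nextStep, state => !state.IsFaulted);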
Exemplo n.º 30
0
        public async Task HandleAsync(CommandContext context, NextDelegate next)
        {
            if (context.Command is BulkUpdateContents bulkUpdates)
            {
                if (bulkUpdates.Jobs?.Length > 0)
                {
                    var executionOptions = new ExecutionDataflowBlockOptions
                    {
                        MaxDegreeOfParallelism = Math.Max(1, Environment.ProcessorCount / 2)
                    };

                    var createCommandsBlock = new TransformManyBlock <BulkTask, BulkTaskCommand>(async task =>
                    {
                        return(await CreateCommandsAsync(task));
                    }, executionOptions);

                    var executeCommandBlock = new ActionBlock <BulkTaskCommand>(async command =>
                    {
                        await ExecuteCommandAsync(command);
                    }, executionOptions);

                    createCommandsBlock.LinkTo(executeCommandBlock, new DataflowLinkOptions
                    {
                        PropagateCompletion = true
                    });

                    contextProvider.Context.Change(b => b
                                                   .WithoutContentEnrichment()
                                                   .WithoutCleanup()
                                                   .WithUnpublished(true)
                                                   .WithoutTotal());

                    var requestedSchema = bulkUpdates.SchemaId.Name;

                    var results = new ConcurrentBag <BulkUpdateResultItem>();

                    for (var i = 0; i < bulkUpdates.Jobs.Length; i++)
                    {
                        var task = new BulkTask(
                            context.CommandBus,
                            requestedSchema,
                            i,
                            bulkUpdates.Jobs[i],
                            bulkUpdates,
                            results);

                        await createCommandsBlock.SendAsync(task);
                    }

                    createCommandsBlock.Complete();

                    await executeCommandBlock.Completion;

                    context.Complete(new BulkUpdateResult(results));
                }
                else
                {
                    context.Complete(new BulkUpdateResult());
                }
            }
            else
            {
                await next(context);
            }
        }
Exemplo n.º 31
0
 public static ActionBlock <TState> GetWrappedActionBlock <TState>(Func <TState, TState> action, ExecutionDataflowBlockOptions options)
 {
     return(new ActionBlock <TState>(
                (state) =>
     {
         try
         { action(state); }
         catch
         { }
     }, options));
 }
Exemplo n.º 32
0
        public async Task TestOrderMaintained()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                var options = new ExecutionDataflowBlockOptions { SingleProducerConstrained = singleProducerConstrained };
                int prev = -1;
                Action<int> body = i => 
                {
                    Assert.Equal(expected: prev + 1, actual: i);
                    prev = i;
                };

                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(body, options) :
                    new ActionBlock<int>(i => Task.Run(() => body(i)), options);
                ab.PostRange(0, 100);
                ab.Complete();
                await ab.Completion;
            }
        }
Exemplo n.º 33
0
 public static TransformBlock <TState, TState> GetTransformBlockAsync <TState>(Func <TState, TState> action, ExecutionDataflowBlockOptions options) where TState : class, IWorkState, new()
 {
     return(new TransformBlock <TState, TState>(action, options));
 }
Exemplo n.º 34
0
        public async Task TestOperationCanceledExceptionsIgnored()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                var options = new ExecutionDataflowBlockOptions { SingleProducerConstrained = singleProducerConstrained };
                int sumOfOdds = 0;
                Action<int> body = i => {
                    if ((i % 2) == 0) throw new OperationCanceledException();
                    sumOfOdds += i;
                };

                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(body, options) :
                    new ActionBlock<int>(async i => { await Task.Yield(); body(i); }, options);

                const int MaxValue = 10;
                ab.PostRange(0, MaxValue);
                ab.Complete();
                await ab.Completion;
                Assert.Equal(
                    expected: Enumerable.Range(0, MaxValue).Where(i => i % 2 != 0).Sum(),
                    actual: sumOfOdds);
            }
        }
Exemplo n.º 35
0
 public static TransformBlock <ReceivedData, TState> GetStateTransformBlock <TState>(Func <ReceivedData, Task <TState> > action, ExecutionDataflowBlockOptions options) where TState : class, IWorkState, new()
 {
     return(new TransformBlock <ReceivedData, TState>(
                async(data) =>
     {
         try
         { return await action(data); }
         catch
         { return null; }
     }, options));
 }
Exemplo n.º 36
0
        public async Task TestFaulting()
        {
            for (int trial = 0; trial < 3; trial++)
            foreach (bool singleProducerConstrained in DataflowTestHelpers.BooleanValues)
            {
                var options = new ExecutionDataflowBlockOptions { SingleProducerConstrained = singleProducerConstrained };
                Action thrower = () => { throw new InvalidOperationException(); };

                ActionBlock<int> ab = null;
                switch (trial)
                {
                    case 0: ab = new ActionBlock<int>(i => thrower(), options); break;
                    case 1: ab = new ActionBlock<int>(i => { thrower(); return Task.FromResult(0); }, options); break;
                    case 2: ab = new ActionBlock<int>(i => Task.Run(thrower), options); break;
                }
                for (int i = 0; i < 4; i++)
                {
                    ab.Post(i); // Post may return false, depending on race with ActionBlock faulting
                }

                try
                {
                    await ab.Completion;
                    Assert.True(false, "Should always throw IOE");
                }
                catch (InvalidOperationException) { }

                Assert.Equal(expected: 0, actual: ab.InputCount);
                Assert.False(ab.Post(5));
            }
        }
Exemplo n.º 37
0
 public static TransformBlock <TState, TState> GetByteManipulationTransformBlock <TState>(Func <ReadOnlyMemory <byte>, Task <byte[]> > action, ExecutionDataflowBlockOptions options, bool outbound, Predicate <TState> predicate) where TState : class, IWorkState, new()
 {
     return(new TransformBlock <TState, TState>(
                async(state) =>
     {
         try
         {
             if (outbound)
             {
                 if (state.SendData?.Length > 0)
                 {
                     state.SendData = await action(state.SendData);
                 }
                 else if (state.SendLetter.Body?.Length > 0)
                 {
                     state.SendLetter.Body = await action(state.SendLetter.Body);
                 }
             }
             else if (predicate(state))
             {
                 state.ReceivedData.Data = await action(state.ReceivedData.Data);
             }
             return state;
         }
         catch (Exception ex)
         {
             state.IsFaulted = true;
             state.EDI = ExceptionDispatchInfo.Capture(ex);
             return state;
         }
     }, options));
 }
Exemplo n.º 38
0
        public async Task TestReleasingOfPostponedMessages()
        {
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                Barrier barrier1 = new Barrier(2), barrier2 = new Barrier(2);
                Action<int> body = i => { barrier1.SignalAndWait(); barrier2.SignalAndWait(); };
                var options = new ExecutionDataflowBlockOptions { BoundedCapacity = 1 };
                ActionBlock<int> ab = sync ?
                    new ActionBlock<int>(body, options) :
                    new ActionBlock<int>(i => Task.Run(() => body(i)), options);

                ab.Post(0);
                barrier1.SignalAndWait();

                Task<bool>[] sends = Enumerable.Range(0, 10).Select(i => ab.SendAsync(i)).ToArray();
                Assert.All(sends, s => Assert.False(s.IsCompleted));

                ab.Complete();
                barrier2.SignalAndWait();

                await ab.Completion;

                Assert.All(sends, s => Assert.False(s.Result));
            }
        }
Exemplo n.º 39
0
        public async Task <RemoteSegmentWithData[]> GetRemoteMetadata(ImportedEvent[] events)
        {
            // asynchronously start downloading all the metadata we need
            Log.Info($"Begin downloading metadata for segments (Request concurrency: {this.maxDegreeOfParallelism})");

            var groupedEvents = new ConcurrentDictionary <long, ConcurrentBag <ImportedEvent> >(
                this.maxDegreeOfParallelism,
                events.Length / 10);

            // execution options allow us to throttle requests
            var options = new ExecutionDataflowBlockOptions()
            {
                MaxDegreeOfParallelism    = this.maxDegreeOfParallelism,
                SingleProducerConstrained = true,
            };

            // the transform block maps A->B, ensuring that all events have an audio recording id
            var getRecordingIdBlock = new TransformBlock <ImportedEvent, ImportedEvent>(
                (importedEvent) => this.GetAudioRecordingId(importedEvent),
                options);

            // all events are buffered into groups based on audio recording id
            var groupRecordingsBlock = new ActionBlock <ImportedEvent>(
                importedEvent =>
            {
                var collection = groupedEvents.GetOrAdd(
                    importedEvent.AudioRecordingId.Value,
                    new ConcurrentBag <ImportedEvent>());

                collection.Add(importedEvent);
            });

            // the metadata for each recording is retrieved and used to produce many segments (one per imported event)
            var createSegmentsBlock = new TransformManyBlock <KeyValuePair <long, ConcurrentBag <ImportedEvent> >, RemoteSegmentWithData>(
                (group) => this.DownloadRemoteMetadata(group.Key, group.Value),
                options);

            // the transform block can't `Complete` until its output is empty
            // so add a buffer block to store the transform block's output
            var bufferBlock = new BufferBlock <RemoteSegmentWithData>();

            // link the two parts of block A
            getRecordingIdBlock.LinkTo(groupRecordingsBlock);

            // link the two parts of block B
            createSegmentsBlock.LinkTo(bufferBlock);

            // kick off the chain, resolve audio recording ids and group
            foreach (var record in events)
            {
                // post an event to the transform block to process
                getRecordingIdBlock.Post(record);
            }

            Log.Trace("Finished posting messages to recording id resolver");
            getRecordingIdBlock.Complete();

            Log.Trace("Waiting for getRecordingIdBlock to resolve");
            await getRecordingIdBlock.Completion;

            Log.Trace("Waiting for groupRecordingsBlock to resolve");
            groupRecordingsBlock.Complete();
            await groupRecordingsBlock.Completion;

            var eventCount = groupedEvents.Sum(kvp => kvp.Value.Count);

            Log.Trace($"Finished waiting for recording ids to resolve, {eventCount} events grouped into {groupedEvents.Count} recordings");

            // now post the grouped audio recordings to the segment generating block
            foreach (var keyValuePair in groupedEvents)
            {
                createSegmentsBlock.Post(keyValuePair);
            }

            Log.Trace("Finished posting messages to recording metadata downloader");
            createSegmentsBlock.Complete();

            // wait for all requests to finish
            Log.Trace("Begin waiting for metadata downloader");
            await createSegmentsBlock.Completion;

            Log.Trace("Finished waiting for metadata downloader");

            if (bufferBlock.TryReceiveAll(out var segments))
            {
                RemoteSegmentWithData[] segmentsArray;
                int finalEventCount;
                lock (segments)
                {
                    segmentsArray = segments.ToArray();

                    // do some excessive logic checking because we used to have race conditions
                    finalEventCount = segmentsArray.Sum(x => x.Data.Count);
                    if (events.Length != finalEventCount)
                    {
                        throw new InvalidOperationException(
                                  $"The number of supplied events ({events.Length}) did" +
                                  $" not match the number of events that had metadata resolved ({finalEventCount})" +
                                  " - a race condition has occurred");
                    }
                }

                Log.Info($"Metadata generated for {finalEventCount} events, {segmentsArray.Length} segments created");

                return(segmentsArray);
            }
            else
            {
                throw new InvalidOperationException("Failed to retrieve media info from data flow.");
            }
        }
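The buffering trick commented in the method above is worth isolating: a TransformManyBlock's Completion does not finish while produced items still sit in its output queue, so the results are drained into a BufferBlock and collected with TryReceiveAll once the producer has completed. A minimal standalone sketch of that pattern (the squaring transform is a placeholder), intended to run inside an async method:

        var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 4 };

        // the producer; its Completion cannot finish while output items remain undelivered
        var producer = new TransformManyBlock<int, int>(
            i => new[] { i, i * i },       // placeholder: emit the value and its square
            options);

        // park the output here so the producer can drain and complete
        var sink = new BufferBlock<int>();
        producer.LinkTo(sink);

        for (int i = 0; i < 10; i++)
        {
            producer.Post(i);
        }
        producer.Complete();
        await producer.Completion;

        // everything the producer emitted is now sitting in the sink
        if (sink.TryReceiveAll(out var results))
        {
            Console.WriteLine($"received {results.Count} items");
        }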
Exemplo n.º 40
0
        private async Task AddFile()
        {
            const int chunkSize = 2 * 1024 * 1024; // 2MB

            IsProcessing = true;
            try
            {
                Message = "Uploading File...";

                var dlg = new Microsoft.Win32.OpenFileDialog {
                    Title = "Select File to Upload"
                };

                if (dlg.ShowDialog() != true)
                {
                    return;
                }
                var fileName = dlg.FileName;
                if (string.IsNullOrEmpty(fileName))
                {
                    return;
                }

                var sw = Stopwatch.StartNew();

                using (var service = new FileUploadServiceClient())
                {
                    var fileId = await service.CreateBlobFileAsync(Path.GetFileName(fileName),
                                                                   fileName, new FileInfo(fileName).Length, Environment.UserName);

                    using (var stream = File.OpenRead(fileName))
                    {
                        var edb = new ExecutionDataflowBlockOptions {
                            BoundedCapacity = 5, MaxDegreeOfParallelism = 5
                        };

                        var ab = new ActionBlock <Tuple <byte[], int> >(x => service.AddBlobFileChunkAsync(fileId, x.Item2, x.Item1), edb);

                        foreach (var item in stream.GetByteChunks(chunkSize).Select((x, i) => Tuple.Create(x, i)))
                        {
                            await ab.SendAsync(item);
                        }

                        ab.Complete();

                        await ab.Completion;
                    }
                }

                await RefreshData();

                Message = string.Format("Elapsed: {0} seconds", sw.Elapsed.TotalSeconds);
            }
            catch (Exception e)
            {
                Message = "Error";
                MessageBox.Show(e.Message, "Error");
            }
            finally
            {
                IsProcessing = false;
            }
        }
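The GetByteChunks extension used above is not shown in this excerpt. A plausible sketch, assuming it simply yields the stream's contents as fixed-size byte[] chunks (with a shorter final chunk):

        public static class StreamExtensions
        {
            public static IEnumerable<byte[]> GetByteChunks(this Stream stream, int chunkSize)
            {
                var buffer = new byte[chunkSize];
                int read;

                while ((read = stream.Read(buffer, 0, buffer.Length)) > 0)
                {
                    // copy so each yielded chunk owns its own data and is exactly `read` bytes long
                    var chunk = new byte[read];
                    Array.Copy(buffer, chunk, read);
                    yield return chunk;
                }
            }
        }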
Exemplo n.º 41
0
        void InitDataFlow()
        {
            //Create schedulers
            scheduler             = new QueuedTaskScheduler(System.Threading.Tasks.TaskScheduler.Default, PARALLEL_READS);
            highPriorityScheduler = scheduler.ActivateNewQueue(0);
            lowPriorityScheduler  = scheduler.ActivateNewQueue(1);

            //create options
            optionsReadHighP = new ExecutionDataflowBlockOptions
            {
                TaskScheduler          = highPriorityScheduler,
                MaxDegreeOfParallelism = PARALLEL_READS,
                CancellationToken      = cancelTokenSrc.Token
            };

            optionsReadLowP = new ExecutionDataflowBlockOptions
            {
                TaskScheduler          = lowPriorityScheduler,
                MaxDegreeOfParallelism = PARALLEL_READS,
                CancellationToken      = cancelTokenSrc.Token
            };


            optionsWriteBlock = new ExecutionDataflowBlockOptions
            {
                CancellationToken = cancelTokenSrc.Token
            };

            optionsBatchBlock = new GroupingDataflowBlockOptions
            {
                Greedy            = true,
                CancellationToken = cancelTokenSrc.Token,
            };

            optionsLink = new DataflowLinkOptions {
                PropagateCompletion = true,
            };

            // CollectionInfoSaver collectionInfoSaver = new CollectionInfoSaver(logger);

            //create blocks
            bufferBlockHighP = new BufferBlock <SchedulerJob>();
            bufferBlockLowP  = new BufferBlock <SchedulerJob>();

            highPriorityReadInfoBlock = new TransformBlock <SchedulerJob, CollectionResult>(async(sqlJob) => {
                if (sqlJob != null)
                {
                    if (sqlJob.JobUpdater != null)
                    {
                        return(await sqlJob.JobUpdater.UpdateJob(sqlJob));
                    }
                }

                return(null);
            }, optionsReadHighP);

            lowPriorityReadInfoBlock = new TransformBlock <SchedulerJob, CollectionResult>(async(sqlJob) => {
                if (sqlJob != null)
                {
                    if (sqlJob.JobUpdater != null)
                    {
                        return(await sqlJob.JobUpdater.UpdateJob(sqlJob));
                    }
                }

                return(null);
            }, optionsReadLowP);

            batchBlock = new BatchBlock <CollectionResult>(1, optionsBatchBlock);

            writeInfoBlock = new ActionBlock <CollectionResult[]>(sqlInfoArray => ResultSaver.SaveResults(sqlInfoArray), optionsWriteBlock);


            //link blocks
            bufferBlockHighP.LinkTo(highPriorityReadInfoBlock, optionsLink);
            bufferBlockLowP.LinkTo(lowPriorityReadInfoBlock, optionsLink);

            highPriorityReadInfoBlock.LinkTo(batchBlock, optionsLink);
            lowPriorityReadInfoBlock.LinkTo(batchBlock, optionsLink);

            batchBlock.LinkTo(writeInfoBlock, optionsLink);
        }
Exemplo n.º 42
0
        public async Task TestMessagePostponement()
        {
            const int Excess = 10;
            foreach (int boundedCapacity in new[] { 1, 3 })
            {
                var options = new ExecutionDataflowBlockOptions { BoundedCapacity = boundedCapacity };
                foreach (var tb in new[] { new TransformBlock<int, int>(i => i, options), new TransformBlock<int, int>(i => Task.Run(() => i), options) })
                {
                    var sendAsync = new Task<bool>[boundedCapacity + Excess];
                    for (int i = 0; i < boundedCapacity + Excess; i++)
                    {
                        sendAsync[i] = tb.SendAsync(i);
                    }
                    tb.Complete();

                    for (int i = 0; i < boundedCapacity; i++)
                    {
                        Assert.True(sendAsync[i].IsCompleted);
                        Assert.True(sendAsync[i].Result);
                    }

                    for (int i = 0; i < Excess; i++)
                    {
                        Assert.False(await sendAsync[boundedCapacity + i]);
                    }
                }
            }
        }
Exemplo n.º 43
0
        static void Main(string[] args)
        {
            Console.WindowWidth   = 100;
            Console.WindowHeight += 8;
            var sync    = new ManualResetEventSlim();
            var options = new ExecutionDataflowBlockOptions {
                BoundedCapacity = 3
            };

            IPropagatorBlock <int, int> bb = new BufferBlock <int>();

            bb = new PropogateHook <int, int>("Buffer", bb, _sync);

            ITargetBlock <int> ab = new ActionBlock <int>(i =>
            {
                sync.Wait();
                Console.WriteLine("Action Block: Processing Value = {0}", i);
            }, options);

            bb.LinkTo(ab);
            var linkageToNullTarget = bb.LinkTo(DataflowBlock.NullTarget <int>());

            Console.WriteLine(@"                    |");
            Console.WriteLine(@"                   \|/");
            Console.WriteLine(@"             BufferBlock");
            Console.WriteLine(@"              |        |");
            Console.WriteLine(@"             \|/      \|/");
            Console.WriteLine(@"      ActionBlock     NullTarget");
            Console.WriteLine(@"");

            Console.ForegroundColor = ConsoleColor.White;
            Console.WriteLine("now we will offer 4 messages.\r\n");
            Console.ResetColor();

            Console.WriteLine("press any key to continue.......\r\n");
            Console.ReadKey(true);

            int item = 0;

            for (int i = 0; i < 4; i++)
            {
                item++;
                var header = new DataflowMessageHeader(item);
                (bb as ITargetBlock <int>).OfferMessage(header, item, null, false);
            }

            Thread.Sleep(100);
            Console.WriteLine("\r\n#########################################");
            Console.WriteLine("4 items was offered to the Action Block");
            Console.WriteLine("the Action Block postponed the last one");
            Console.WriteLine("because it reached its Bounded Capacity.");
            Console.WriteLine("\r\nthe item was offered to the NullTarget");
            Console.WriteLine("which accept it. now the item is no longer ");
            Console.WriteLine("available on the buffer output queue.");
            Console.WriteLine("#########################################\r\n");

            Console.ForegroundColor = ConsoleColor.White;
            Console.WriteLine("now we will let the Action Block processing");
            Console.WriteLine("all the items in its input queue.\r\n");
            Console.ResetColor();

            Console.WriteLine("press any key to continue.......\r\n");
            Console.ReadKey(true);

            sync.Set();
            Thread.Sleep(100);
            Console.WriteLine("\r\n#########################################");
            Console.WriteLine("the Action Block processed all the item in its input queue.");
            Console.WriteLine("(you may notice, that it try to consume message 4");
            Console.WriteLine("just when it complete to process the first message).");
            Console.WriteLine("Right now the Action Block has noting in its input queue,");
            Console.WriteLine("so theoretically it should accept new offering.");
            Console.WriteLine("#########################################\r\n");

            Console.ForegroundColor = ConsoleColor.White;
            Console.WriteLine("next we will offer 2 more items");
            Console.ResetColor();

            Console.WriteLine("press any key to continue.......\r\n");
            Console.ReadKey(true);

            for (int i = 0; i < 2; i++)
            {
                item++;
                var header = new DataflowMessageHeader(item);
                (bb as ITargetBlock <int>).OfferMessage(header, item, null, false);
            }

            Thread.Sleep(100);
            Console.ResetColor();
            Console.WriteLine("\r\n#########################################");
            Console.WriteLine("the Action Block postponed the message");
            Console.WriteLine("and try to get it via consume (in instead ");
            Console.WriteLine("of accepting the offering)");
            Console.WriteLine("in the mean time the message accepted");
            Console.WriteLine("by the NullTarget and no longer available to be consumed.");
            Console.WriteLine("\r\nTHIS IS SUPPRISING BEHAVIOR, ");
            Console.WriteLine("\r\nthe action block will never get any item");
            Console.WriteLine("unless the NullTarget will be unlinked");
            Console.WriteLine("#########################################\r\n");


            Console.ForegroundColor = ConsoleColor.White;
            Console.WriteLine("next we will unlink the NullTarget");
            Console.WriteLine("and then offer 2 messages\r\n");
            Console.ResetColor();

            Console.WriteLine("press any key to continue.......\r\n");
            Console.ReadKey(true);

            linkageToNullTarget.Dispose();
            Thread.Sleep(100);
            for (int i = 0; i < 2; i++)
            {
                item++;
                var header = new DataflowMessageHeader(item);
                (bb as ITargetBlock <int>).OfferMessage(header, item, null, false);
            }
            Thread.Sleep(500);

            Console.WriteLine("\r\n#########################################");
            Console.WriteLine("Summary: this behavior is not exactly what you would expected,");
            Console.WriteLine("by drilling down in to the IL, we found that");
            Console.WriteLine("the value of OutstandingTransfers in the following statement");
            Console.WriteLine("... || m_boundingState.OutstandingTransfers == 0 && ...");
            Console.WriteLine("is 1 therefore the action block is taking a wired route");
            Console.WriteLine("#########################################\r\n");

            Console.ReadKey(true);
        }
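One way to avoid the message loss demonstrated above is to never link an unconditional NullTarget alongside a bounded consumer that may postpone. If a discard sink is needed, give it a predicate so it only accepts messages the real consumer is never meant to see. A minimal sketch (the negative-value filter is just an example of such a predicate):

        var buffer = new BufferBlock<int>();
        var worker = new ActionBlock<int>(
            i => Console.WriteLine("processing {0}", i),
            new ExecutionDataflowBlockOptions { BoundedCapacity = 3 });

        buffer.LinkTo(worker);

        // the discard sink only ever sees values the worker is not supposed to process,
        // so a postponed (but wanted) message can no longer be stolen by the NullTarget
        buffer.LinkTo(DataflowBlock.NullTarget<int>(), i => i < 0);

        for (int i = -2; i < 8; i++)
        {
            buffer.Post(i);
        }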
Exemplo n.º 44
0
        public async Task TestOrdering_Async_OrderedDisabled()
        {
            // If ordering were enabled, this test would hang.

            var options = new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = DataflowBlockOptions.Unbounded, EnsureOrdered = false };

            var tasks = new TaskCompletionSource<int>[10];
            for (int i = 0; i < tasks.Length; i++)
            {
                tasks[i] = new TaskCompletionSource<int>();
            }

            var tb = new TransformBlock<int, int>(i => tasks[i].Task, options);
            tb.PostRange(0, tasks.Length);

            for (int i = tasks.Length - 1; i >= 0; i--)
            {
                tasks[i].SetResult(i);
                Assert.Equal(expected: i, actual: await tb.ReceiveAsync());
            }

            tb.Complete();
            await tb.Completion;
        }
Exemplo n.º 45
0
        public static IDisposable Start(List <string> urls, Func <string, byte[], Task> compute)
        {
            // Step 1
            var downloaderOptions = new ExecutionDataflowBlockOptions()
            {
            };
            var downloader = new TransformBlock <string, string>(
                async(url) =>
            {
                // thanks to IOCP, the thread pool worker thread returns to the pool while the download is awaited
                WebClient wc  = new WebClient();
                string result = await wc.DownloadStringTaskAsync(url);
                return(result);
            }, downloaderOptions);


            var printer = new ActionBlock <string>(text =>
            {
                Console.WriteLine($"Recveied text - Thread ID {Thread.CurrentThread.ManagedThreadId}");
            });

            downloader.LinkTo(printer);

            foreach (var url in urls)
            {
                downloader.Post(url);
            }

            // Step 2
            var contentBroadcaster = new BroadcastBlock <string>(s => s);

            var linkParser = new TransformManyBlock <string, string>(
                (html) =>
            {
                var doc = new HtmlDocument();
                doc.LoadHtml(html);

                var links = from n in doc.DocumentNode.Descendants("a")
                            where n.Attributes.Contains("href")
                            let url = n.GetAttributeValue("href", "")
                                      where httpRgx.IsMatch(url)
                                      select url;

                return(links);
            });

            var imgParser = new TransformManyBlock <string, string>(
                (html) =>
            {
                var doc = new HtmlDocument();
                doc.LoadHtml(html);

                var imageLinks = from n in doc.DocumentNode.Descendants("img")
                                 where n.Attributes.Contains("src")
                                 let url = n.GetAttributeValue("src", "")
                                           where httpRgx.IsMatch(url)
                                           select url;
                return(imageLinks);
            });


            // Step 3

            var linkBroadcaster = new BroadcastBlock <string>(s => s);

            var writer = new ActionBlock <string>(async url =>
            {
                WebClient wc = new WebClient();
                // thanks to IOCP, the thread pool worker thread returns to the pool while the download is awaited
                byte[] buffer = await wc.DownloadDataTaskAsync(url);

                await compute(url, buffer);
            });

            StringComparison   comparison = StringComparison.InvariantCultureIgnoreCase;
            Predicate <string> linkFilter = link =>
                                            link.IndexOf(".aspx", comparison) != -1 ||
                                            link.IndexOf(".php", comparison) != -1 ||
                                            link.IndexOf(".htm", comparison) != -1 ||
                                            link.IndexOf(".html", comparison) != -1;

            Predicate <string> imgFilter = url =>
                                           url.EndsWith(".jpg", comparison) ||
                                           url.EndsWith(".png", comparison) ||
                                           url.EndsWith(".gif", comparison);

            IDisposable disposeAll = new CompositeDisposable(
                // from [downloader] to [contentBroadcaster]
                downloader.LinkTo(contentBroadcaster),
                // from [contentBroadcaster] to [imgParser]
                contentBroadcaster.LinkTo(imgParser),
                // from [contentBroadcaster] to [linkParserHRef]
                contentBroadcaster.LinkTo(linkParser),
                // from [linkParser] to [linkBroadcaster]
                linkParser.LinkTo(linkBroadcaster),
                // conditional link to from [linkBroadcaster] to [downloader]
                linkBroadcaster.LinkTo(downloader, linkFilter),
                // from [linkBroadcaster] to [writer]
                linkBroadcaster.LinkTo(writer, imgFilter),
                // from [imgParser] to [writer]
                imgParser.LinkTo(writer));

            return(disposeAll);
        }
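A hypothetical way to drive the crawler above; the seed URL and the compute callback are placeholders, not part of the original sample:

        var seeds = new List<string> { "http://example.com" };

        // compute receives each downloaded image's url and bytes; here we only log the size
        using (Start(seeds, (url, buffer) =>
        {
            Console.WriteLine($"{url}: {buffer.Length} bytes");
            return Task.CompletedTask;
        }))
        {
            Console.ReadLine();   // keep the pipeline links alive until a key is pressed
        }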
Exemplo n.º 46
0
        public async Task TestAsObservableAndAsObserver_AllObserversGetData()
        {
            int total = 0;
            var options = new ExecutionDataflowBlockOptions { TaskScheduler = new ConcurrentExclusiveSchedulerPair().ExclusiveScheduler };
            ITargetBlock<int>[] targets = Enumerable.Range(0, 3).Select(_ => new ActionBlock<int>(i => total += i, options)).ToArray();

            var source = new BufferBlock<int>();
            var sourceObservable = source.AsObservable();
            foreach (var target in targets)
            {
                sourceObservable.Subscribe(target.AsObserver());
            }

            int expectedTotal = 0;
            for (int i = 1; i <= 10; i++)
            {
                expectedTotal += i * targets.Length;
                source.Post(i);
            }
            source.Complete();

            await source.Completion;
            foreach (var target in targets)
            {
                await target.Completion;
            }
            Assert.Equal(expected: expectedTotal, actual: total);
        }
Exemplo n.º 47
0
        public async Task TestArrayListReusePossibleForDop1()
        {
            foreach (int boundedCapacity in new[] { DataflowBlockOptions.Unbounded, 2 })
            foreach (bool sync in DataflowTestHelpers.BooleanValues)
            {
                foreach (int dop in new[] { 1, Environment.ProcessorCount })
                {
                    var dbo = new ExecutionDataflowBlockOptions { BoundedCapacity = boundedCapacity, MaxDegreeOfParallelism = dop };
                    foreach (IList<int> list in new IList<int>[] { new int[1], new List<int> { 0 }, new Collection<int> { 0 } })
                    {
                        int nextExpectedValue = 1;

                        TransformManyBlock<int, int> transform = null;
                        Func<int, IEnumerable<int>> body = i => {
                            if (i == 100) // we're done iterating
                            {
                                transform.Complete();
                                return (IEnumerable<int>)null;
                            }
                            else if (dop == 1)
                            {
                                list[0] = i + 1; // reuse the list over and over, but only at dop == 1
                                return (IEnumerable<int>)list;
                            }
                            else if (list is int[])
                            {
                                return new int[1] { i + 1 };
                            }
                            else if (list is List<int>)
                            {
                                return new List<int>() { i + 1 };
                            }
                            else
                            {
                                return new Collection<int>() { i + 1 };
                            }
                        };

                        transform = sync ?
                            new TransformManyBlock<int, int>(body, dbo) :
                            new TransformManyBlock<int, int>(i => Task.Run(() => body(i)), dbo);

                        TransformBlock<int, int> verifier = new TransformBlock<int, int>(i => {
                            Assert.Equal(expected: nextExpectedValue, actual: i);
                            nextExpectedValue++;
                            return i;
                        });

                        transform.LinkTo(verifier);
                        verifier.LinkTo(transform);

                        await transform.SendAsync(0);
                        await transform.Completion;
                    }
                }
            }
        }