/// <summary>
/// Runs each rail on its own worker of the given IExecutorService, similar
/// to how Flowable.ObserveOn operates.
/// </summary>
/// <typeparam name="T">The value type.</typeparam>
/// <param name="source">The source IParallelFlowable instance.</param>
/// <param name="executor">The IExecutorService that will provide the workers for each rail.</param>
/// <returns>The new IParallelFlowable instance.</returns>
public static IParallelFlowable<T> RunOn<T>(this IParallelFlowable<T> source, IExecutorService executor)
{
    return RunOn(source, executor, Flowable.BufferSize());
}
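A minimal usage sketch of this overload; 'rails' is a hypothetical IParallelFlowable<int> variable, and Executors.Task is the executor service used in a later snippet on this page:

// Supplies the default prefetch and forwards to the three-argument overload.
var onWorkers = rails.RunOn(Executors.Task);
// i.e., forwards to: RunOn(rails, Executors.Task, Flowable.BufferSize());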
/// <summary>
/// Creates an IParallelFlowable with the provided parallelism
/// and default prefetch amount.
/// </summary>
/// <typeparam name="T">The value type.</typeparam>
/// <param name="source">The source IFlowable instance.</param>
/// <param name="parallelism">The number of parallel rails to create (positive).</param>
/// <returns>The new IParallelFlowable instance.</returns>
public static IParallelFlowable<T> Parallel<T>(this IFlowable<T> source, int parallelism)
{
    return Parallel(source, parallelism, Flowable.BufferSize());
}
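Usage sketch; 'source' stands for any existing IFlowable<int> (hypothetical variable):

// Four rails with the default prefetch amount.
IParallelFlowable<int> rails = source.Parallel(4);
// i.e., forwards to: Parallel(source, 4, Flowable.BufferSize());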
/// <summary>
/// Maps each rail item into an IPublisher via the shared mapper function and
/// merges/flattens at most the given number of these IPublishers at once per rail
/// into a possibly interleaved sequence of values.
/// </summary>
/// <typeparam name="T">The upstream rail value type.</typeparam>
/// <typeparam name="R">The result value type.</typeparam>
/// <param name="source">The source IParallelFlowable instance.</param>
/// <param name="mapper">The function that receives the upstream rail item and should
/// return an IPublisher whose items are merged.</param>
/// <param name="maxConcurrency">The maximum number of active IPublishers per rail.</param>
/// <returns>The new IParallelFlowable instance.</returns>
public static IParallelFlowable<R> FlatMap<T, R>(this IParallelFlowable<T> source, Func<T, IPublisher<R>> mapper, int maxConcurrency)
{
    return FlatMap<T, R>(source, mapper, maxConcurrency, Flowable.BufferSize());
}
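Usage sketch for a hypothetical IParallelFlowable<int> 'rails'; Flowable.Range is an assumed RxJava-style factory method and is not part of the snippets shown here:

// Map each rail item to an inner IPublisher and keep at most 2 of them
// subscribed at once per rail; the inner sequences may interleave.
var flattened = rails.FlatMap<int, int>(v => Flowable.Range(v, 3), 2);
// i.e., forwards to: FlatMap<int, int>(rails, v => Flowable.Range(v, 3), 2, Flowable.BufferSize());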
// ********************************************************************************
// Instance operators
// ********************************************************************************

/// <summary>
/// Creates an IParallelFlowable with parallelism equal to the number of
/// available processors and default prefetch amount.
/// </summary>
/// <typeparam name="T">The value type.</typeparam>
/// <param name="source">The source IFlowable instance.</param>
/// <returns>The new IParallelFlowable instance.</returns>
public static IParallelFlowable<T> Parallel<T>(this IFlowable<T> source)
{
    return Parallel(source, Environment.ProcessorCount, Flowable.BufferSize());
}
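Usage sketch; 'source' is a hypothetical IFlowable<T> variable:

// Both parallelism and prefetch fall back to their defaults.
var rails = source.Parallel();
// i.e., forwards to: Parallel(source, Environment.ProcessorCount, Flowable.BufferSize());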
Example #5
 /// <summary>
 /// Constructs a MulticastPublisher with the given executor service
 /// and default buffer size.
 /// </summary>
 /// <param name="executor">The IExecutorService to use.</param>
 public MulticastPublisher(IExecutorService executor) : this(executor, Flowable.BufferSize())
 {
 }
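Construction sketch; 'myExecutor' stands for any IExecutorService obtained elsewhere (hypothetical variable):

// The buffer size falls back to Flowable.BufferSize().
var publisher = new MulticastPublisher<int>(myExecutor);
// i.e., delegates to: this(myExecutor, Flowable.BufferSize())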
Example #6
 /// <summary>
 /// Constructs a MulticastPublisher with the Executors.Task service and
 /// default buffer size.
 /// </summary>
 public MulticastPublisher() : this(Executors.Task, Flowable.BufferSize())
 {
 }
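Construction sketch for the parameterless constructor:

// Both the executor and the buffer size fall back to their defaults.
var publisher = new MulticastPublisher<string>();
// i.e., delegates to: this(Executors.Task, Flowable.BufferSize())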