Example #1
        /// <summary>
        ///  Projects each element of a sequence into a new form using multiple Tasks / Threads.
        /// </summary>
        /// <param name="source">A sequence of values to invoke a transform function on.</param>
        /// <param name="selector">A transform function to apply to each source element.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        public static TResult[] SelectP <T, TResult>(this T[] source, Func <T, TResult> selector, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (selector == null)
            {
                throw Error.ArgumentNull(nameof(selector));
            }

            TResult[] r = new TResult[source.Length];

            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Length, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    (range, s) => {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    r[i] = selector(source[i]);
                }
            });

            return(r);
        }
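Several of the extensions in these examples call a MakePartition helper that is not shown. A minimal sketch of what such a helper presumably looks like, assuming it simply wraps Partitioner.Create over the index range and falls back to the runtime's default range size when no batch size is supplied (hypothetical code, not taken from the examples):

        // Hypothetical helper: builds a range partitioner over [0, length).
        // Assumes length > 0; requires using System and System.Collections.Concurrent.
        private static OrderablePartitioner<Tuple<int, int>> MakePartition(int length, int? batchSize)
        {
            if (batchSize.HasValue && batchSize.Value > 0)
            {
                // Caller asked for fixed-size chunks.
                return Partitioner.Create(0, length, batchSize.Value);
            }

            // Otherwise let the runtime choose its default range size.
            return Partitioner.Create(0, length);
        }

With such a helper in place, a call like new[] { 1, 2, 3 }.SelectP(x => x * x) squares each element in parallel while keeping results aligned with their source indices.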
Example #2
        /// <summary>
        ///  Projects each element of a sequence into a new form by incorporating the element's index, using multiple Tasks / Threads.
        ///  Note: the order of the results is NOT preserved; ordering is traded away for speed and lower memory use.
        /// </summary>
        /// <param name="source">A sequence of values to invoke a transform function on.</param>
        /// <param name="selector">A transform function to apply to each source element; the second parameter of the function represents the index of the source element.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>A sequence whose elements are the result of invoking the transform function on each element of source.</returns>
        public static List <TResult> SelectUnorderedP <T, TResult>(this List <T> source, Func <T, int, TResult> selector, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (selector == null)
            {
                throw Error.ArgumentNull(nameof(selector));
            }

            List <TResult> r    = new List <TResult>(source.Count);
            object         gate = new object();

            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Count, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    (range, s) => {
                // Buffer each range locally: List<T>.Add is not thread-safe, so the shared
                // list is only touched while holding the lock.
                List <TResult> local = new List <TResult>(range.Item2 - range.Item1);
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    local.Add(selector(source[i], i));
                }
                lock (gate)
                {
                    r.AddRange(local);
                }
            });


            return(r);
        }
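A usage contrast between the two projections above (a sketch; the statements are assumed to run inside a method with both extension classes in scope):

        // Ordered: result[i] always corresponds to source[i].
        int[] squaresOrdered = new[] { 1, 2, 3, 4 }.SelectP(x => x * x);

        // Unordered: the same squares are produced, but their positions may differ from run to run.
        List<int> squaresUnordered = new List<int> { 1, 2, 3, 4 }.SelectUnorderedP((x, i) => x * x);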
Example #3
 /// <summary>
 /// Executes a foreach operation with thread-local data on a <see cref="OrderablePartitioner{TSource}"/>
 /// in which iterations may run in parallel, loop options can be configured, and the state of the loop
 /// can be monitored and manipulated.
 /// </summary>
 public static SystemParallelLoopResult ForEach <TSource, TLocal>(OrderablePartitioner <TSource> source,
                                                                  Func <TLocal> localInit, Func <TSource, SystemParallelLoopState, long, TLocal, TLocal> body,
                                                                  Action <TLocal> localFinally)
 {
     ExceptionProvider.ThrowUncontrolledInvocationException(nameof(SystemParallel.ForEach));
     return(SystemParallel.ForEach(source, localInit, body, localFinally));
 }
Example #4
        public void LoadWorld()
        {
            string zoneLocation = GlobalReference.GlobalValues.Settings.ZoneDirectory;

            string[] zones = GlobalReference.GlobalValues.FileIO.GetFilesFromDirectory(zoneLocation);

            if (zones.Length == 0)
            {
                throw new FileNotFoundException("No zone files found in " + zoneLocation);
            }

            foreach (string file in zones)
            {
                string filePath = Path.GetFullPath(file);

                GlobalReference.GlobalValues.Logger.Log(LogLevel.DEBUG, "Loading " + filePath);

                IZone zone = DeserializeZone(GlobalReference.GlobalValues.FileIO.ReadAllText(filePath));
                zone.FinishLoad();
                Zones.Add(zone.Id, zone);
                _zoneIdToFileMap.Add(zone.Id, file);
            }

            _zones           = new List <IZone>(Zones.Values);
            _zonePartitioner = Partitioner.Create(0, _zones.Count);
        }
Example #5
        public static async Task ParallelForEachAsync <T>(
            this IEnumerable <T> source, Func <T, Task> asyncAction, int?maxDegreeOfParallelism = null)
        {
            maxDegreeOfParallelism ??= DefaultMaxDegreeOfParallelism;
            if (maxDegreeOfParallelism <= 0)
            {
                throw new ArgumentOutOfRangeException(nameof(maxDegreeOfParallelism));
            }

            if (maxDegreeOfParallelism == 1)
            {
                foreach (T value in source)
                {
                    await asyncAction(value);
                }

                return;
            }

            OrderablePartitioner <T> partitioner = source is IList <T> list
                ? Partitioner.Create(list, loadBalance : true)
                : Partitioner.Create(source, EnumerablePartitionerOptions.NoBuffering);

            await Task.WhenAll(partitioner
                               .GetPartitions(maxDegreeOfParallelism.Value)
                               .Select(async partition =>
            {
                while (partition.MoveNext())
                {
                    await asyncAction(partition.Current);
                }
            }));
        }
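A minimal usage sketch for the extension above (assuming DefaultMaxDegreeOfParallelism is defined in the same class, the statements run inside an async method, and using System.Net.Http is available); each partition is drained by one async worker, so at most maxDegreeOfParallelism actions run concurrently:

        var urls = new[] { "https://example.com/a", "https://example.com/b" };
        using var client = new HttpClient();

        // Download with at most four concurrent requests.
        await urls.ParallelForEachAsync(async url =>
        {
            string body = await client.GetStringAsync(url);
            Console.WriteLine($"{url}: {body.Length} characters");
        }, maxDegreeOfParallelism: 4);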
Example #6
        public static async Task ParallelForEachAsync <T>(
            this IEnumerable <T> source, Func <T, int, Task> asyncAction, int?maxDegreeOfParallelism = null)
        {
            if (maxDegreeOfParallelism == 1)
            {
                await source.ForEachAsync(asyncAction);

                return;
            }

            maxDegreeOfParallelism ??= Math.Min(Environment.ProcessorCount, 512);
            OrderablePartitioner <T> partitioner = source is IList <T> list
                ? Partitioner.Create(list, loadBalance : true)
                : Partitioner.Create(source, EnumerablePartitionerOptions.NoBuffering);

            await Task.WhenAll(partitioner
                               .GetPartitions(maxDegreeOfParallelism.Value)
                               .Select((partition, index) => Task.Run(async() =>
            {
                while (partition.MoveNext())
                {
                    await asyncAction(partition.Current, index);
                }
            })));
        }
Example #7
        /// <summary>
        /// Determines the OrdinalIndexState for a partitioner
        /// </summary>
        internal static OrdinalIndexState GetOrdinalIndexState(Partitioner <TElement> partitioner)
        {
            OrderablePartitioner <TElement> orderablePartitioner = partitioner as OrderablePartitioner <TElement>;

            if (orderablePartitioner == null)
            {
                return(OrdinalIndexState.Shuffled);
            }

            if (orderablePartitioner.KeysOrderedInEachPartition)
            {
                if (orderablePartitioner.KeysNormalized)
                {
                    return(OrdinalIndexState.Correct);
                }
                else
                {
                    return(OrdinalIndexState.Increasing);
                }
            }
            else
            {
                return(OrdinalIndexState.Shuffled);
            }
        }
Example #8
        /// <summary>
        /// Adds the transformed sequence of elements using multiple Tasks / Threads.
        /// </summary>
        /// <param name="source">The sequence of values to transform then sum.</param>
        /// <param name="selector">A transformation function.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>The sum of the transformed elements.</returns>
        public static decimal SumP <T>(this List <T> source, Func <T, decimal> selector, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (selector == null)
            {
                throw Error.ArgumentNull(nameof(selector));
            }

            decimal sum  = 0;
            object  LOCK = new object();
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Count, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    () => (decimal)0.0,
                                                    (range, s, acc) =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    acc += selector(source[i]);
                }

                return(acc);
            },
                                                    acc => { lock (LOCK) { sum += acc; } });

            return(sum);
        }
Example #9
        /// <summary>
        ///  Adds a sequence of values using multiple Tasks / Threads.
        /// </summary>
        /// <param name="source">The sequence to add.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>The sum of the sequence.</returns>
        public static long SumP(this long[] source, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }
            long sum = 0;
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Length, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    () => 0L,
                                                    (range, s, acc) =>
            {
                checked
                {
                    for (int i = range.Item1; i < range.Item2; i++)
                    {
                        acc += source[i];
                    }
                }
                return(acc);
            },
                                                    acc => Interlocked.Add(ref sum, acc));

            return(sum);
        }
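A short usage sketch for the long overload above; note that each batch accumulates inside a checked block, so an overflow within a batch surfaces as an exception (wrapped in an AggregateException by Parallel.ForEach), while the final Interlocked.Add merge itself is unchecked:

        long[] values = { 1L, 2L, 3L };
        long total = values.SumP();   // 6

        long[] huge = { long.MaxValue, 1L };
        // huge.SumP() would throw, since adding 1 to long.MaxValue overflows inside the checked block.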
Example #10
        public int[][] ParallelForPartition2()
        {
            var nodeIndexMap = new int[ElementsCount][];

            byte[] linksBuffer = GetLinksBuffer();

            OrderablePartitioner <Tuple <int, int> > rangePartitioner = Partitioner.Create(0, ElementsCount, 1000);

            Parallel.ForEach(rangePartitioner,
                             parallelOptions,
                             range =>
            {
                var items = new LocalItem2[range.Item2 - range.Item1];
                int j     = 0;

                for (int i = range.Item1; i < range.Item2; i++)
                {
                    items[j] = Process2(ref i, linksBuffer);
                    j++;
                }

                foreach (LocalItem2 localItem in items)
                {
                    nodeIndexMap[localItem.ItemId] = new int[1] {
                        localItem.I + 1
                    };
                }
            });

            return(nodeIndexMap);
        }
Example #11
        /// <summary>
        /// Adds the transformed sequence of elements using multiple Tasks / Threads.
        /// </summary>
        /// <param name="source">The sequence of values to transform then sum.</param>
        /// <param name="selector">A transformation function.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>The sum of the transformed elements.</returns>
        public static int SumP <T>(this List <T> source, Func <T, int> selector, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (selector == null)
            {
                throw Error.ArgumentNull(nameof(selector));
            }

            int sum = 0;
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Count, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    () => 0,
                                                    (range, s, acc) =>
            {
                checked
                {
                    for (int i = range.Item1; i < range.Item2; i++)
                    {
                        acc += selector(source[i]);
                    }
                }
                return(acc);
            },
                                                    acc => Interlocked.Add(ref sum, acc));

            return(sum);
        }
Example #12
        static void Main(string[] args)
        {
            double[]  resultData    = new double[100000000];
            const int partitionSize = 100000;
            Stopwatch stopwatch     = new Stopwatch();

            // Created a partitioner that will chunk the data
            OrderablePartitioner <Tuple <int, int> > chunkPart = Partitioner.Create(0, resultData.Length, partitionSize);

            // Perform the loop in chunks
            stopwatch.Start();
            Parallel.ForEach(chunkPart, (Tuple <int, int> chunkRange) =>
            {
                // Iterate through all of the values in the chunk range
                for (int i = chunkRange.Item1; i < chunkRange.Item2; i++)
                {
                    resultData[i] = Math.Pow(i, 2);
                }
            });
            stopwatch.Stop();
            Console.WriteLine("Loop takes {0} seconds", stopwatch.Elapsed.TotalSeconds);

            // Wait for input before exiting
            Console.WriteLine("Press enter to finish");
            Console.ReadLine();
        }
Example #13
        private static void UsingOrderablePartitioner()
        {
            IList <string> sourceData = new List <string>()
            {
                "an", "apple", "a", "day", "keeps", "the", "doctor", "away"
            };

            string[] resultData = new string[sourceData.Count];

            OrderablePartitioner <string> op = Partitioner.Create(sourceData);

            Parallel.ForEach(op, (string item, ParallelLoopState loopState, long index) =>
            {
                if (item == "apple")
                {
                    item = "apricot";
                }
                resultData[index] = item;
            });

            for (int i = 0; i < resultData.Length; i++)
            {
                Console.WriteLine($"Item {i}: {resultData[i]}.");
            }
        }
Example #14
        /// <summary>
        /// Computes the average of a sequence in parallel.
        /// </summary>
        /// <param name="source">The array to calculate the average of.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>The average of the array.</returns>
        public static double AverageP(this List <long> source, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (source.Count == 0)
            {
                throw Error.NoElements();
            }

            long sum = 0;
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Count, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    () => 0L,
                                                    (range, s, acc) =>
            {
                checked
                {
                    for (int i = range.Item1; i < range.Item2; i++)
                    {
                        acc += source[i];
                    }
                }
                return(acc);
            },
                                                    acc => Interlocked.Add(ref sum, acc));

            return((double)sum / source.Count);
        }
Example #15
        //Rather than using a Parallel.For() loop that invokes the delegate for each index value
        //we invoke a delegate for each chunk
        static void Main88(string[] args)
        {
            // create the results array
            double[] resultData = new double[10000000];

            // created a partitioner that will chunk the data
            //each Tuple represents a chunk or range of index values.
            //The Tuple.Item1 value is the inclusive start index of the range, and the Tuple.Item2 value is the exclusive end index of the range
            OrderablePartitioner <Tuple <int, int> > chunkPart =
                Partitioner.Create(0,                   //a start index
                                   resultData.Length,   //an end index
                                   10000);              //optionally the range of index values that each chunk should represent

            // perform the loop in chunks
            Parallel.ForEach(chunkPart, chunkRange =>
            {
                // iterate through all of the values in the chunk range
                for (int i = chunkRange.Item1; i < chunkRange.Item2; i++)
                {
                    resultData[i] = Math.Pow(i, 2);
                }
            });
            //By breaking 10,000,000 index values into chunks of 10,000, we reduce the number of times that the delegate is invoked to 1,000

            /* If you do not specify the size of each chunk, then a default is used:
             * the number of items divided by three times the number of available processor cores.
             * For example, for 1,000 index values on a four-core machine, each chunk covers roughly 1000 / (3 × 4) ≈ 83 values.
             * The default may be calculated differently in future releases.
             */

            // wait for input before exiting
            Console.WriteLine("Press enter to finish");
            Console.ReadLine();
        }
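The default chunk size described in the comments above can be observed directly. A small sketch (requires using System.Collections.Concurrent and System.Collections.Generic; the exact default may vary between .NET versions and machines):

            // Enumerate every range produced when no explicit range size is given.
            OrderablePartitioner<Tuple<int, int>> defaultChunks = Partitioner.Create(0, 1000);
            IEnumerator<Tuple<int, int>> allRanges = defaultChunks.GetPartitions(1)[0];

            while (allRanges.MoveNext())
            {
                // e.g. [0, 83), [83, 166), ... on a four-core machine (1000 / (3 × 4) = 83)
                Console.WriteLine($"[{allRanges.Current.Item1}, {allRanges.Current.Item2})");
            }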
Example #16
        // Generalized test for testing OrderablePartitioner ForEach-loop results
        private static void OrderablePartitionerForEachPLRTest(
            Action <int, ParallelLoopState, long> body,
            string desc,
            bool excExpected,
            bool shouldComplete,
            bool shouldStop,
            bool shouldBreak)
        {
            List <int> list = new List <int>();

            for (int i = 0; i < 20; i++)
            {
                list.Add(i);
            }
            OrderablePartitioner <int> mop = Partitioner.Create(list, true);

            try
            {
                ParallelLoopResult plr = Parallel.ForEach(mop, body);

                Assert.False(excExpected);

                Assert.Equal(shouldComplete, plr.IsCompleted);
                Assert.Equal(shouldStop, plr.LowestBreakIteration == null);
                Assert.Equal(shouldBreak, plr.LowestBreakIteration != null);
            }
            catch (AggregateException)
            {
                Assert.True(excExpected);
            }
        }
Example #17
        public static void UsingOrderedPartitioningStrategy()
        {
            IList <string> sourceData = new List <string>()
            {
                "an", "apple", "a", "day", "keeps", "the", "doctor", "away"
            };

            string[] resultData = new string[sourceData.Count];

            OrderablePartitioner <string> op = Partitioner.Create(sourceData);

            Parallel.ForEach(op, (string item, ParallelLoopState loopState, long index) =>
            {
                if (item == "apple")
                {
                    item = "apricot";
                }
                resultData[index] = item;
            });

            for (int i = 0; i < resultData.Length; i++)
            {
                Console.WriteLine("Item {0} is {1}", i, resultData[i]);
            }

            EndOfProgram();
        }
Example #18
        internal override IList <IEnumerable <KeyValuePair <long, T> > > GetOrderedEnumerables(QueryOptions options)
        {
            OrderablePartitioner <T> partitioner = null;

            if (customPartitioner != null)
            {
                partitioner = customPartitioner as OrderablePartitioner <T>;
                if (partitioner == null)
                {
                    throw new InvalidOperationException("The partitioner you are using doesn't support ordered partitioning");
                }
            }
            else
            {
                partitioner =
                    (options.UseStrip) ? ParallelPartitioner.CreateForStrips(source, 1) : ParallelPartitioner.CreateBest(source);
            }

            options.PartitionerSettings = Tuple.Create(partitioner.KeysOrderedAcrossPartitions,
                                                       partitioner.KeysOrderedInEachPartition,
                                                       partitioner.KeysNormalized);

            // We only support one style of partitioning at the moment.
            // Standard partitioners follow this style.
            if (options.UseStrip && (!partitioner.KeysOrderedInEachPartition || partitioner.KeysOrderedAcrossPartitions))
            {
                throw new NotImplementedException("Partitioner must have KeysOrderedInEachPartition "
                                                  + "and !KeysOrderedAcrossPartitions "
                                                  + "to be used with indexed operators");
            }

            return(WrapHelper.Wrap(partitioner.GetOrderablePartitions(options.PartitionCount)));
        }
Example #19
        /// <summary>
        /// Determines whether a list contains a specified element by using the
        /// provided IEqualityComparer or the default comparer if none is provided,
        /// in parallel. If using a default comparer, prefer the non-parallel version, which is faster.
        /// </summary>
        /// <param name="source">A list in which to locate a value.</param>
        /// <param name="value">The value to locate.</param>
        /// <param name="comparer">An equality comparer to compare values.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>true if the source sequence contains an element that has the specified value; otherwise, false.</returns>
        public static bool ContainsP <TSource>(this List <TSource> source, TSource value, IEqualityComparer <TSource> comparer, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (comparer == null)
            {
                throw Error.ArgumentNull("comparer");
            }


            int total = 0;
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Count, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    (range, loopState) =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    if (comparer.Equals(source[i], value))
                    {
                        Interlocked.Increment(ref total);
                        loopState.Stop();
                    }
                }
            });


            return(total > 0);
        }
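A usage sketch with a non-default comparer, which is the case the summary above recommends the parallel version for:

        var words = new List<string> { "an", "apple", "a", "day", "keeps", "the", "doctor", "away" };

        // Case-insensitive membership test; the comparisons are spread across range partitions.
        bool found = words.ContainsP("APPLE", StringComparer.OrdinalIgnoreCase);   // true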
Example #20
        internal override IList <IEnumerable <KeyValuePair <long, TSource> > > GetOrderedEnumerables(QueryOptions options)
        {
            ReverseList <TSource>          reversed    = new ReverseList <TSource> (source.ToListOrdered());
            OrderablePartitioner <TSource> partitioner = ParallelPartitioner.CreateForStrips(reversed, 1);

            return(WrapHelper.Wrap(partitioner.GetOrderablePartitions(options.PartitionCount)));
        }
Example #21
 /// <summary>
 /// Executes a <c>foreach</c> (<c>For Each</c> in Visual Basic) operation with 64-bit indexes and with thread-local data on a <see cref="OrderablePartitioner{TSource}"/> in which iterations are run sequentially,
 /// loop options can be configured,
 /// and the state of the loop can be monitored and manipulated.
 /// </summary>
 /// <typeparam name="TSource">The type of the elements in source.</typeparam>
 /// <typeparam name="TLocal">The type of the thread-local data.</typeparam>
 /// <param name="source">The orderable partitioner that contains the original data source.</param>
 /// <param name="parallelOptions">An object that configures the behavior of this operation.</param>
 /// <param name="localInit">The function delegate that returns the initial state of the local data for each task.</param>
 /// <param name="body">The delegate that is invoked once per iteration.</param>
 /// <param name="localFinally">The delegate that performs a final action on the local state of each task.</param>
 /// <returns>A structure that contains information about which portion of the loop completed.</returns>
 public static ParallelLoopResult ForEach <TSource, TLocal>(OrderablePartitioner <TSource> source, ParallelOptions parallelOptions, Func <TLocal> localInit, Func <TSource, ParallelLoopState, long, TLocal, TLocal> body, Action <TLocal> localFinally) =>
 Parallel.ForEach(
     source,
     MakeSequential(parallelOptions),
     localInit,
     body,
     localFinally);
Example #22
        //The OrderablePartitioner class extends Partitioner.
        //If you need to preserve order, you need to use OrderablePartitioner.
        static void Main89(string[] args)
        {
            // create the source data
            IList <string> sourceData
                = new List <string>()
                {
                "an", "apple", "a", "day", "keeps", "the", "doctor", "away"
                };

            // create an array to hold the results
            string[] resultData = new string[sourceData.Count];
            // create an orderable partitioner
            OrderablePartitioner <string> op = Partitioner.Create(sourceData);

            // perform the parallel loop
            Parallel.ForEach(op, (string item, ParallelLoopState loopState, long index) =>
            {
                // process the item
                if (item == "apple")
                {
                    item = "apricot";
                }
                // use the index to set the result in the array
                resultData[index] = item;
            });
            // print out the contents of the result array
            for (int i = 0; i < resultData.Length; i++)
            {
                Console.WriteLine("Item {0} is {1}", i, resultData[i]);
            }
            // wait for input before exiting
            Console.WriteLine("Press enter to finish");
            Console.ReadLine();
        }
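The same loop body also works with the load-balancing overload, because Partitioner.Create(IList<T>, loadBalance: true) likewise returns an OrderablePartitioner, so the long index stays available (a small sketch reusing sourceData and resultData from above):

            // Chunks are handed out dynamically, but each item still carries its original index,
            // so writing into resultData by index remains safe.
            OrderablePartitioner<string> balanced = Partitioner.Create(sourceData, loadBalance: true);

            Parallel.ForEach(balanced, (string item, ParallelLoopState loopState, long index) =>
            {
                resultData[index] = item == "apple" ? "apricot" : item;
            });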
Example #23
        /// <summary>
        ///  Adds a sequence of values using multiple Tasks / Threads.
        /// </summary>
        /// <param name="source">The sequence to add.</param>
        /// <param name="batchSize">Optional custom batch size to divide work into.</param>
        /// <returns>The sum of the sequence.</returns>
        public static double SumP(this double[] source, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }
            double sum  = 0;
            object LOCK = new object();
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Length, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    () => 0.0,
                                                    (range, s, acc) =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    acc += source[i];
                }
                return(acc);
            },
                                                    acc =>
            {
                lock (LOCK)
                {
                    sum += acc;
                }
            });


            return(sum);
        }
Example #24
 public static ParallelLoopResult ForEach <TSource, TLocal> (OrderablePartitioner <TSource> enumerable, ParallelOptions options,
                                                             Func <TLocal> init,
                                                             Func <TSource, ParallelLoopState, long, TLocal, TLocal> action,
                                                             Action <TLocal> destruct)
 {
     return(ForEach <KeyValuePair <long, TSource>, TLocal> (enumerable.GetOrderablePartitions, options,
                                                            init, (e, s, l) => action(e.Value, s, e.Key, l), destruct));
 }
Example #25
        private static bool RunTestWithAlgorithm(int dataSize, int partitionCount, int algorithm)
        {
            //we set up the KeyValuePairs so that keys and values are always the same
            //for all partitioning algorithms; that way we can use a bitmap (bool array) to check whether
            //any elements are missing at the end.
            int[] data = new int[dataSize];
            for (int i = 0; i < dataSize; i++)
            {
                data[i] = i;
            }

            bool passed = true;

            IEnumerator <KeyValuePair <long, int> >[] partitionsUnderTest = new IEnumerator <KeyValuePair <long, int> > [partitionCount];

            //step 1: test GetOrderablePartitions
            DebugMessage(false, () => Console.WriteLine("Testing GetOrderablePartitions"));
            OrderablePartitioner <int> partitioner = PartitioningWithAlgorithm <int>(data, algorithm);
            var partitions1 = partitioner.GetOrderablePartitions(partitionCount);

            //convert it to partition array for testing
            for (int i = 0; i < partitionCount; i++)
            {
                partitionsUnderTest[i] = partitions1[i];
            }

            Assert(partitions1.Count == partitionCount, "RunPartitionerStaticTest_LoadBalanceIList");
            passed &= TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);

            //step 2: test GetOrderableDynamicPartitions
            DebugMessage(false, () => Console.WriteLine("Testing GetOrderableDynamicPartitions"));
            bool gotException = false;

            try
            {
                var partitions2 = partitioner.GetOrderableDynamicPartitions();
                for (int i = 0; i < partitionCount; i++)
                {
                    partitionsUnderTest[i] = partitions2.GetEnumerator();
                }
                passed &= TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);
            }
            catch (NotSupportedException)
            {
                //swallow this exception: static partitioning doesn't support GetOrderableDynamicPartitions
                gotException = true;
            }

            if (IsStaticPartition(algorithm) && !gotException)
            {
                TestHarness.TestLog("Failure: didn't catch \"NotSupportedException\" for static partitioning");
                passed = false;
            }

            return(passed);
        }
Example #26
        internal override IList <IEnumerable <KeyValuePair <long, T> > > GetOrderedEnumerables(QueryOptions options)
        {
            int       partitionCount;
            IList <T> aggregList = GetAggregatedList(out partitionCount);
            IList <T> result     = ParallelQuickSort <T> .Sort(aggregList, comparison);

            OrderablePartitioner <T> partitioner = ParallelPartitioner.CreateForStrips(result, 1);

            return(WrapHelper.Wrap(partitioner.GetOrderablePartitions(options.PartitionCount)));
        }
Example #27
        public IHttpActionResult Partition()
        {
            var array = Enumerable.Range(1, 1000).ToArray <int>();
            // Delimits a section of the one-dimensional array.
            ArraySegment <int> segment = new ArraySegment <int>(array);
            // System.Buffer
            OrderablePartitioner <int> data = Partitioner.Create <int>(array, true);

            return(Json(data));
        }
Example #28
        /// <summary>
        /// Combines Select and Where into a single call in parallel with indexes
        /// </summary>
        /// <param name="source">The input sequence to filter and select</param>
        /// <param name="selector">The transformation to apply before filtering.</param>
        /// <param name="predicate">The predicate with which to filter result.</param>
        /// <param name="batchSize">Optional. Specify a batch size for Tasks to operate over. </param>
        /// <returns>A sequence transformed and then filtered by selector and predicate.</returns>
        public static TResult[] SelectWhereP <T, TResult>(this T[] source, Func <T, int, TResult> selector, Func <TResult, int, bool> predicate, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (predicate == null)
            {
                throw Error.ArgumentNull(nameof(predicate));
            }

            if (selector == null)
            {
                throw Error.ArgumentNull(nameof(selector));
            }

            bool[]    isChosen    = new bool[source.Length];
            TResult[] tempResults = new TResult[source.Length];
            int       count       = 0;
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Length, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner, () => 0,
                                                    (range, loopState, acc) =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    TResult s = selector(source[i], i);
                    if (predicate(s, i))
                    {
                        isChosen[i]    = true;
                        tempResults[i] = s;
                        acc++;
                    }
                }
                return(acc);
            },
                                                    acc =>
            {
                Interlocked.Add(ref count, acc);
            });

            TResult[] result = new TResult[count];
            int       idx    = 0;

            for (int i = 0; i < isChosen.Length; i++)
            {
                if (isChosen[i])
                {
                    result[idx] = tempResults[i];
                    idx++;
                }
            }
            return(result);
        }
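A usage sketch for the combined select-then-filter above: each element is projected together with its index, then only results passing the predicate are kept, in source order:

        int[] input = { 5, 10, 15, 20, 25 };

        // Square each value, then keep only squares greater than 100.
        int[] bigSquares = input.SelectWhereP(
            (value, index) => value * value,
            (result, index) => result > 100,
            batchSize: 2);
        // bigSquares: { 225, 400, 625 }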
Example #29
        /// <summary>
        /// Combines Where and Aggregate with index, run the where phase in parallel.
        /// </summary>
        /// <param name="source">The input to filter then aggregate.</param>
        /// <param name="predicate">The function to filter the input sequence and it's index with.</param>
        /// <param name="func">The function to aggregate the filtered sequence.</param>
        /// <param name="batchSize">Optional. Specify a batch size for Tasks to operate over. </param>
        /// <returns>The filtered then aggregated sequence.</returns>
        public static TSource WhereAggregateP <TSource>(this TSource[] source, Func <TSource, int, bool> predicate, Func <TSource, TSource, TSource> func, int?batchSize = null)
        {
            if (source == null)
            {
                throw Error.ArgumentNull(nameof(source));
            }

            if (predicate == null)
            {
                throw Error.ArgumentNull(nameof(predicate));
            }

            if (func == null)
            {
                throw Error.ArgumentNull(nameof(func));
            }

            bool[] isChosen = new bool[source.Length];
            OrderablePartitioner <Tuple <int, int> > rangePartitioner = MakePartition(source.Length, batchSize);

            System.Threading.Tasks.Parallel.ForEach(rangePartitioner,
                                                    (range, loopState) =>
            {
                for (int i = range.Item1; i < range.Item2; i++)
                {
                    if (predicate(source[i], i))
                    {
                        isChosen[i] = true;
                    }
                }
            });

            TSource result = default(TSource);

            int idx = 0;

            for (; idx < isChosen.Length; idx++)
            {
                if (isChosen[idx])
                {
                    result = source[idx];
                    idx++;
                    break;
                }
            }
            for (; idx < isChosen.Length; idx++)
            {
                if (isChosen[idx])
                {
                    result = func(result, source[idx]);
                }
            }
            return(result);
        }
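And a sketch for the filter-then-aggregate variant above: the filtering pass runs in parallel, the reduction over the surviving elements is sequential:

        int[] numbers = { 3, 8, 1, 9, 4, 7 };

        // Keep values greater than 3, then reduce the survivors to their maximum.
        int maxOfLarge = numbers.WhereAggregateP(
            (value, index) => value > 3,
            (acc, next) => Math.Max(acc, next));
        // maxOfLarge == 9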
Example #30
        private static void RunTestWithAlgorithm(int dataSize, int partitionCount, int algorithm)
        {
            //we set up the KeyValuePairs so that keys and values are always the same
            //for all partitioning algorithms; that way we can use a bitmap (bool array) to check whether
            //any elements are missing at the end.
            int[] data = new int[dataSize];
            for (int i = 0; i < dataSize; i++)
            {
                data[i] = i;
            }

            IEnumerator <KeyValuePair <long, int> >[] partitionsUnderTest = new IEnumerator <KeyValuePair <long, int> > [partitionCount];

            //step 1: test GetOrderablePartitions
            OrderablePartitioner <int> partitioner = PartitioningWithAlgorithm <int>(data, algorithm);
            var partitions1 = partitioner.GetOrderablePartitions(partitionCount);

            //convert it to partition array for testing
            for (int i = 0; i < partitionCount; i++)
            {
                partitionsUnderTest[i] = partitions1[i];
            }

            if (partitions1.Count != partitionCount)
            {
                Assert.False(true, String.Format(
                                 "RunPartitionerStaticTest_LoadBalanceIList:  FAILED.  partitions1.count: {0} != partitioncount: {1}", partitions1.Count, partitionCount));
            }

            TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);

            //step 2: test GetOrderableDynamicPartitions
            bool gotException = false;

            try
            {
                var partitions2 = partitioner.GetOrderableDynamicPartitions();
                for (int i = 0; i < partitionCount; i++)
                {
                    partitionsUnderTest[i] = partitions2.GetEnumerator();
                }
                TestPartitioningCore(dataSize, partitionCount, data, IsStaticPartition(algorithm), partitionsUnderTest);
            }
            catch (NotSupportedException)
            {
                //swallow this exception: static partitioning doesn't support GetOrderableDynamicPartitions
                gotException = true;
            }

            if (IsStaticPartition(algorithm) && !gotException)
            {
                Assert.False(true, "RunPartitionerStaticTest_LoadBalanceIList: Failure: didn't catch \"NotSupportedException\" for static partitioning");
            }
        }