/// <summary>
/// Sorts <paramref name="count"/> items starting at <paramref name="startIndex"/>,
/// using a parallel quick-sort when the range is large enough.
/// </summary>
/// <param name="startIndex">Zero-based index of the first item to sort.</param>
/// <param name="count">Number of items to sort. Zero is a no-op.</param>
/// <param name="comparer">Comparison to order items; when null, <see cref="Comparer{T}.Default"/> is used.</param>
/// <exception cref="ArgumentOutOfRangeException">
/// Thrown when <paramref name="startIndex"/> is negative or past the end, or when
/// <paramref name="count"/> is negative or the range exceeds the array length.
/// </exception>
public void Sort(long startIndex, long count, Comparison <T> comparer)
{
    if (startIndex < 0 || startIndex > _length)
    {
        throw new ArgumentOutOfRangeException(nameof(startIndex));
    }

    // Validate count against the remaining space WITHOUT computing startIndex + count:
    // that addition can overflow a long (unchecked wrap-around) and silently bypass the
    // range check for huge counts.
    if (count < 0 || count > _length - startIndex)
    {
        throw new ArgumentOutOfRangeException(nameof(count));
    }

    if (count == 0)
    {
        return;
    }

    if (comparer == null)
    {
        comparer = Comparer <T> .Default.Compare;
    }

    var parallelSort = new BigArrayParallelSort();
    _Sort(parallelSort, startIndex, count, comparer);

    // Block until every worker queued by _Sort has finished (or rethrow their exceptions).
    parallelSort.Wait();
}
/// <summary>
/// Sorts the entire array using the given comparison, falling back to
/// <see cref="Comparer{T}.Default"/> when none is supplied.
/// </summary>
/// <param name="comparer">Comparison to order items; may be null.</param>
public void Sort(Comparison <T> comparer = null)
{
    Comparison <T> effectiveComparer =
        comparer ?? new Comparison <T> (Comparer <T> .Default.Compare);

    var parallelSort = new BigArrayParallelSort();
    _Sort(parallelSort, 0, _length, effectiveComparer);

    // Wait for all queued sub-sorts to complete before returning.
    parallelSort.Wait();
}
/// <summary>
/// Recursive quick-sort core. Partitions the range, then sorts the left half either
/// inline or on a thread-pool worker (tracked by <paramref name="parallelSort"/>),
/// and always sorts the right half on the current thread.
/// </summary>
/// <param name="parallelSort">Shared bookkeeping object for the whole sort; null disables parallelism.</param>
/// <param name="startIndex">First index of the sub-range to sort.</param>
/// <param name="count">Number of items in the sub-range.</param>
/// <param name="comparer">Comparison used to order items (already non-null here).</param>
private void _Sort(BigArrayParallelSort parallelSort, long startIndex, long count, Comparison <T> comparer)
{
    // Ranges of 0 or 1 items are already sorted.
    if (count <= 1)
    {
        return;
    }

    long pivotOffset = _Partition(startIndex, count, comparer);

    // if we don't have more than 10k items, we don't need to try to run in parallel.
    if (parallelSort == null || count < 10000)
    {
        _Sort(parallelSort, startIndex, pivotOffset, comparer);
    }
    else
    {
        // before putting another task to the threadpool, we verify if the amount of parallel
        // work is not exceeding the number of CPUs.
        // Even if the threadpool can be bigger than the number of CPUs, sorting is a no-wait
        // operation and so putting an extra work to do will only increase the number of task
        // switches.
        int parallelCount = Interlocked.Increment(ref BigArrayParallelSort.ParallelSortCount);
        if (parallelCount >= Environment.ProcessorCount)
        {
            // we have too many threads in parallel
            // (note that the first thread never stops, that's why I used >= operator).
            Interlocked.Decrement(ref BigArrayParallelSort.ParallelSortCount);

            // do a normal sub-sort.
            _Sort(parallelSort, startIndex, pivotOffset, comparer);
        }
        else
        {
            bool shouldProcessNormally = false;

            // ok, we have the right to process in parallel, so let's start by saying we
            // are processing in parallel.
            Interlocked.Increment(ref parallelSort.ExecutingCount);
            try
            {
                ThreadPool.QueueUserWorkItem
                (
                    (x) =>
                    {
                        // ok, finally we can sort. But, if an exception is thrown, we should redirect it to the
                        // main thread.
                        try
                        {
                            _Sort(parallelSort, startIndex, pivotOffset, comparer);
                        }
                        catch (Exception exception)
                        {
                            // here we store the exception. The list is lazily created under the
                            // parallelSort lock so concurrent workers don't race on it.
                            lock (parallelSort)
                            {
                                var exceptions = parallelSort.Exceptions;
                                if (exceptions == null)
                                {
                                    exceptions = new List <Exception>();
                                    parallelSort.Exceptions = exceptions;
                                }

                                exceptions.Add(exception);
                            }
                        }
                        finally
                        {
                            // Independent if we had an exception or not, we should decrement
                            // both counters.
                            Interlocked.Decrement(ref BigArrayParallelSort.ParallelSortCount);
                            int parallelRemaining = Interlocked.Decrement(ref parallelSort.ExecutingCount);

                            // if we were the last parallel thread, we must notify the main thread if it is waiting
                            // for us.
                            if (parallelRemaining == 0)
                            {
                                lock (parallelSort)
                                    Monitor.Pulse(parallelSort);
                            }
                        }
                    }
                );
            }
            catch
            {
                // if an exception was thrown trying to call the thread pool, we simple reduce the
                // count number and do the sort normally.
                // The sort is out of the catch in case an Abort is done.
                Interlocked.Decrement(ref parallelSort.ExecutingCount);
                Interlocked.Decrement(ref BigArrayParallelSort.ParallelSortCount);
                shouldProcessNormally = true;
            }

            if (shouldProcessNormally)
            {
                _Sort(parallelSort, startIndex, pivotOffset, comparer);
            }
        }
    }

    // The right partition (items after the pivot) is always sorted on the current thread.
    _Sort(parallelSort, startIndex + pivotOffset + 1, count - pivotOffset - 1, comparer);
}