/// <summary>
        /// A variant of WrapEnumerable that accepts a QueryOperatorEnumerator{,} instead of an IEnumerable{}.
        /// The code duplication is necessary to avoid extra virtual method calls that would otherwise be needed to
        /// convert the QueryOperatorEnumerator{,} to an IEnumerator{}.
        /// </summary>
        internal static IEnumerable <TElement> WrapQueryEnumerator <TElement, TIgnoreKey>(QueryOperatorEnumerator <TElement, TIgnoreKey> source,
                                                                                          CancellationState cancellationState)
        {
            TElement   elem      = default(TElement);
            TIgnoreKey ignoreKey = default(TIgnoreKey);

            try
            {
                while (true)
                {
                    try
                    {
                        if (!source.MoveNext(ref elem, ref ignoreKey))
                        {
                            yield break;
                        }
                    }
                    catch (ThreadAbortException)
                    {
                        // Do not wrap ThreadAbortExceptions
                        throw;
                    }
                    catch (Exception ex)
                    {
                        ThrowOCEorAggregateException(ex, cancellationState);
                    }

                    yield return(elem);
                }
            }
            finally
            {
                source.Dispose();
            }
        }
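For comparison, here is a minimal sketch of what the IEnumerable-based sibling referred to in the summary could look like, assuming only the behavior described above (wrap MoveNext, funnel exceptions through ThrowOCEorAggregateException, dispose the source); the real WrapEnumerable may differ in detail.

internal static IEnumerable<TElement> WrapEnumerable<TElement>(IEnumerable<TElement> source,
                                                               CancellationState cancellationState)
{
    using (IEnumerator<TElement> enumerator = source.GetEnumerator())
    {
        TElement elem = default(TElement);
        while (true)
        {
            try
            {
                // The virtual IEnumerator<TElement>.MoveNext/Current calls here are the extra cost
                // that WrapQueryEnumerator avoids by invoking QueryOperatorEnumerator.MoveNext
                // directly with ref arguments.
                if (!enumerator.MoveNext())
                {
                    yield break;
                }
                elem = enumerator.Current;
            }
            catch (Exception ex)
            {
                // Assumed to rethrow as an OCE or AggregateException, as in the method above.
                ThrowOCEorAggregateException(ex, cancellationState);
            }

            yield return elem;
        }
    }
}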
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TResult currentElement, ref TKey currentKey)
            {
                // If the buffer has not been created, we will generate it lazily on demand.
                if (_buffer == null)
                {
                    // Create a buffer, but don't publish it yet (in case of exception).
                    List <Pair <TResult, TKey> > buffer = new List <Pair <TResult, TKey> >();

                    // Enter the search phase.  In this phase, we scan the input until one of three
                    // things happens:  (1) all input has been exhausted, (2) the predicate yields
                    // false for one of our elements, or (3) we move past the current lowest index
                    // found by other partitions for a false element.  As we go, we have to remember
                    // the elements by placing them into the buffer.

                    try
                    {
                        TResult current = default(TResult);
                        TKey    key     = default(TKey);
                        int     i       = 0; //counter to help with cancellation
                        while (_source.MoveNext(ref current, ref key))
                        {
                            if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                            {
                                CancellationState.ThrowIfCanceled(_cancellationToken);
                            }

                            // Add the current element to our buffer.
                            buffer.Add(new Pair <TResult, TKey>(current, key));

                            // See if another partition has found a false value before this element. If so,
                            // we should stop scanning the input now and reach the barrier ASAP.
                            if (_updatesSeen != _operatorState._updatesDone)
                            {
                                lock (_operatorState)
                                {
                                    _currentLowKey = _operatorState._currentLowKey;
                                    _updatesSeen   = _operatorState._updatesDone;
                                }
                            }

                            if (_updatesSeen > 0 && _keyComparer.Compare(key, _currentLowKey) > 0)
                            {
                                break;
                            }

                            // Evaluate the predicate, either indexed or not based on info passed to the ctor.
                            bool predicateResult;
                            if (_predicate != null)
                            {
                                predicateResult = _predicate(current);
                            }
                            else
                            {
                                Debug.Assert(_indexedPredicate != null);
                                predicateResult = _indexedPredicate(current, key);
                            }

                            if (!predicateResult)
                            {
                                // Signal that we've found a false element, racing with other partitions to
                                // set the shared index value.
                                lock (_operatorState)
                                {
                                    if (_operatorState._updatesDone == 0 || _keyComparer.Compare(_operatorState._currentLowKey, key) > 0)
                                    {
                                        _currentLowKey = _operatorState._currentLowKey = key;
                                        _updatesSeen   = ++_operatorState._updatesDone;
                                    }
                                }

                                break;
                            }
                        }
                    }
                    finally
                    {
                        // No matter whether we exit due to an exception or normal completion, we must ensure
                        // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                        _sharedBarrier.Signal();
                    }

                    // Before exiting the search phase, we will synchronize with others. This is a barrier.
                    _sharedBarrier.Wait(_cancellationToken);

                    // Publish the buffer and set the index to just before the 1st element.
                    _buffer      = buffer;
                    _bufferIndex = new Shared <int>(-1);
                }

                // Now either enter (or continue) the yielding phase. As soon as we reach this, we know the
                // current shared "low false" value is the absolute lowest with a false.
                if (_take)
                {
                    // In the case of a take-while, we will yield each element from our buffer for which
                    // the element is lesser than the lowest false index found.
                    if (_bufferIndex.Value >= _buffer.Count - 1)
                    {
                        return(false);
                    }

                    // Increment the index, and remember the values.
                    ++_bufferIndex.Value;
                    currentElement = _buffer[_bufferIndex.Value].First;
                    currentKey     = _buffer[_bufferIndex.Value].Second;

                    return(_operatorState._updatesDone == 0 || _keyComparer.Compare(_operatorState._currentLowKey, currentKey) > 0);
                }
                else
                {
                    // If no false was found, the output is empty.
                    if (_operatorState._updatesDone == 0)
                    {
                        return(false);
                    }

                    // In the case of a skip-while, we must skip over elements whose index is lesser than the
                    // lowest index found. Once we've exhausted the buffer, we must go back and continue
                    // enumerating the data source until it is empty.
                    if (_bufferIndex.Value < _buffer.Count - 1)
                    {
                        for (_bufferIndex.Value++; _bufferIndex.Value < _buffer.Count; _bufferIndex.Value++)
                        {
                            // If the current buffered element's index is greater than or equal to the smallest
                            // false index found, we will yield it as a result.
                            if (_keyComparer.Compare(_buffer[_bufferIndex.Value].Second, _operatorState._currentLowKey) >= 0)
                            {
                                currentElement = _buffer[_bufferIndex.Value].First;
                                currentKey     = _buffer[_bufferIndex.Value].Second;
                                return(true);
                            }
                        }
                    }

                    // Lastly, so long as our input still has elements, they will be yieldable.
                    if (_source.MoveNext(ref currentElement, ref currentKey))
                    {
                        Debug.Assert(_keyComparer.Compare(currentKey, _operatorState._currentLowKey) > 0,
                                     "expected remaining element indices to be greater than smallest");
                        return(true);
                    }
                }

                return(false);
            }
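For orientation, a hedged sketch of the user-level queries this enumerator implements: TakeWhile yields buffered elements whose key precedes the lowest key for which the predicate returned false, while SkipWhile skips them and then drains the source. The data below is made up for illustration and assumes `using System.Linq;`.

int[] data = Enumerable.Range(0, 1000000).ToArray();

// Take-while path (_take == true): keep the prefix before the lowest "false" key.
int[] prefix = data.AsParallel().AsOrdered()
                   .TakeWhile(x => x < 500000)
                   .ToArray();

// Skip-while path (_take == false): drop that prefix, then continue enumerating the source.
int[] suffix = data.AsParallel().AsOrdered()
                   .SkipWhile(x => x < 500000)
                   .ToArray();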
Example 3
        //-----------------------------------------------------------------------------------
        // Marks the end of a query's execution, waiting for all tasks to finish and
        // propagating any relevant exceptions.  Note that the full set of tasks must have
        // been initialized (with SetTask) before calling this.
        //

        internal void QueryEnd(bool userInitiatedDispose)
        {
            Debug.Assert(_rootTask != null);
            //Debug.Assert(Task.Current == null || (Task.Current != _rootTask && Task.Current.Parent != _rootTask));

            if (Interlocked.Exchange(ref _alreadyEnded, 1) == 0)
            {
                // There are four cases:
                // Case #1: Wait produced an exception that is not OCE(ct), or an AggregateException which is not full of OCE(ct) ==>  We rethrow.
                // Case #2: External cancellation has been requested ==> we'll manually throw OCE(externalToken).
                // Case #3a: We are servicing a call to Dispose() (and possibly also external cancellation has been requested).. simply return.
                // Case #3b: The enumerator has already been disposed (and possibly also external cancellation was requested).  Throw an ODE.
                // Case #4: No exceptions or explicit call to Dispose() by this caller ==> we just return.

                // See also "InlinedAggregationOperator" which duplicates some of this logic for the aggregators.
                // See also "QueryOpeningEnumerator" which duplicates some of this logic.
                // See also "ExceptionAggregator" which duplicates some of this logic.

                try
                {
                    // Wait for all the tasks to complete
                    // If any of the tasks ended in the Faulted state, an AggregateException will be thrown.
                    _rootTask.Wait();
                }
                catch (AggregateException ae)
                {
                    AggregateException flattenedAE = ae.Flatten();
                    bool allOCEsOnTrackedExternalCancellationToken = true;
                    for (int i = 0; i < flattenedAE.InnerExceptions.Count; i++)
                    {
                        OperationCanceledException? oce = flattenedAE.InnerExceptions[i] as OperationCanceledException;

                        // we only let it pass through iff:
                        // it is not null, not default, and matches the exact token we were given as being the external token
                        // and the external Token is actually canceled (i.e. not a spoof OCE(extCT) for a non-canceled extCT)
                        if (oce == null ||
                            !oce.CancellationToken.IsCancellationRequested ||
                            oce.CancellationToken != _cancellationState.ExternalCancellationToken)
                        {
                            allOCEsOnTrackedExternalCancellationToken = false;
                            break;
                        }
                    }

                    // if all the exceptions were OCE(externalToken), then we will propagate only a single OCE(externalToken) below
                    // otherwise, we flatten the aggregate (because the WaitAll above already aggregated) and rethrow.
                    if (!allOCEsOnTrackedExternalCancellationToken || flattenedAE.InnerExceptions.Count == 0)
                    {
                        throw flattenedAE;  // Case #1
                    }
                }
                finally
                {
                    // _rootTask doesn't support Dispose on some platforms
                    (_rootTask as IDisposable)?.Dispose();
                }

                if (_cancellationState.MergedCancellationToken.IsCancellationRequested)
                {
                    // cancellation has occurred but no user-delegate exceptions were detected

                    // NOTE: it is important that we see other state variables correctly here, and that
                    // read-reordering hasn't played havoc.
                    // This is OK because
                    //   1. all the state writes (e.g. in the Initiate* methods) are volatile writes (standard .NET MM)
                    //   2. tokenCancellationRequested is backed by a volatile field, hence the reads below
                    //   won't get reordered about the read of token.IsCancellationRequested.

                    // If the query has already been disposed, we don't want to throw an OCE
                    if (!_cancellationState.TopLevelDisposedFlag.Value)
                    {
                        CancellationState.ThrowWithStandardMessageIfCanceled(_cancellationState.ExternalCancellationToken); // Case #2
                    }

                    //otherwise, given that there were no user-delegate exceptions (they would have been rethrown above),
                    //the only remaining situation is user-initiated dispose.
                    Debug.Assert(_cancellationState.TopLevelDisposedFlag.Value);

                    // If we aren't actively disposing, that means somebody else previously disposed
                    // of the enumerator. We must throw an ObjectDisposedException.
                    if (!userInitiatedDispose)
                    {
                        throw new ObjectDisposedException("enumerator", SR.PLINQ_DisposeRequested); // Case #3b
                    }
                }
                // Case #4. nothing to do.
            }
        }
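A hedged illustration of Case #2 above: when the external token is canceled, the AggregateException of OCE(externalToken)s thrown by the worker tasks is collapsed into a single OperationCanceledException carrying that token. `source` and `Expensive` are placeholder names.

var cts = new CancellationTokenSource();
cts.CancelAfter(10);   // request cancellation shortly after the query starts

try
{
    var results = source.AsParallel()
                        .WithCancellation(cts.Token)   // registers the "external" cancellation token
                        .Select(x => Expensive(x))     // Expensive is a placeholder delegate
                        .ToArray();
}
catch (OperationCanceledException oce) when (oce.CancellationToken == cts.Token)
{
    // QueryEnd propagated a single OCE bound to the external token (Case #2)
    // instead of the raw AggregateException from the worker tasks (Case #1).
}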
Example 4
        //---------------------------------------------------------------------------------------
        // MoveNext implements all the hash-join logic noted earlier. When it is called first, it
        // will execute the entire inner query tree, and build a hash-table lookup. This is the
        // Building phase. Then for the first call and all subsequent calls to MoveNext, we will
        // incrementally perform the Probing phase. We'll keep getting elements from the outer
        // data source, looking into the hash-table we built, and enumerating the full results.
        //
        // This routine supports both inner and outer (group) joins. An outer join will yield a
        // (possibly empty) list of matching elements from the inner instead of one-at-a-time,
        // as we do for inner joins.
        //

        internal override bool MoveNext(ref TOutput currentElement, ref TOutputKey currentKey)
        {
            Debug.Assert(_resultSelector != null, "expected a compiled result selector");
            Debug.Assert(_leftSource != null);
            Debug.Assert(_rightLookupBuilder != null);

            // BUILD phase: If we haven't built the hash-table yet, create that first.
            Mutables mutables = _mutables;

            if (mutables == null)
            {
                mutables = _mutables = new Mutables();
                mutables._rightHashLookup = _rightLookupBuilder.BuildHashLookup(_cancellationToken);
            }

            // PROBE phase: So long as the source has a next element, return the match.
            ListChunk <Pair <TRightInput, TRightKey> > currentRightChunk = mutables._currentRightMatches;

            if (currentRightChunk != null && mutables._currentRightMatchesIndex == currentRightChunk.Count)
            {
                mutables._currentRightMatches      = currentRightChunk.Next;
                mutables._currentRightMatchesIndex = 0;
            }

            if (mutables._currentRightMatches == null)
            {
                // We have to look up the next list of matches in the hash-table.
                Pair <TLeftInput, THashKey> leftPair = default(Pair <TLeftInput, THashKey>);
                TLeftKey leftKey = default(TLeftKey);
                while (_leftSource.MoveNext(ref leftPair, ref leftKey))
                {
                    if ((mutables._outputLoopCount++ & CancellationState.POLL_INTERVAL) == 0)
                    {
                        CancellationState.ThrowIfCanceled(_cancellationToken);
                    }

                    // Find the match in the hash table.
                    HashLookupValueList <TRightInput, TRightKey> matchValue = default(HashLookupValueList <TRightInput, TRightKey>);
                    TLeftInput leftElement = leftPair.First;
                    THashKey   leftHashKey = leftPair.Second;

                    // Ignore null keys.
                    if (leftHashKey != null)
                    {
                        if (mutables._rightHashLookup.TryGetValue(leftHashKey, ref matchValue))
                        {
                            // We found a new match. We remember the list in case there are multiple
                            // values under this same key -- the next iteration will pick them up.
                            mutables._currentRightMatches = matchValue.Tail;
                            Debug.Assert(mutables._currentRightMatches == null || mutables._currentRightMatches.Count > 0,
                                         "we were expecting that the list would be either null or non-empty");
                            mutables._currentRightMatchesIndex = 0;

                            // Yield the value.
                            currentElement = _resultSelector(leftElement, matchValue.Head.First);
                            currentKey     = _outputKeyBuilder.Combine(leftKey, matchValue.Head.Second);

                            // If there is a list of matches, remember the left values for next time.
                            if (matchValue.Tail != null)
                            {
                                mutables._currentLeft    = leftElement;
                                mutables._currentLeftKey = leftKey;
                            }

                            return(true);
                        }
                    }
                }

                // If we've reached the end of the data source, we're done.
                return(false);
            }

            // Produce the next element.
            Debug.Assert(mutables._currentRightMatches != null);
            Debug.Assert(0 <= mutables._currentRightMatchesIndex && mutables._currentRightMatchesIndex < mutables._currentRightMatches.Count);

            Pair <TRightInput, TRightKey> rightMatch = mutables._currentRightMatches._chunk[mutables._currentRightMatchesIndex];

            currentElement = _resultSelector(mutables._currentLeft, rightMatch.First);
            currentKey     = _outputKeyBuilder.Combine(mutables._currentLeftKey, rightMatch.Second);

            mutables._currentRightMatchesIndex++;

            return(true);
        }
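For context, a hedged user-level sketch of the inner join that drives the build phase (the inner/right side is hashed) and the probe phase (the outer/left side is scanned) above; `customers` and `orders` are hypothetical sequences.

var joined = customers.AsParallel()
                      .Join(orders.AsParallel(),
                            c => c.Id,            // key selector for the probed (outer/left) side
                            o => o.CustomerId,    // key selector for the hashed (inner/right) side
                            (c, o) => new { c.Name, o.Total })
                      .ToList();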
Example 5
        //---------------------------------------------------------------------------------------
        // Called when this enumerator is first enumerated; it must walk through the source
        // and redistribute elements to their slot in the exchange matrix.
        //

        private void EnumerateAndRedistributeElements()
        {
            Mutables mutables = _mutables;

            Debug.Assert(mutables != null);

            ListChunk <Pair>[] privateBuffers = new ListChunk <Pair> [_partitionCount];

            TInputOutput element   = default(TInputOutput);
            TIgnoreKey   ignoreKey = default(TIgnoreKey);
            int          loopCount = 0;

            while (_source.MoveNext(ref element, ref ignoreKey))
            {
                if ((loopCount++ & CancellationState.POLL_INTERVAL) == 0)
                {
                    CancellationState.ThrowIfCanceled(_cancellationToken);
                }

                // Calculate the element's destination partition index, placing it into the
                // appropriate buffer from which partitions will later enumerate.
                int      destinationIndex;
                THashKey elementHashKey = default(THashKey);
                if (_keySelector != null)
                {
                    elementHashKey   = _keySelector(element);
                    destinationIndex = _repartitionStream.GetHashCode(elementHashKey) % _partitionCount;
                }
                else
                {
                    Debug.Assert(typeof(THashKey) == typeof(NoKeyMemoizationRequired));
                    destinationIndex = _repartitionStream.GetHashCode(element) % _partitionCount;
                }

                Debug.Assert(0 <= destinationIndex && destinationIndex < _partitionCount,
                             "destination partition outside of the legal range of partitions");

                // Get the buffer for the destination partition, lazily allocating if needed.  We maintain
                // this list in our own private cache so that we avoid accessing shared memory locations
                // too much.  In the original implementation, we'd access the buffer in the matrix ([N,M],
                // where N is the current partition and M is the destination), but some rudimentary
                // performance profiling indicates copying at the end performs better.
                ListChunk <Pair> buffer = privateBuffers[destinationIndex];
                if (buffer == null)
                {
                    const int INITIAL_PRIVATE_BUFFER_SIZE = 128;
                    privateBuffers[destinationIndex] = buffer = new ListChunk <Pair>(INITIAL_PRIVATE_BUFFER_SIZE);
                }

                buffer.Add(new Pair(element, elementHashKey));
            }

            // Copy the local buffers to the shared space and then signal to other threads that
            // we are done.  We can then immediately move on to enumerating the elements we found
            // for the current partition before waiting at the barrier.  If we found a lot, we will
            // hopefully never have to physically wait.
            for (int i = 0; i < _partitionCount; i++)
            {
                _valueExchangeMatrix[_partitionIndex][i] = privateBuffers[i];
            }

            _barrier.Signal();

            // Begin at our own buffer.
            mutables._currentBufferIndex = _partitionIndex;
            mutables._currentBuffer      = privateBuffers[_partitionIndex];
            mutables._currentIndex       = ENUMERATION_NOT_STARTED;
        }
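A hedged sketch of a query whose key-based operator forces this kind of hash repartitioning: each element is routed to the partition given by its key hash modulo the partition count. `words` is a hypothetical string sequence.

var histogram = words.AsParallel()
                     .GroupBy(w => w.Length)   // elements are redistributed across partitions by key hash
                     .Select(g => new { Length = g.Key, Count = g.Count() })
                     .ToList();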
Example 6
        //---------------------------------------------------------------------------------------
        // MoveNext implements all the hash-join logic noted earlier. When it is called first, it
        // will execute the entire inner query tree, and build a hash-table lookup. This is the
        // Building phase. Then for the first call and all subsequent calls to MoveNext, we will
        // incrementally perform the Probing phase. We'll keep getting elements from the outer
        // data source, looking into the hash-table we built, and enumerating the full results.
        //
        // This routine supports both inner and outer (group) joins. An outer join will yield a
        // (possibly empty) list of matching elements from the inner instead of one-at-a-time,
        // as we do for inner joins.
        //

        internal override bool MoveNext(ref TOutput currentElement, ref TLeftKey currentKey)
        {
            Contract.Assert(_singleResultSelector != null || _groupResultSelector != null, "expected a compiled result selector");
            Contract.Assert(_leftSource != null);
            Contract.Assert(_rightSource != null);

            // BUILD phase: If we haven't built the hash-table yet, create that first.
            Mutables mutables = _mutables;

            if (mutables == null)
            {
                mutables = _mutables = new Mutables();
#if DEBUG
                int hashLookupCount   = 0;
                int hashKeyCollisions = 0;
#endif
                mutables._rightHashLookup = new HashLookup <THashKey, Pair>(_keyComparer);

                Pair rightPair      = new Pair(default(TRightInput), default(THashKey));
                int  rightKeyUnused = default(int);
                int  i = 0;
                while (_rightSource.MoveNext(ref rightPair, ref rightKeyUnused))
                {
                    if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    {
                        CancellationState.ThrowIfCanceled(_cancellationToken);
                    }

                    TRightInput rightElement = (TRightInput)rightPair.First;
                    THashKey    rightHashKey = (THashKey)rightPair.Second;

                    // We ignore null keys.
                    if (rightHashKey != null)
                    {
#if DEBUG
                        hashLookupCount++;
#endif

                        // See if we've already stored an element under the current key. If not, we
                        // lazily allocate a pair to hold the elements mapping to the same key.
                        const int INITIAL_CHUNK_SIZE = 2;
                        Pair      currentValue       = new Pair(default(TRightInput), default(ListChunk <TRightInput>));
                        if (!mutables._rightHashLookup.TryGetValue(rightHashKey, ref currentValue))
                        {
                            currentValue = new Pair(rightElement, null);

                            if (_groupResultSelector != null)
                            {
                                // For group joins, we also add the element to the list. This makes
                                // it easier later to yield the list as-is.
                                currentValue.Second = new ListChunk <TRightInput>(INITIAL_CHUNK_SIZE);
                                ((ListChunk <TRightInput>)currentValue.Second).Add((TRightInput)rightElement);
                            }

                            mutables._rightHashLookup.Add(rightHashKey, currentValue);
                        }
                        else
                        {
                            if (currentValue.Second == null)
                            {
                                // Lazily allocate a list to hold all but the 1st value. We need to
                                // re-store this element because the pair is a value type.
                                currentValue.Second = new ListChunk <TRightInput>(INITIAL_CHUNK_SIZE);
                                mutables._rightHashLookup[rightHashKey] = currentValue;
                            }

                            ((ListChunk <TRightInput>)currentValue.Second).Add((TRightInput)rightElement);
#if DEBUG
                            hashKeyCollisions++;
#endif
                        }
                    }
                }

#if DEBUG
                TraceHelpers.TraceInfo("ParallelJoinQueryOperator::MoveNext - built hash table [count = {0}, collisions = {1}]",
                                       hashLookupCount, hashKeyCollisions);
#endif
            }

            // PROBE phase: So long as the source has a next element, return the match.
            ListChunk <TRightInput> currentRightChunk = mutables._currentRightMatches;
            if (currentRightChunk != null && mutables._currentRightMatchesIndex == currentRightChunk.Count)
            {
                currentRightChunk = mutables._currentRightMatches = currentRightChunk.Next;
                mutables._currentRightMatchesIndex = 0;
            }

            if (mutables._currentRightMatches == null)
            {
                // We have to look up the next list of matches in the hash-table.
                Pair     leftPair = new Pair(default(TLeftInput), default(THashKey));
                TLeftKey leftKey  = default(TLeftKey);
                while (_leftSource.MoveNext(ref leftPair, ref leftKey))
                {
                    if ((mutables._outputLoopCount++ & CancellationState.POLL_INTERVAL) == 0)
                    {
                        CancellationState.ThrowIfCanceled(_cancellationToken);
                    }

                    // Find the match in the hash table.
                    Pair       matchValue  = new Pair(default(TRightInput), default(ListChunk <TRightInput>));
                    TLeftInput leftElement = (TLeftInput)leftPair.First;
                    THashKey   leftHashKey = (THashKey)leftPair.Second;

                    // Ignore null keys.
                    if (leftHashKey != null)
                    {
                        if (mutables._rightHashLookup.TryGetValue(leftHashKey, ref matchValue))
                        {
                            // We found a new match. For inner joins, we remember the list in case
                            // there are multiple values under this same key -- the next iteration will pick
                            // them up. For outer joins, we will use the list momentarily.
                            if (_singleResultSelector != null)
                            {
                                mutables._currentRightMatches = (ListChunk <TRightInput>)matchValue.Second;
                                Contract.Assert(mutables._currentRightMatches == null || mutables._currentRightMatches.Count > 0,
                                                "we were expecting that the list would be either null or non-empty");
                                mutables._currentRightMatchesIndex = 0;

                                // Yield the value.
                                currentElement = _singleResultSelector(leftElement, (TRightInput)matchValue.First);
                                currentKey     = leftKey;

                                // If there is a list of matches, remember the left values for next time.
                                if (matchValue.Second != null)
                                {
                                    mutables._currentLeft    = leftElement;
                                    mutables._currentLeftKey = leftKey;
                                }

                                return(true);
                            }
                        }
                    }

                    // For outer joins, we always yield a result.
                    if (_groupResultSelector != null)
                    {
                        // Grab the matches, or create an empty list if there are none.
                        IEnumerable <TRightInput> matches = (ListChunk <TRightInput>)matchValue.Second;
                        if (matches == null)
                        {
                            matches = ParallelEnumerable.Empty <TRightInput>();
                        }

                        // Generate the current value.
                        currentElement = _groupResultSelector(leftElement, matches);
                        currentKey     = leftKey;
                        return(true);
                    }
                }

                // If we've reached the end of the data source, we're done.
                return(false);
            }

            // Produce the next element and increment our index within the matches.
            Contract.Assert(_singleResultSelector != null);
            Contract.Assert(mutables._currentRightMatches != null);
            Contract.Assert(0 <= mutables._currentRightMatchesIndex && mutables._currentRightMatchesIndex < mutables._currentRightMatches.Count);

            currentElement = _singleResultSelector(
                mutables._currentLeft, mutables._currentRightMatches._chunk[mutables._currentRightMatchesIndex]);
            currentKey = mutables._currentLeftKey;

            mutables._currentRightMatchesIndex++;

            return(true);
        }
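A hedged sketch of the two user-level shapes this enumerator serves: Join exercises the _singleResultSelector path, while GroupJoin exercises the _groupResultSelector path and always yields a (possibly empty) match list. `customers` and `orders` are hypothetical sequences.

// Inner join: one output element per matching (left, right) pair.
var joined = customers.AsParallel()
                      .Join(orders.AsParallel(), c => c.Id, o => o.CustomerId,
                            (c, o) => new { c.Name, o.Total })
                      .ToList();

// Group join: one output element per left element, carrying all of its matches.
var grouped = customers.AsParallel()
                       .GroupJoin(orders.AsParallel(), c => c.Id, o => o.CustomerId,
                                  (c, matches) => new { c.Name, OrderCount = matches.Count() })
                       .ToList();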
            //---------------------------------------------------------------------------------------
            // Walks the two data sources, left and then right, to produce the intersection.
            //

            internal override bool MoveNext(ref TInputOutput currentElement, ref TLeftKey currentKey)
            {
                Contract.Assert(m_leftSource != null);
                Contract.Assert(m_rightSource != null);

                // Build the set out of the left data source, if we haven't already.
                int i = 0;

                if (m_hashLookup == null)
                {
                    // @TODO: @PERF: @BUG#594: different implementation of set operations. Consider a treap.
                    m_hashLookup = new Dictionary <Wrapper <TInputOutput>, Pair <TInputOutput, TLeftKey> >(m_comparer);

                    Pair <TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);
                    TLeftKey leftKey = default(TLeftKey);
                    while (m_leftSource.MoveNext(ref leftElement, ref leftKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        // For each element, we track the smallest order key for that element that we saw so far
                        Pair <TInputOutput, TLeftKey> oldEntry;
                        Wrapper <TInputOutput>        wrappedLeftElem = new Wrapper <TInputOutput>(leftElement.First);

                        // If this is the first occurrence of this element, or the order key is lower than all keys we saw previously,
                        // update the order key for this element.
                        if (!m_hashLookup.TryGetValue(wrappedLeftElem, out oldEntry) || m_leftKeyComparer.Compare(leftKey, oldEntry.Second) < 0)
                        {
                            // For each "elem" value, we store the smallest key, and the element value that had that key.
                            // Note that even though two element values are "equal" according to the EqualityComparer,
                            // we still cannot choose arbitrarily which of the two to yield.
                            m_hashLookup[wrappedLeftElem] = new Pair <TInputOutput, TLeftKey>(leftElement.First, leftKey);
                        }
                    }
                }

                // Now iterate over the right data source, looking for matches.
                Pair <TInputOutput, NoKeyMemoizationRequired> rightElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);
                int rightKeyUnused = default(int);

                while (m_rightSource.MoveNext(ref rightElement, ref rightKeyUnused))
                {
                    if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    {
                        CancellationState.ThrowIfCanceled(m_cancellationToken);
                    }

                    // If we found the element in our set, and if we haven't returned it yet,
                    // we can yield it to the caller. We also mark it so we know we've returned
                    // it once already and never will again.

                    Pair <TInputOutput, TLeftKey> entry;
                    Wrapper <TInputOutput>        wrappedRightElem = new Wrapper <TInputOutput>(rightElement.First);

                    if (m_hashLookup.TryGetValue(wrappedRightElem, out entry))
                    {
                        currentElement = entry.First;
                        currentKey     = entry.Second;

                        // @TODO: @PERF: avoid the double lookup required to remove the element
                        //     we have already found in the hashtable.
                        m_hashLookup.Remove(new Wrapper <TInputOutput>(entry.First));
                        return(true);
                    }
                }

                return(false);
            }
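A hedged sketch of the query behind this enumerator: Intersect builds its lookup from the left source, probes it with the right source, and removes each entry when it is yielded so every element appears at most once. `left` and `right` are hypothetical string sequences.

var common = left.AsParallel()
                 .Intersect(right.AsParallel(), StringComparer.OrdinalIgnoreCase)
                 .ToList();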
        private void MergeSortCooperatively()
        {
            CancellationToken mergedCancellationToken = this.m_groupState.CancellationState.MergedCancellationToken;
            int length = this.m_sharedBarriers.GetLength(0);

            for (int i = 0; i < length; i++)
            {
                bool flag = i == (length - 1);
                int  num3 = this.ComputePartnerIndex(i);
                if (num3 < this.m_partitionCount)
                {
                    int[] numArray               = this.m_sharedIndices[this.m_partitionIndex];
                    GrowingArray <TKey> array    = this.m_sharedKeys[this.m_partitionIndex];
                    TKey[]         internalArray = array.InternalArray;
                    TInputOutput[] sourceArray   = this.m_sharedValues[this.m_partitionIndex];
                    this.m_sharedBarriers[i, Math.Min(this.m_partitionIndex, num3)].SignalAndWait(mergedCancellationToken);
                    if (this.m_partitionIndex >= num3)
                    {
                        this.m_sharedBarriers[i, num3].SignalAndWait(mergedCancellationToken);
                        int[]               numArray4   = this.m_sharedIndices[this.m_partitionIndex];
                        TKey[]              localArray6 = this.m_sharedKeys[this.m_partitionIndex].InternalArray;
                        TInputOutput[]      localArray7 = this.m_sharedValues[this.m_partitionIndex];
                        int[]               numArray5   = this.m_sharedIndices[num3];
                        GrowingArray <TKey> array2      = this.m_sharedKeys[num3];
                        TInputOutput[]      localArray8 = this.m_sharedValues[num3];
                        int destinationIndex            = localArray7.Length;
                        int num12 = sourceArray.Length;
                        int num13 = destinationIndex + num12;
                        int num14 = (num13 + 1) / 2;
                        int num15 = num13 - 1;
                        int num16 = destinationIndex - 1;
                        int num17 = num12 - 1;
                        while (num15 >= num14)
                        {
                            if ((num15 & 0x3f) == 0)
                            {
                                CancellationState.ThrowIfCanceled(mergedCancellationToken);
                            }
                            if ((num16 >= 0) && ((num17 < 0) || (this.m_keyComparer.Compare(localArray6[numArray4[num16]], internalArray[numArray[num17]]) > 0)))
                            {
                                if (flag)
                                {
                                    localArray8[num15] = localArray7[numArray4[num16]];
                                }
                                else
                                {
                                    numArray5[num15] = numArray4[num16];
                                }
                                num16--;
                            }
                            else
                            {
                                if (flag)
                                {
                                    localArray8[num15] = sourceArray[numArray[num17]];
                                }
                                else
                                {
                                    numArray5[num15] = destinationIndex + numArray[num17];
                                }
                                num17--;
                            }
                            num15--;
                        }
                        if (!flag && (sourceArray.Length > 0))
                        {
                            array2.CopyFrom(internalArray, sourceArray.Length);
                            Array.Copy(sourceArray, 0, localArray8, destinationIndex, sourceArray.Length);
                        }
                        this.m_sharedBarriers[i, num3].SignalAndWait(mergedCancellationToken);
                        return;
                    }
                    int[]          numArray2   = this.m_sharedIndices[num3];
                    TKey[]         localArray3 = this.m_sharedKeys[num3].InternalArray;
                    TInputOutput[] localArray4 = this.m_sharedValues[num3];
                    this.m_sharedIndices[num3] = numArray;
                    this.m_sharedKeys[num3]    = array;
                    this.m_sharedValues[num3]  = sourceArray;
                    int            num4             = sourceArray.Length;
                    int            num5             = localArray4.Length;
                    int            num6             = num4 + num5;
                    int[]          numArray3        = null;
                    TInputOutput[] destinationArray = new TInputOutput[num6];
                    if (!flag)
                    {
                        numArray3 = new int[num6];
                    }
                    this.m_sharedIndices[this.m_partitionIndex] = numArray3;
                    this.m_sharedKeys[this.m_partitionIndex]    = array;
                    this.m_sharedValues[this.m_partitionIndex]  = destinationArray;
                    this.m_sharedBarriers[i, this.m_partitionIndex].SignalAndWait(mergedCancellationToken);
                    int num7  = (num6 + 1) / 2;
                    int index = 0;
                    int num9  = 0;
                    int num10 = 0;
                    while (index < num7)
                    {
                        if ((index & 0x3f) == 0)
                        {
                            CancellationState.ThrowIfCanceled(mergedCancellationToken);
                        }
                        if ((num9 < num4) && ((num10 >= num5) || (this.m_keyComparer.Compare(internalArray[numArray[num9]], localArray3[numArray2[num10]]) <= 0)))
                        {
                            if (flag)
                            {
                                destinationArray[index] = sourceArray[numArray[num9]];
                            }
                            else
                            {
                                numArray3[index] = numArray[num9];
                            }
                            num9++;
                        }
                        else
                        {
                            if (flag)
                            {
                                destinationArray[index] = localArray4[numArray2[num10]];
                            }
                            else
                            {
                                numArray3[index] = num4 + numArray2[num10];
                            }
                            num10++;
                        }
                        index++;
                    }
                    if (!flag && (num4 > 0))
                    {
                        Array.Copy(sourceArray, 0, destinationArray, 0, num4);
                    }
                    this.m_sharedBarriers[i, this.m_partitionIndex].SignalAndWait(mergedCancellationToken);
                }
            }
        }
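The decompiled routine above appears to be the cooperative, pairwise merge phase of PLINQ's sort: in each round a partition pairs with a partner, the higher-indexed partner merges the upper half of the combined keys and drops out, and the lower-indexed partner merges the lower half and continues to the next round. A hedged sketch of a user-level query that reaches it (`items` is hypothetical):

var sorted = items.AsParallel()
                  .OrderBy(item => item.Key)
                  .ToArray();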
Example 9
 internal PartitionedStreamMerger(bool forEffectMerge, ParallelMergeOptions mergeOptions, TaskScheduler taskScheduler, bool outputOrdered, CancellationState cancellationState, int queryId)
 {
     this.m_forEffectMerge    = forEffectMerge;
     this.m_mergeOptions      = mergeOptions;
     this.m_isOrdered         = outputOrdered;
     this.m_taskScheduler     = taskScheduler;
     this.m_cancellationState = cancellationState;
     this.m_queryId           = queryId;
 }
Example 10
            internal override bool MoveNext(ref TResult currentElement, ref int currentKey)
            {
                if ((this.m_buffer == null) && (this.m_count > 0))
                {
                    List <Pair <TResult, int> > list = new List <Pair <TResult, int> >();
                    TResult local = default(TResult);
                    int     num   = 0;
                    int     num2  = 0;
                    while ((list.Count < this.m_count) && this.m_source.MoveNext(ref local, ref num))
                    {
                        if ((num2++ & 0x3f) == 0)
                        {
                            CancellationState.ThrowIfCanceled(this.m_cancellationToken);
                        }
                        list.Add(new Pair <TResult, int>(local, num));
                        lock (this.m_sharedIndices)
                        {
                            if (!this.m_sharedIndices.Insert(num))
                            {
                                break;
                            }
                        }
                    }
                    this.m_sharedBarrier.Signal();
                    this.m_sharedBarrier.Wait(this.m_cancellationToken);
                    this.m_buffer      = list;
                    this.m_bufferIndex = new Shared <int>(-1);
                }
                if (this.m_take)
                {
                    if ((this.m_count == 0) || (this.m_bufferIndex.Value >= (this.m_buffer.Count - 1)))
                    {
                        return(false);
                    }
                    this.m_bufferIndex.Value += 1;
                    Pair <TResult, int> pair = this.m_buffer[this.m_bufferIndex.Value];
                    currentElement = pair.First;
                    currentKey     = pair.Second;
                    int maxValue = this.m_sharedIndices.MaxValue;
                    if (maxValue != -1)
                    {
                        return(pair.Second <= maxValue);
                    }
                    return(true);
                }
                int num4 = -1;

                if (this.m_count > 0)
                {
                    if (this.m_sharedIndices.Count < this.m_count)
                    {
                        return(false);
                    }
                    num4 = this.m_sharedIndices.MaxValue;
                    if (this.m_bufferIndex.Value < (this.m_buffer.Count - 1))
                    {
                        this.m_bufferIndex.Value += 1;
                        while (this.m_bufferIndex.Value < this.m_buffer.Count)
                        {
                            Pair <TResult, int> pair4 = this.m_buffer[this.m_bufferIndex.Value];
                            if (pair4.Second > num4)
                            {
                                currentElement = pair4.First;
                                currentKey     = pair4.Second;
                                return(true);
                            }
                            this.m_bufferIndex.Value += 1;
                        }
                    }
                }
                return(this.m_source.MoveNext(ref currentElement, ref currentKey));
            }
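A hedged sketch of the Take/Skip queries this decompiled enumerator implements: each partition buffers up to m_count elements, publishes their indices into the shared index set, and then either yields (Take) or skips (Skip) based on the shared maximum index. `data` is hypothetical.

int[] firstTen = data.AsParallel().AsOrdered().Take(10).ToArray();
int[] theRest  = data.AsParallel().AsOrdered().Skip(10).ToArray();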
Example 11
            //---------------------------------------------------------------------------------------
            // Walks the two data sources, left and then right, to produce the union.
            //

            internal override bool MoveNext(ref TInputOutput currentElement, ref ConcatKey <TLeftKey, TRightKey> currentKey)
            {
                Contract.Assert(m_leftSource != null);
                Contract.Assert(m_rightSource != null);

                if (m_outputEnumerator == null)
                {
                    IEqualityComparer <Wrapper <TInputOutput> > wrapperComparer = new WrapperEqualityComparer <TInputOutput>(m_comparer);
                    Dictionary <Wrapper <TInputOutput>, Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> > > union =
                        new Dictionary <Wrapper <TInputOutput>, Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> > >(wrapperComparer);

                    Pair <TInputOutput, NoKeyMemoizationRequired> elem = default(Pair <TInputOutput, NoKeyMemoizationRequired>);
                    TLeftKey leftKey = default(TLeftKey);

                    int i = 0;
                    while (m_leftSource.MoveNext(ref elem, ref leftKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        ConcatKey <TLeftKey, TRightKey> key =
                            ConcatKey <TLeftKey, TRightKey> .MakeLeft(m_leftOrdered ? leftKey : default(TLeftKey));

                        Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> > oldEntry;
                        Wrapper <TInputOutput> wrappedElem = new Wrapper <TInputOutput>(elem.First);

                        if (!union.TryGetValue(wrappedElem, out oldEntry) || m_keyComparer.Compare(key, oldEntry.Second) < 0)
                        {
                            union[wrappedElem] = new Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> >(elem.First, key);
                        }
                    }

                    TRightKey rightKey = default(TRightKey);
                    while (m_rightSource.MoveNext(ref elem, ref rightKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        ConcatKey <TLeftKey, TRightKey> key =
                            ConcatKey <TLeftKey, TRightKey> .MakeRight(m_rightOrdered ? rightKey : default(TRightKey));

                        Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> > oldEntry;
                        Wrapper <TInputOutput> wrappedElem = new Wrapper <TInputOutput>(elem.First);

                        if (!union.TryGetValue(wrappedElem, out oldEntry) || m_keyComparer.Compare(key, oldEntry.Second) < 0)
                        {
                            union[wrappedElem] = new Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> >(elem.First, key);
                        }
                    }

                    m_outputEnumerator = union.GetEnumerator();
                }

                if (m_outputEnumerator.MoveNext())
                {
                    Pair <TInputOutput, ConcatKey <TLeftKey, TRightKey> > current = m_outputEnumerator.Current.Value;
                    currentElement = current.First;
                    currentKey     = current.Second;
                    return(true);
                }

                return(false);
            }
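A hedged sketch of the user-level union this enumerator computes by folding both sources into a single keyed dictionary before any element is yielded. `left` and `right` are hypothetical sequences of the same element type.

var merged = left.AsParallel()
                 .Union(right.AsParallel())
                 .ToList();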
Example 12
        internal DefaultMergeHelper(PartitionedStream <TInputOutput, TIgnoreKey> partitions, bool ignoreOutput, ParallelMergeOptions options, TaskScheduler taskScheduler, CancellationState cancellationState, int queryId)
        {
            this.m_taskGroupState = new QueryTaskGroupState(cancellationState, queryId);
            this.m_partitions     = partitions;
            this.m_taskScheduler  = taskScheduler;
            this.m_ignoreOutput   = ignoreOutput;
            if (!ignoreOutput)
            {
                if (options != ParallelMergeOptions.FullyBuffered)
                {
                    if (partitions.PartitionCount > 1)
                    {
                        this.m_asyncChannels = MergeExecutor <TInputOutput> .MakeAsynchronousChannels(partitions.PartitionCount, options, cancellationState.MergedCancellationToken);

                        this.m_channelEnumerator = new AsynchronousChannelMergeEnumerator <TInputOutput>(this.m_taskGroupState, this.m_asyncChannels);
                    }
                    else
                    {
                        this.m_channelEnumerator = ExceptionAggregator.WrapQueryEnumerator <TInputOutput, TIgnoreKey>(partitions[0], this.m_taskGroupState.CancellationState).GetEnumerator();
                    }
                }
                else
                {
                    this.m_syncChannels = MergeExecutor <TInputOutput> .MakeSynchronousChannels(partitions.PartitionCount);

                    this.m_channelEnumerator = new SynchronousChannelMergeEnumerator <TInputOutput>(this.m_taskGroupState, this.m_syncChannels);
                }
            }
        }
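A hedged sketch of how the merge strategy chosen in this constructor is selected from user code: any option other than FullyBuffered routes multi-partition output through asynchronous channels (pipelined), while FullyBuffered uses synchronous channels. `data` and `Transform` are placeholder names.

var results = data.AsParallel()
                  .WithMergeOptions(ParallelMergeOptions.NotBuffered)   // pipelined merge via asynchronous channels
                  .Select(x => Transform(x))
                  .ToList();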
Example 13
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
            {
                Contract.Assert(m_source != null);

                if (m_alreadySearched)
                {
                    return(false);
                }

                // Look for the lowest element.
                TSource candidate      = default(TSource);
                int     candidateIndex = -1;

                try
                {
                    int key = default(int);
                    int i   = 0;
                    while (m_source.MoveNext(ref candidate, ref key))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        // If the predicate is null or the current element satisfies it, we have found the
                        // current partition's "candidate" for the first element.  Note it.
                        if (m_predicate == null || m_predicate(candidate))
                        {
                            candidateIndex = key;

                            // Try to swap our index with the shared one, so long as it's smaller.
                            int observedSharedIndex;
                            do
                            {
                                observedSharedIndex = m_sharedFirstCandidate.Value;
                            } while ((observedSharedIndex == -1 || candidateIndex < observedSharedIndex) &&
                                     Interlocked.CompareExchange(ref m_sharedFirstCandidate.Value, candidateIndex, observedSharedIndex) != observedSharedIndex);
                            break;
                        }
                        else if (m_sharedFirstCandidate.Value != -1 && key > m_sharedFirstCandidate.Value)
                        {
                            // We've scanned past another partition's best element. Bail.
                            break;
                        }
                    }
                }
                finally
                {
                    // No matter whether we exit due to an exception or normal completion, we must ensure
                    // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                    m_sharedBarrier.Signal();
                }

                m_alreadySearched = true;

                // Only if we might be a candidate do we wait.
                if (candidateIndex != -1)
                {
                    m_sharedBarrier.Wait(m_cancellationToken);

                    // Now re-read the shared index. If it's the same as ours, we won and return true.
                    if (m_sharedFirstCandidate.Value == candidateIndex)
                    {
                        currentElement = candidate;
                        currentKey     = 0; // 1st (and only) element, so we hardcode the output index to 0.
                        return(true);
                    }
                }

                // If we got here, we didn't win. Return false.
                return(false);
            }
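
For context, the search-then-barrier pattern above appears to back PLINQ's First operator: each partition finds a candidate, all partitions meet at the barrier, and the smallest source index wins. A minimal consumer-side sketch (the array and predicate are illustrative, not from the source):

using System;
using System.Linq;

class FirstOperatorUsage
{
    static void Main()
    {
        int[] data = Enumerable.Range(0, 1000000).ToArray();

        // Each partition scans for its own candidate, the partitions meet at a barrier,
        // and the candidate with the smallest source index is the one that gets returned.
        int first = data.AsParallel().AsOrdered().First(x => x > 500000);

        Console.WriteLine(first); // 500001
    }
}
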
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TResult currentElement, ref int currentKey)
            {
                // If the buffer has not been created, we will generate it lazily on demand.
                if (m_buffer == null)
                {
                    // Create a buffer, but don't publish it yet (in case of exception).
                    List <Pair <TResult, int> > buffer = new List <Pair <TResult, int> >();

                    // Enter the search phase.  In this phase, we scan the input until one of three
                    // things happens:  (1) all input has been exhausted, (2) the predicate yields
                    // false for one of our elements, or (3) we move past the current lowest index
                    // found by other partitions for a false element.  As we go, we have to remember
                    // the elements by placing them into the buffer.

                    // @TODO: @BUG#595: should we integrate these kinds of loops with cancellation due to exceptions?

                    try
                    {
                        TResult current = default(TResult);
                        int     index   = default(int);
                        int     i       = 0; //counter to help with cancellation
                        while (m_source.MoveNext(ref current, ref index))
                        {
                            if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                            {
                                CancellationState.ThrowIfCanceled(m_cancellationToken);
                            }

                            // Add the current element to our buffer.
                            // @TODO: @PERF: @BUG#414: some day we can optimize this, e.g. if the input is an array,
                            //     we can always just rescan it later. Could expose this via a "Reset" mechanism.
                            buffer.Add(new Pair <TResult, int>(current, index));

                            // See if another partition has found a false value before this element. If so,
                            // we should stop scanning the input now and reach the barrier ASAP.
                            int currentLowIndex = m_sharedLowFalse.Value;
                            if (currentLowIndex != -1 && index > currentLowIndex)
                            {
                                break;
                            }

                            // Evaluate the predicate, either indexed or not based on info passed to the ctor.
                            bool predicateResult;
                            if (m_predicate != null)
                            {
                                predicateResult = m_predicate(current);
                            }
                            else
                            {
                                Contract.Assert(m_indexedPredicate != null);
                                predicateResult = m_indexedPredicate(current, index);
                            }

                            if (!predicateResult)
                            {
                                // Signal that we've found a false element, racing with other partitions to
                                // set the shared index value. If we lose this race, that's fine: the one trying
                                // to publish the lowest value will ultimately win; so we retry if ours is
                                // lower, or bail right away otherwise. We use a spin wait to deal with contention.
                                int      observedLowIndex;
                                SpinWait s = new SpinWait();
                                while (true)
                                {
                                    // Read the current value of the index with a volatile load to prevent movement.
                                    observedLowIndex = Thread.VolatileRead(ref m_sharedLowFalse.Value);

                                    // If the current shared index is set and lower than ours, we won't try to CAS.
                                    if ((observedLowIndex != -1 && observedLowIndex < index) ||
                                        Interlocked.CompareExchange(ref m_sharedLowFalse.Value, index, observedLowIndex) == observedLowIndex)
                                    {
                                        // Either the current value is lower or we succeeded in swapping the
                                        // current value with ours. We're done.
                                        break;
                                    }

                                    // If we failed the swap, we will spin briefly to reduce contention.
                                    s.SpinOnce();
                                }

                                // Exit the loop and reach the barrier.
                                break;
                            }
                        }
                    }
                    finally
                    {
                        // No matter whether we exit due to an exception or normal completion, we must ensure
                        // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                        m_sharedBarrier.Signal();
                    }

                    // Before exiting the search phase, we will synchronize with others. This is a barrier.
                    m_sharedBarrier.Wait(m_cancellationToken);

                    // Publish the buffer and set the index to just before the 1st element.
                    m_buffer      = buffer;
                    m_bufferIndex = new Shared <int>(-1);
                }

                // Now either enter (or continue) the yielding phase. As soon as we reach this, we know the
                // current shared "low false" value is the absolute lowest with a false.
                if (m_take)
                {
                    // In the case of a take-while, we will yield each element from our buffer whose
                    // index is less than the lowest false index found.
                    if (m_bufferIndex.Value >= m_buffer.Count - 1)
                    {
                        return(false);
                    }

                    // Increment the index, and remember the values.
                    ++m_bufferIndex.Value;
                    currentElement = m_buffer[m_bufferIndex.Value].First;
                    currentKey     = m_buffer[m_bufferIndex.Value].Second;

                    return(m_sharedLowFalse.Value == -1 ||
                           m_sharedLowFalse.Value > m_buffer[m_bufferIndex.Value].Second);
                }
                else
                {
                    // If no false was found, the output is empty.
                    if (m_sharedLowFalse.Value == -1)
                    {
                        return(false);
                    }

                    // In the case of a skip-while, we must skip over elements whose index is less than the
                    // lowest false index found. Once we've exhausted the buffer, we must go back and continue
                    // enumerating the data source until it is empty.
                    if (m_bufferIndex.Value < m_buffer.Count - 1)
                    {
                        for (m_bufferIndex.Value++; m_bufferIndex.Value < m_buffer.Count; m_bufferIndex.Value++)
                        {
                            // If the current buffered element's index is greater than or equal to the smallest
                            // false index found, we will yield it as a result.
                            if (m_buffer[m_bufferIndex.Value].Second >= m_sharedLowFalse.Value)
                            {
                                currentElement = m_buffer[m_bufferIndex.Value].First;
                                currentKey     = m_buffer[m_bufferIndex.Value].Second;
                                return(true);
                            }
                        }
                    }

                    // Lastly, so long as our input still has elements, they will be yieldable.
                    if (m_source.MoveNext(ref currentElement, ref currentKey))
                    {
                        Contract.Assert(currentKey > m_sharedLowFalse.Value,
                                        "expected remaining element indices to be greater than smallest");
                        return(true);
                    }
                }

                return(false);
            }
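
A hedged usage sketch for the take-while/skip-while machinery above; the input sequence and predicate are made up for illustration:

using System;
using System.Linq;

class TakeSkipWhileUsage
{
    static void Main()
    {
        int[] data = Enumerable.Range(0, 1000).ToArray();

        // TakeWhile yields buffered elements whose index precedes the lowest index at which
        // the predicate failed; SkipWhile yields everything from that index onward.
        int[] taken   = data.AsParallel().AsOrdered().TakeWhile(x => x < 100).ToArray();
        int[] skipped = data.AsParallel().AsOrdered().SkipWhile(x => x < 100).ToArray();

        Console.WriteLine(taken.Length);   // 100
        Console.WriteLine(skipped.Length); // 900
    }
}
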
Example #15
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TResult currentElement, ref int currentKey)
            {
                Contract.Assert(m_sharedIndices != null);

                // If the buffer has not been created, we will populate it lazily on demand.
                if (m_buffer == null && m_count > 0)
                {
                    // Create a buffer, but don't publish it yet (in case of exception).
                    List <Pair <TResult, int> > buffer = new List <Pair <TResult, int> >();

                    // Enter the search phase. In this phase, all partitions race to populate
                    // the shared indices with their first 'count' contiguous elements.
                    TResult current = default(TResult);
                    int     index   = default(int);
                    int     i       = 0; //counter to help with cancellation
                    while (buffer.Count < m_count && m_source.MoveNext(ref current, ref index))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        // Add the current element to our buffer.
                        // @TODO: @PERF: @BUG#414: some day we can optimize this, e.g. if the input is an array,
                        //     we can always just rescan it later. Could expose this via a "Reset" mechanism.
                        buffer.Add(new Pair <TResult, int>(current, index));

                        // Now we will try to insert our index into the shared indices list, quitting if
                        // our index is greater than all of the indices already inside it.
                        lock (m_sharedIndices)
                        {
                            if (!m_sharedIndices.Insert(index))
                            {
                                // We have read past the maximum index. We can move to the barrier now.
                                break;
                            }
                        }
                    }

                    // Before exiting the search phase, we will synchronize with others. This is a barrier.
                    m_sharedBarrier.Signal();
                    m_sharedBarrier.Wait(m_cancellationToken);

                    // Publish the buffer and set the index to just before the 1st element.
                    m_buffer      = buffer;
                    m_bufferIndex = new Shared <int>(-1);
                }

                // Now either enter (or continue) the yielding phase. As soon as we reach this, we know the
                // index of the 'count'-th input element.
                if (m_take)
                {
                    // In the case of a Take, we will yield each element from our buffer whose
                    // index is less than or equal to the 'count'-th index found.
                    if (m_count == 0 || m_bufferIndex.Value >= m_buffer.Count - 1)
                    {
                        return(false);
                    }

                    // Increment the index, and remember the values.
                    ++m_bufferIndex.Value;
                    currentElement = m_buffer[m_bufferIndex.Value].First;
                    currentKey     = m_buffer[m_bufferIndex.Value].Second;

                    // Only yield the element if its index is less than or equal to the max index.
                    int maxIndex = m_sharedIndices.MaxValue;
                    return(maxIndex == -1 || m_buffer[m_bufferIndex.Value].Second <= maxIndex);
                }
                else
                {
                    int minIndex = -1;

                    // If the count to skip was greater than 0, look at the buffer.
                    if (m_count > 0)
                    {
                        // If there wasn't enough input to skip, return right away.
                        if (m_sharedIndices.Count < m_count)
                        {
                            return(false);
                        }

                        minIndex = m_sharedIndices.MaxValue;

                        // In the case of a skip, we must skip over elements whose index is less than or equal
                        // to the 'count'-th index found. Once we've exhausted the buffer, we must go back and continue
                        // enumerating the data source until it is empty.
                        if (m_bufferIndex.Value < m_buffer.Count - 1)
                        {
                            for (m_bufferIndex.Value++; m_bufferIndex.Value < m_buffer.Count; m_bufferIndex.Value++)
                            {
                                // If the current buffered element's index is greater than the 'count'-th index,
                                // we will yield it as a result.
                                if (m_buffer[m_bufferIndex.Value].Second > minIndex)
                                {
                                    currentElement = m_buffer[m_bufferIndex.Value].First;
                                    currentKey     = m_buffer[m_bufferIndex.Value].Second;
                                    return(true);
                                }
                            }
                        }
                    }

                    // Lastly, so long as our input still has elements, they will be yieldable.
                    if (m_source.MoveNext(ref currentElement, ref currentKey))
                    {
                        Contract.Assert(currentKey > minIndex,
                                        "expected remaining element indices to be greater than smallest");
                        return(true);
                    }
                }

                return(false);
            }
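
The same mechanism from the consumer's side, as a small sketch with illustrative data:

using System;
using System.Linq;

class TakeSkipUsage
{
    static void Main()
    {
        int[] data = Enumerable.Range(0, 10000).ToArray();

        // Partitions race to publish the indices of their first 'count' elements into the
        // shared index list; its maximum then decides which buffered elements are yielded.
        int[] firstTen = data.AsParallel().AsOrdered().Take(10).ToArray();
        int[] theRest  = data.AsParallel().AsOrdered().Skip(10).ToArray();

        Console.WriteLine(string.Join(",", firstTen)); // 0,1,2,3,4,5,6,7,8,9
        Console.WriteLine(theRest.Length);             // 9990
    }
}
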
Example #16
        internal static MergeExecutor <TInputOutput> Execute <TKey>(PartitionedStream <TInputOutput, TKey> partitions, bool ignoreOutput, ParallelMergeOptions options, TaskScheduler taskScheduler, bool isOrdered, CancellationState cancellationState, int queryId)
        {
            MergeExecutor <TInputOutput> executor = new MergeExecutor <TInputOutput>();

            if (isOrdered && !ignoreOutput)
            {
                if ((options != ParallelMergeOptions.FullyBuffered) && !partitions.OrdinalIndexState.IsWorseThan(OrdinalIndexState.Increasing))
                {
                    bool autoBuffered = options == ParallelMergeOptions.AutoBuffered;
                    if (partitions.PartitionCount > 1)
                    {
                        executor.m_mergeHelper = new OrderPreservingPipeliningMergeHelper <TInputOutput>((PartitionedStream <TInputOutput, int>)partitions, taskScheduler, cancellationState, autoBuffered, queryId);
                    }
                    else
                    {
                        executor.m_mergeHelper = new DefaultMergeHelper <TInputOutput, TKey>(partitions, false, options, taskScheduler, cancellationState, queryId);
                    }
                }
                else
                {
                    executor.m_mergeHelper = new OrderPreservingMergeHelper <TInputOutput, TKey>(partitions, taskScheduler, cancellationState, queryId);
                }
            }
            else
            {
                executor.m_mergeHelper = new DefaultMergeHelper <TInputOutput, TKey>(partitions, ignoreOutput, options, taskScheduler, cancellationState, queryId);
            }
            executor.Execute();
            return(executor);
        }
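
Which merge helper gets picked depends on the query's ordering and its merge options; a small, hypothetical example of steering that from the public API:

using System;
using System.Linq;

class MergeOptionsUsage
{
    static void Main()
    {
        var query = Enumerable.Range(0, 1000000)
            .AsParallel()
            .AsOrdered()
            .WithMergeOptions(ParallelMergeOptions.NotBuffered) // prefer a pipelining merge path
            .Select(x => x * 2);

        // Results stream to the consumer as the partitions produce them.
        foreach (int value in query.Take(5))
        {
            Console.WriteLine(value); // 0, 2, 4, 6, 8
        }
    }
}
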
Example #17
        //-----------------------------------------------------------------------------------
        // Constructs a new settings structure.
        //
        internal QuerySettings(TaskScheduler taskScheduler, int? degreeOfParallelism,
            CancellationToken externalCancellationToken, ParallelExecutionMode? executionMode,
            ParallelMergeOptions? mergeOptions)
        {
            _taskScheduler = taskScheduler;
            _degreeOfParallelism = degreeOfParallelism;
            _cancellationState = new CancellationState(externalCancellationToken);
            _executionMode = executionMode;
            _mergeOptions = mergeOptions;
            _queryId = -1;

            Contract.Assert(_cancellationState != null);
        }
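
These settings are populated by the With* operators on the query. A sketch of how each field maps to the public surface (the values are arbitrary):

using System;
using System.Linq;
using System.Threading;

class QuerySettingsUsage
{
    static void Main()
    {
        var cts = new CancellationTokenSource();

        int[] results = Enumerable.Range(0, 100000)
            .AsParallel()
            .WithDegreeOfParallelism(4)                                // _degreeOfParallelism
            .WithCancellation(cts.Token)                               // wrapped into a CancellationState
            .WithExecutionMode(ParallelExecutionMode.ForceParallelism) // _executionMode
            .WithMergeOptions(ParallelMergeOptions.AutoBuffered)       // _mergeOptions
            .Where(x => x % 3 == 0)
            .ToArray();

        Console.WriteLine(results.Length); // 33334
    }
}
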
            //---------------------------------------------------------------------------------------
            // Walks the two data sources, left and then right, to produce the distinct set
            //

            internal override bool MoveNext(ref TInputOutput currentElement, ref TLeftKey currentKey)
            {
                Debug.Assert(_leftSource != null);
                Debug.Assert(_rightSource != null);

                // Build the set out of the left data source, if we haven't already.
                if (_outputEnumerator == null)
                {
                    Set <TInputOutput> rightLookup = new Set <TInputOutput>(_comparer);

                    Pair <TInputOutput, NoKeyMemoizationRequired> rightElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);
                    int rightKeyUnused = default(int);
                    int i = 0;
                    while (_rightSource.MoveNext(ref rightElement, ref rightKeyUnused))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        rightLookup.Add(rightElement.First);
                    }

                    var leftLookup =
                        new Dictionary <Wrapper <TInputOutput>, Pair <TInputOutput, TLeftKey> >(
                            new WrapperEqualityComparer <TInputOutput>(_comparer));

                    Pair <TInputOutput, NoKeyMemoizationRequired> leftElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);
                    TLeftKey leftKey = default(TLeftKey);
                    while (_leftSource.MoveNext(ref leftElement, ref leftKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        if (rightLookup.Contains(leftElement.First))
                        {
                            continue;
                        }

                        Pair <TInputOutput, TLeftKey> oldEntry;
                        Wrapper <TInputOutput>        wrappedLeftElement = new Wrapper <TInputOutput>(leftElement.First);
                        if (!leftLookup.TryGetValue(wrappedLeftElement, out oldEntry) || _leftKeyComparer.Compare(leftKey, oldEntry.Second) < 0)
                        {
                            // For each "elem" value, we store the smallest key, and the element value that had that key.
                            // Note that even though two element values are "equal" according to the EqualityComparer,
                            // we still cannot choose arbitrarily which of the two to yield.
                            leftLookup[wrappedLeftElement] = new Pair <TInputOutput, TLeftKey>(leftElement.First, leftKey);
                        }
                    }

                    _outputEnumerator = leftLookup.GetEnumerator();
                }

                if (_outputEnumerator.MoveNext())
                {
                    Pair <TInputOutput, TLeftKey> currentPair = _outputEnumerator.Current.Value;
                    currentElement = currentPair.First;
                    currentKey     = currentPair.Second;
                    return(true);
                }

                return(false);
            }
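
A consumer-level sketch of the ordered Except this enumerator appears to implement (the sample arrays are illustrative):

using System;
using System.Linq;

class ExceptUsage
{
    static void Main()
    {
        int[] left  = { 1, 2, 2, 3, 4, 5 };
        int[] right = { 2, 4 };

        // The right side is hashed into a set; the left side is then scanned, and for each
        // distinct surviving value the occurrence with the smallest key is the one yielded.
        int[] result = left.AsParallel().AsOrdered()
                           .Except(right.AsParallel())
                           .ToArray();

        Console.WriteLine(string.Join(",", result)); // 1,3,5
    }
}
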
Example #19
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TResult currentElement, ref TKey currentKey)
            {
                Debug.Assert(_sharedIndices != null);

                // If the buffer has not been created, we will populate it lazily on demand.
                if (_buffer == null && _count > 0)
                {
                    // Create a buffer, but don't publish it yet (in case of exception).
                    List <Pair> buffer = new List <Pair>();

                    // Enter the search phase. In this phase, all partitions race to populate
                    // the shared indices with their first 'count' contiguous elements.
                    TResult current = default(TResult);
                    TKey    index   = default(TKey);
                    int     i       = 0; //counter to help with cancellation
                    while (buffer.Count < _count && _source.MoveNext(ref current, ref index))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        // Add the current element to our buffer.
                        buffer.Add(new Pair(current, index));

                        // Now we will try to insert our index into the shared indices list, quitting if
                        // our index is greater than all of the indices already inside it.
                        lock (_sharedIndices)
                        {
                            if (!_sharedIndices.Insert(index))
                            {
                                // We have read past the maximum index. We can move to the barrier now.
                                break;
                            }
                        }
                    }

                    // Before exiting the search phase, we will synchronize with others. This is a barrier.
                    _sharedBarrier.Signal();
                    _sharedBarrier.Wait(_cancellationToken);

                    // Publish the buffer and set the index to just before the 1st element.
                    _buffer      = buffer;
                    _bufferIndex = new Shared <int>(-1);
                }

                // Now either enter (or continue) the yielding phase. As soon as we reach this, we know the
                // index of the 'count'-th input element.
                if (_take)
                {
                    // In the case of a Take, we will yield each element from our buffer whose
                    // index is less than or equal to the 'count'-th index found.
                    if (_count == 0 || _bufferIndex.Value >= _buffer.Count - 1)
                    {
                        return(false);
                    }

                    // Increment the index, and remember the values.
                    ++_bufferIndex.Value;
                    currentElement = (TResult)_buffer[_bufferIndex.Value].First;
                    currentKey     = (TKey)_buffer[_bufferIndex.Value].Second;

                    // Only yield the element if its index is less than or equal to the max index.
                    return(_sharedIndices.Count == 0 ||
                           _keyComparer.Compare((TKey)_buffer[_bufferIndex.Value].Second, _sharedIndices.MaxValue) <= 0);
                }
                else
                {
                    TKey minKey = default(TKey);

                    // If the count to skip was greater than 0, look at the buffer.
                    if (_count > 0)
                    {
                        // If there wasn't enough input to skip, return right away.
                        if (_sharedIndices.Count < _count)
                        {
                            return(false);
                        }

                        minKey = _sharedIndices.MaxValue;

                        // In the case of a skip, we must skip over elements whose index is less than or equal
                        // to the 'count'-th index found. Once we've exhausted the buffer, we must go back and continue
                        // enumerating the data source until it is empty.
                        if (_bufferIndex.Value < _buffer.Count - 1)
                        {
                            for (_bufferIndex.Value++; _bufferIndex.Value < _buffer.Count; _bufferIndex.Value++)
                            {
                                // If the current buffered element's index is greater than the 'count'-th index,
                                // we will yield it as a result.
                                if (_keyComparer.Compare((TKey)_buffer[_bufferIndex.Value].Second, minKey) > 0)
                                {
                                    currentElement = (TResult)_buffer[_bufferIndex.Value].First;
                                    currentKey     = (TKey)_buffer[_bufferIndex.Value].Second;
                                    return(true);
                                }
                            }
                        }
                    }

                    // Lastly, so long as our input still has elements, they will be yieldable.
                    if (_source.MoveNext(ref currentElement, ref currentKey))
                    {
                        Debug.Assert(_count <= 0 || _keyComparer.Compare(currentKey, minKey) > 0,
                                     "expected remaining element indices to be greater than smallest");
                        return(true);
                    }
                }

                return(false);
            }
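
This variant compares TKey order keys with an IComparer<TKey> rather than raw int indices, which a Take placed after an ordering operator presumably exercises. A sketch with made-up data:

using System;
using System.Linq;

class TakeAfterOrderByUsage
{
    static void Main()
    {
        string[] words = { "plinq", "merge", "partition", "barrier", "buffer", "key" };

        // The key-comparer variant above exists for streams whose order keys are not plain
        // int indices; an OrderBy-then-Take pipeline is the typical way such keys arise.
        string[] shortest = words.AsParallel()
                                 .OrderBy(w => w.Length)
                                 .Take(3)
                                 .ToArray();

        Console.WriteLine(string.Join(",", shortest)); // "key" plus the two five-letter words
    }
}
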
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
            {
                Contract.Assert(m_source != null);

                if (m_alreadySearched)
                {
                    return(false);
                }

                // Look for the lowest element.
                TSource candidate    = default(TSource);
                TKey    candidateKey = default(TKey);

                try
                {
                    TSource value = default(TSource);
                    TKey    key   = default(TKey);
                    int     i     = 0;
                    while (m_source.MoveNext(ref value, ref key))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        // If the predicate is null or the current element satisfies it, we have found the
                        // current partition's "candidate" for the first element.  Note it.
                        if (m_predicate == null || m_predicate(value))
                        {
                            candidate    = value;
                            candidateKey = key;

                            lock (m_operatorState)
                            {
                                if (m_operatorState.m_partitionId == -1 || m_keyComparer.Compare(candidateKey, m_operatorState.m_key) < 0)
                                {
                                    m_operatorState.m_key         = candidateKey;
                                    m_operatorState.m_partitionId = m_partitionId;
                                }
                            }

                            break;
                        }
                    }
                }
                finally
                {
                    // No matter whether we exit due to an exception or normal completion, we must ensure
                    // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                    m_sharedBarrier.Signal();
                }

                m_alreadySearched = true;

                // Wait only if we may have the result
                if (m_partitionId == m_operatorState.m_partitionId)
                {
                    m_sharedBarrier.Wait(m_cancellationToken);

                    // Now re-read the shared index. If it's the same as ours, we won and return true.
                    if (m_partitionId == m_operatorState.m_partitionId)
                    {
                        currentElement = candidate;
                        currentKey     = 0; // 1st (and only) element, so we hardcode the output index to 0.
                        return(true);
                    }
                }

                // If we got here, we didn't win. Return false.
                return(false);
            }
Example #21
            //---------------------------------------------------------------------------------------
            // Walks the two data sources, left and then right, to produce the union.
            //

            internal override bool MoveNext(ref TInputOutput currentElement, ref ConcatKey currentKey)
            {
                Debug.Assert(_leftSource != null);
                Debug.Assert(_rightSource != null);

                if (_outputEnumerator == null)
                {
                    IEqualityComparer <Wrapper <TInputOutput> > wrapperComparer = new WrapperEqualityComparer <TInputOutput>(_comparer);
                    Dictionary <Wrapper <TInputOutput>, Pair>   union           =
                        new Dictionary <Wrapper <TInputOutput>, Pair>(wrapperComparer);

                    Pair     elem    = new Pair(default(TInputOutput), default(NoKeyMemoizationRequired));
                    TLeftKey leftKey = default(TLeftKey);

                    int i = 0;
                    while (_leftSource.MoveNext(ref elem, ref leftKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        ConcatKey key =
                            ConcatKey.MakeLeft <TLeftKey, TRightKey>(_leftOrdered ? leftKey : default(TLeftKey));
                        Pair oldEntry;
                        Wrapper <TInputOutput> wrappedElem = new Wrapper <TInputOutput>((TInputOutput)elem.First);

                        if (!union.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(key, (ConcatKey)oldEntry.Second) < 0)
                        {
                            union[wrappedElem] = new Pair(elem.First, key);
                        }
                    }

                    TRightKey rightKey = default(TRightKey);
                    while (_rightSource.MoveNext(ref elem, ref rightKey))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        ConcatKey key =
                            ConcatKey.MakeRight <TLeftKey, TRightKey>(_rightOrdered ? rightKey : default(TRightKey));
                        Pair oldEntry;
                        Wrapper <TInputOutput> wrappedElem = new Wrapper <TInputOutput>((TInputOutput)elem.First);

                        if (!union.TryGetValue(wrappedElem, out oldEntry) || _keyComparer.Compare(key, (ConcatKey)oldEntry.Second) < 0)
                        {
                            union[wrappedElem] = new Pair(elem.First, key);
                        }
                    }

                    _outputEnumerator = union.GetEnumerator();
                }

                if (_outputEnumerator.MoveNext())
                {
                    Pair current = _outputEnumerator.Current.Value;
                    currentElement = (TInputOutput)current.First;
                    currentKey     = (ConcatKey)current.Second;
                    return(true);
                }

                return(false);
            }
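
A small sketch of the ordered Union this enumerator computes; the inputs are invented for illustration:

using System;
using System.Linq;

class UnionUsage
{
    static void Main()
    {
        int[] left  = { 3, 1, 3, 5 };
        int[] right = { 5, 7, 1 };

        // Each distinct value keeps the smallest ConcatKey it was seen with, and left keys
        // order before right keys, so the output follows first occurrence across both inputs.
        int[] result = left.AsParallel().AsOrdered()
                           .Union(right.AsParallel().AsOrdered())
                           .ToArray();

        Console.WriteLine(string.Join(",", result)); // expected: 3,1,5,7
    }
}
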
Example #22
        internal override bool MoveNext(ref TOutput currentElement, ref TLeftKey currentKey)
        {
            Mutables <TLeftInput, TLeftKey, TRightInput, THashKey, TOutput> mutables = this.m_mutables;

            if (mutables == null)
            {
                mutables = this.m_mutables = new Mutables <TLeftInput, TLeftKey, TRightInput, THashKey, TOutput>();
                mutables.m_rightHashLookup = new HashLookup <THashKey, Pair <TRightInput, ListChunk <TRightInput> > >(this.m_keyComparer);
                Pair <TRightInput, THashKey> pair = new Pair <TRightInput, THashKey>();
                int num  = 0;
                int num2 = 0;
                while (this.m_rightSource.MoveNext(ref pair, ref num))
                {
                    if ((num2++ & 0x3f) == 0)
                    {
                        CancellationState.ThrowIfCanceled(this.m_cancellationToken);
                    }
                    TRightInput first  = pair.First;
                    THashKey    second = pair.Second;
                    if (second != null)
                    {
                        Pair <TRightInput, ListChunk <TRightInput> > pair2 = new Pair <TRightInput, ListChunk <TRightInput> >();
                        if (!mutables.m_rightHashLookup.TryGetValue(second, ref pair2))
                        {
                            pair2 = new Pair <TRightInput, ListChunk <TRightInput> >(first, null);
                            if (this.m_groupResultSelector != null)
                            {
                                pair2.Second = new ListChunk <TRightInput>(2);
                                pair2.Second.Add(first);
                            }
                            mutables.m_rightHashLookup.Add(second, pair2);
                        }
                        else
                        {
                            if (pair2.Second == null)
                            {
                                pair2.Second = new ListChunk <TRightInput>(2);
                                mutables.m_rightHashLookup[second] = pair2;
                            }
                            pair2.Second.Add(first);
                        }
                    }
                }
            }
            ListChunk <TRightInput> currentRightMatches = mutables.m_currentRightMatches;

            if ((currentRightMatches != null) && (mutables.m_currentRightMatchesIndex == currentRightMatches.Count))
            {
                currentRightMatches = mutables.m_currentRightMatches = currentRightMatches.Next;
                mutables.m_currentRightMatchesIndex = 0;
            }
            if (mutables.m_currentRightMatches == null)
            {
                Pair <TLeftInput, THashKey> pair3 = new Pair <TLeftInput, THashKey>();
                TLeftKey local3 = default(TLeftKey);
                while (this.m_leftSource.MoveNext(ref pair3, ref local3))
                {
                    if ((mutables.m_outputLoopCount++ & 0x3f) == 0)
                    {
                        CancellationState.ThrowIfCanceled(this.m_cancellationToken);
                    }
                    Pair <TRightInput, ListChunk <TRightInput> > pair4 = new Pair <TRightInput, ListChunk <TRightInput> >();
                    TLeftInput local4 = pair3.First;
                    THashKey   key    = pair3.Second;
                    if (((key != null) && mutables.m_rightHashLookup.TryGetValue(key, ref pair4)) && (this.m_singleResultSelector != null))
                    {
                        mutables.m_currentRightMatches      = pair4.Second;
                        mutables.m_currentRightMatchesIndex = 0;
                        currentElement = this.m_singleResultSelector(local4, pair4.First);
                        currentKey     = local3;
                        if (pair4.Second != null)
                        {
                            mutables.m_currentLeft    = local4;
                            mutables.m_currentLeftKey = local3;
                        }
                        return(true);
                    }
                    if (this.m_groupResultSelector != null)
                    {
                        IEnumerable <TRightInput> enumerable = pair4.Second;
                        if (enumerable == null)
                        {
                            enumerable = (IEnumerable <TRightInput>)ParallelEnumerable.Empty <TRightInput>();
                        }
                        currentElement = this.m_groupResultSelector(local4, enumerable);
                        currentKey     = local3;
                        return(true);
                    }
                }
                return(false);
            }
            currentElement = this.m_singleResultSelector(mutables.m_currentLeft, mutables.m_currentRightMatches.m_chunk[mutables.m_currentRightMatchesIndex]);
            currentKey     = mutables.m_currentLeftKey;
            mutables.m_currentRightMatchesIndex++;
            return(true);
        }
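
The hash-join enumerator above builds a lookup from its right child and probes it with the left child. A consumer-side sketch with invented data:

using System;
using System.Linq;

class JoinUsage
{
    static void Main()
    {
        var customers = new[] { (Id: 1, Name: "Ada"), (Id: 2, Name: "Grace") };
        var orders    = new[] { (Id: 1, Item: "keyboard"), (Id: 2, Item: "mouse"), (Id: 1, Item: "monitor") };

        // One side is hashed by key and the other is streamed against it; Join emits one result
        // per match, while GroupJoin would hand the whole ListChunk of matches to the selector.
        string[] pairs = customers.AsParallel()
            .Join(orders.AsParallel(), c => c.Id, o => o.Id, (c, o) => c.Name + ":" + o.Item)
            .ToArray();

        Console.WriteLine(pairs.Length); // 3 matches
    }
}
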
Example #23
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TOutput currentElement, ref Pair currentKey)
            {
                while (true)
                {
                    if (_currentRightSource == null)
                    {
                        _mutables = new Mutables();

                        // Check cancellation every few lhs-enumerations in case none of them are producing
                        // any outputs.  Otherwise, we rely on the consumer of this operator to be performing the checks.
                        if ((_mutables._lhsCount++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        // We don't have a "current" right enumerator to use. We have to fetch the next
                        // one. If the left has run out of elements, however, we're done and just return
                        // false right away.

                        if (!_leftSource.MoveNext(ref _mutables._currentLeftElement, ref _mutables._currentLeftKey))
                        {
                            return(false);
                        }

                        // Use the source selection routine to create a right child.
                        IEnumerable <TRightInput> rightChild = _selectManyOperator._rightChildSelector(_mutables._currentLeftElement);

                        Contract.Assert(rightChild != null);
                        _currentRightSource = rightChild.GetEnumerator();

                        Contract.Assert(_currentRightSource != null);

                        // If we have no result selector, we will need to access the Current element of the right
                        // data source as though it is a TOutput. Unfortunately, we know that TRightInput must
                        // equal TOutput (we check it during operator construction), but the type system doesn't.
                        // Thus we would have to cast the result of invoking Current from type TRightInput to
                        // TOutput. This is no good, since the results could be value types. Instead, we save the
                        // enumerator object as an IEnumerator<TOutput> and access that later on.
                        if (_selectManyOperator._resultSelector == null)
                        {
                            _currentRightSourceAsOutput = (IEnumerator <TOutput>)(object) _currentRightSource;
                            Contract.Assert(_currentRightSourceAsOutput == _currentRightSource,
                                            "these must be equal, otherwise the surrounding logic will be broken");
                        }
                    }

                    if (_currentRightSource.MoveNext())
                    {
                        _mutables._currentRightSourceIndex++;

                        // If the inner data source has an element, we can yield it.
                        if (_selectManyOperator._resultSelector != null)
                        {
                            // In the case of a selection function, use that to yield the next element.
                            currentElement = _selectManyOperator._resultSelector(_mutables._currentLeftElement, _currentRightSource.Current);
                        }
                        else
                        {
                            // Otherwise, the right input and output types must be the same. We use the
                            // casted copy of the current right source and just return its current element.
                            Contract.Assert(_currentRightSourceAsOutput != null);
                            currentElement = _currentRightSourceAsOutput.Current;
                        }
                        currentKey = new Pair(_mutables._currentLeftKey, _mutables._currentRightSourceIndex);

                        return(true);
                    }
                    else
                    {
                        // Otherwise, we have exhausted the right data source. Loop back around and try
                        // to get the next left element, then its right, and so on.
                        _currentRightSource.Dispose();
                        _currentRightSource         = null;
                        _currentRightSourceAsOutput = null;
                    }
                }
            }
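
A minimal sketch of the SelectMany shape this enumerator handles; the right-child selector here is just string.Split and the data is illustrative:

using System;
using System.Linq;

class SelectManyUsage
{
    static void Main()
    {
        string[] lines = { "a b", "c", "d e f" };

        // For each left element the selector yields an inner sequence; the output key pairs
        // the left element's key with the inner element's position, as in the snippet above.
        string[] words = lines.AsParallel().AsOrdered()
                              .SelectMany(line => line.Split(' '))
                              .ToArray();

        Console.WriteLine(string.Join(",", words)); // a,b,c,d,e,f
    }
}
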
Example #24
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TSource currentElement, ref int currentKey)
            {
                Debug.Assert(_source != null);

                if (_alreadySearched)
                {
                    return(false);
                }

                // Look for the greatest element.
                TSource candidate      = default(TSource)!;
                TKey    candidateKey   = default(TKey)!;
                bool    candidateFound = false;

                try
                {
                    int     loopCount = 0; //counter to help with cancellation
                    TSource value     = default(TSource)!;
                    TKey    key       = default(TKey)!;
                    while (_source.MoveNext(ref value!, ref key))
                    {
                        if ((loopCount & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        // If the predicate is null or the current element satisfies it, we will remember
                        // it as the current partition's candidate for the last element, and move on.
                        if (_predicate == null || _predicate(value))
                        {
                            candidate      = value;
                            candidateKey   = key;
                            candidateFound = true;
                        }

                        loopCount++;
                    }

                    // If we found a candidate element, try to publish it, so long as it's greater.
                    if (candidateFound)
                    {
                        lock (_operatorState)
                        {
                            if (_operatorState._partitionId == -1 || _keyComparer.Compare(candidateKey, _operatorState._key) > 0)
                            {
                                _operatorState._partitionId = _partitionId;
                                _operatorState._key         = candidateKey;
                            }
                        }
                    }
                }
                finally
                {
                    // No matter whether we exit due to an exception or normal completion, we must ensure
                    // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                    _sharedBarrier.Signal();
                }

                _alreadySearched = true;

                // Only if we have a candidate do we wait.
                if (_partitionId == _operatorState._partitionId)
                {
                    _sharedBarrier.Wait(_cancellationToken);

                    // Now re-read the shared index. If it's the same as ours, we won and return true.
                    if (_operatorState._partitionId == _partitionId)
                    {
                        currentElement = candidate;
                        currentKey     = 0; // 1st (and only) element, so we hardcode the output index to 0.
                        return(true);
                    }
                }

                // If we got here, we didn't win. Return false.
                return(false);
            }
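
Consumer-side, this is the shape of the Last operator; a short sketch with an illustrative predicate:

using System;
using System.Linq;

class LastOperatorUsage
{
    static void Main()
    {
        int[] data = Enumerable.Range(0, 1000000).ToArray();

        // Every partition remembers its greatest-keyed match, publishes it under the shared
        // lock, and only the partition owning the overall greatest key reports a result.
        int last = data.AsParallel().AsOrdered().Last(x => x % 2 == 0);

        Console.WriteLine(last); // 999998
    }
}
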
Example #25
            //---------------------------------------------------------------------------------------
            // Walks the two data sources, left and then right, to produce the union.
            //

            internal override bool MoveNext([MaybeNullWhen(false), AllowNull] ref TInputOutput currentElement, ref int currentKey)
            {
                if (_hashLookup == null)
                {
                    _hashLookup      = new Set <TInputOutput>(_comparer);
                    _outputLoopCount = new Shared <int>(0);
                }

                Debug.Assert(_hashLookup != null);

                // Enumerate the left and then right data source. When each is done, we set the
                // field to null so we will skip it upon subsequent calls to MoveNext.
                if (_leftSource != null)
                {
                    // Iterate over this set's elements until we find a unique element.
                    TLeftKey keyUnused = default(TLeftKey)!;
                    Pair <TInputOutput, NoKeyMemoizationRequired> currentLeftElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);

                    int i = 0;
                    while (_leftSource.MoveNext(ref currentLeftElement, ref keyUnused))
                    {
                        if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        // We ensure we never return duplicates by tracking them in our set.
                        if (_hashLookup.Add(currentLeftElement.First))
                        {
#if DEBUG
                            currentKey = unchecked ((int)0xdeadbeef);
#endif
                            currentElement = currentLeftElement.First;
                            return(true);
                        }
                    }

                    _leftSource.Dispose();
                    _leftSource = null;
                }


                if (_rightSource != null)
                {
                    // Iterate over this set's elements until we find a unique element.
                    TRightKey keyUnused = default(TRightKey)!;
                    Pair <TInputOutput, NoKeyMemoizationRequired> currentRightElement = default(Pair <TInputOutput, NoKeyMemoizationRequired>);

                    while (_rightSource.MoveNext(ref currentRightElement, ref keyUnused))
                    {
                        Debug.Assert(_outputLoopCount != null);
                        if ((_outputLoopCount.Value++ & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(_cancellationToken);
                        }

                        // We ensure we never return duplicates by tracking them in our set.
                        if (_hashLookup.Add(currentRightElement.First))
                        {
#if DEBUG
                            currentKey = unchecked ((int)0xdeadbeef);
#endif
                            currentElement = currentRightElement.First;
                            return(true);
                        }
                    }

                    _rightSource.Dispose();
                    _rightSource = null;
                }

                return(false);
            }
            internal override bool MoveNext(ref TResult currentElement, ref int currentKey)
            {
                if (this.m_buffer != null)
                {
                    goto Label_0114;
                }
                List <Pair <TResult, int> > list = new List <Pair <TResult, int> >();

                try
                {
                    TResult local = default(TResult);
                    int     num   = 0;
                    int     num2  = 0;
                    while (this.m_source.MoveNext(ref local, ref num))
                    {
                        bool flag;
                        if ((num2++ & 0x3f) == 0)
                        {
                            CancellationState.ThrowIfCanceled(this.m_cancellationToken);
                        }
                        list.Add(new Pair <TResult, int>(local, num));
                        int num3 = this.m_sharedLowFalse.Value;
                        if ((num3 != -1) && (num > num3))
                        {
                            goto Label_00F0;
                        }
                        if (this.m_predicate != null)
                        {
                            flag = this.m_predicate(local);
                        }
                        else
                        {
                            flag = this.m_indexedPredicate(local, num);
                        }
                        if (!flag)
                        {
                            SpinWait wait = new SpinWait();
                            while (true)
                            {
                                int comparand = Thread.VolatileRead(ref this.m_sharedLowFalse.Value);
                                if (((comparand != -1) && (comparand < num)) || (Interlocked.CompareExchange(ref this.m_sharedLowFalse.Value, num, comparand) == comparand))
                                {
                                    goto Label_00F0;
                                }
                                wait.SpinOnce();
                            }
                        }
                    }
                }
                finally
                {
                    this.m_sharedBarrier.Signal();
                }
Label_00F0:
                this.m_sharedBarrier.Wait(this.m_cancellationToken);
                this.m_buffer      = list;
                this.m_bufferIndex = new Shared <int>(-1);
Label_0114:
                if (this.m_take)
                {
                    if (this.m_bufferIndex.Value >= (this.m_buffer.Count - 1))
                    {
                        return(false);
                    }
                    this.m_bufferIndex.Value += 1;
                    Pair <TResult, int> pair = this.m_buffer[this.m_bufferIndex.Value];
                    currentElement = pair.First;
                    Pair <TResult, int> pair2 = this.m_buffer[this.m_bufferIndex.Value];
                    currentKey = pair2.Second;
                    if (this.m_sharedLowFalse.Value != -1)
                    {
                        Pair <TResult, int> pair3 = this.m_buffer[this.m_bufferIndex.Value];
                        return(this.m_sharedLowFalse.Value > pair3.Second);
                    }
                    return(true);
                }
                if (this.m_sharedLowFalse.Value == -1)
                {
                    return(false);
                }
                if (this.m_bufferIndex.Value < (this.m_buffer.Count - 1))
                {
                    this.m_bufferIndex.Value += 1;
                    while (this.m_bufferIndex.Value < this.m_buffer.Count)
                    {
                        Pair <TResult, int> pair4 = this.m_buffer[this.m_bufferIndex.Value];
                        if (pair4.Second >= this.m_sharedLowFalse.Value)
                        {
                            Pair <TResult, int> pair5 = this.m_buffer[this.m_bufferIndex.Value];
                            currentElement = pair5.First;
                            Pair <TResult, int> pair6 = this.m_buffer[this.m_bufferIndex.Value];
                            currentKey = pair6.Second;
                            return(true);
                        }
                        this.m_bufferIndex.Value += 1;
                    }
                }
                return(this.m_source.MoveNext(ref currentElement, ref currentKey));
            }
Example #27
        private readonly int _queryId;                         // Id of this query execution.


        //-----------------------------------------------------------------------------------
        // Creates a new shared bit of state among tasks.
        //

        internal QueryTaskGroupState(CancellationState cancellationState, int queryId)
        {
            _cancellationState = cancellationState;
            _queryId           = queryId;
        }
            protected override bool MoveNextCore(ref float? currentElement)
            {
                QueryOperatorEnumerator <float?, TKey> source = this.m_source;
                TKey currentKey = default(TKey);

                if (!source.MoveNext(ref currentElement, ref currentKey))
                {
                    return(false);
                }
                int num = 0;

                if (this.m_sign != -1)
                {
                    float? nullable2 = null;
                    while (source.MoveNext(ref nullable2, ref currentKey))
                    {
                        if ((num++ & 0x3f) == 0)
                        {
                            CancellationState.ThrowIfCanceled(base.m_cancellationToken);
                        }
                        if (nullable2.HasValue)
                        {
                            if (currentElement.HasValue)
                            {
                                float? nullable5 = nullable2;
                                float? nullable6 = currentElement;
                                if (((nullable5.GetValueOrDefault() <= nullable6.GetValueOrDefault()) || !(nullable5.HasValue & nullable6.HasValue)) && !float.IsNaN(currentElement.GetValueOrDefault()))
                                {
                                    continue;
                                }
                            }
                            currentElement = nullable2;
                        }
                    }
                }
                else
                {
                    float? nullable = null;
                    while (source.MoveNext(ref nullable, ref currentKey))
                    {
                        if ((num++ & 0x3f) == 0)
                        {
                            CancellationState.ThrowIfCanceled(base.m_cancellationToken);
                        }
                        if (nullable.HasValue)
                        {
                            if (currentElement.HasValue)
                            {
                                float?nullable3 = nullable;
                                float?nullable4 = currentElement;
                                if (((nullable3.GetValueOrDefault() >= nullable4.GetValueOrDefault()) || !(nullable3.HasValue & nullable4.HasValue)) && !float.IsNaN(nullable.GetValueOrDefault()))
                                {
                                    continue;
                                }
                            }
                            currentElement = nullable;
                        }
                    }
                }
                return(true);
            }
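            //---------------------------------------------------------------------------------------
            // A minimal, self-contained sketch (not part of the original source) of the comparison
            // rule the maximum branch above applies. The class and method names are hypothetical and
            // exist only for illustration.
            //

            internal static class NullableFloatMaxSketch
            {
                // Returns the larger of the two values: a non-null candidate wins when it is strictly
                // greater than the running value, or when the running value is null or NaN.
                internal static float? MaxIgnoringNaN(float? running, float? candidate)
                {
                    if (!candidate.HasValue)
                    {
                        return running;   // nulls never participate in the comparison
                    }
                    if (!running.HasValue ||
                        candidate.Value > running.Value ||
                        float.IsNaN(running.Value))
                    {
                        return candidate;
                    }
                    return running;
                }
            }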
 /// <summary>
 /// WrapEnumerable.ExceptionAggregator wraps the enumerable with another enumerator that will
 /// catch exceptions, and wrap each with an AggregateException.
 ///
 /// If PLINQ decides to execute a query sequentially, we will reuse LINQ-to-objects
 /// implementations for the different operators. However, we still need to throw
 /// AggregateException in the cases when parallel execution would have thrown an
 /// AggregateException. Thus, we introduce a wrapper enumerator that catches exceptions
 /// and wraps them with an AggregateException.
 /// </summary>
 internal static IEnumerable <TElement> WrapEnumerable <TElement>(IEnumerable <TElement> source, CancellationState cancellationState)
 {
     using (IEnumerator <TElement> enumerator = source.GetEnumerator())
     {
         while (true)
         {
             TElement elem = default(TElement);
             try
             {
                 if (!enumerator.MoveNext())
                 {
                     yield break;
                 }
                 elem = enumerator.Current;
             }
             catch (ThreadAbortException)
             {
                 // Do not wrap ThreadAbortExceptions
                 throw;
             }
             catch (Exception ex)
             {
                 ThrowOCEorAggregateException(ex, cancellationState);
             }
             yield return(elem);
         }
     }
 }
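 //-----------------------------------------------------------------------------------
 // A standalone sketch (not part of the original source) of the same wrapping pattern,
 // without PLINQ's internal CancellationState plumbing and without the
 // ThreadAbortException special case: any exception thrown while the underlying
 // sequence is enumerated is re-surfaced as an AggregateException, which is what a
 // parallel execution of the query would have thrown. The class and method names are
 // hypothetical; System and System.Collections.Generic are assumed to be in scope.
 //

 internal static class ExceptionAggregatorSketch
 {
     internal static IEnumerable <TElement> WrapWithAggregateException <TElement>(IEnumerable <TElement> source)
     {
         using (IEnumerator <TElement> enumerator = source.GetEnumerator())
         {
             while (true)
             {
                 TElement elem = default(TElement);
                 try
                 {
                     if (!enumerator.MoveNext())
                     {
                         yield break;
                     }
                     elem = enumerator.Current;
                 }
                 catch (Exception ex)
                 {
                     // Mirror what parallel execution would surface to the caller.
                     throw new AggregateException(ex);
                 }
                 yield return elem;
             }
         }
     }
 }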
            //---------------------------------------------------------------------------------------
            // This API, upon the first time calling it, walks the entire source query tree. It begins
            // with an accumulator value set to the aggregation operator's seed, and always passes
            // the accumulator along with the current element from the data source to the binary
            // intermediary aggregation operator. The return value is kept in the accumulator. At
            // the end, we will have our intermediate result, ready for final aggregation.
            //

            internal override bool MoveNext(ref TIntermediate currentElement, ref int currentKey)
            {
                Debug.Assert(_reduceOperator != null);
                Debug.Assert(_reduceOperator._intermediateReduce != null, "expected a compiled operator");

                // Only produce a single element.  Return false if MoveNext() was already called before.
                if (_accumulated)
                {
                    return(false);
                }
                _accumulated = true;

                bool          hadNext     = false;
                TIntermediate accumulator = default(TIntermediate);

                // Initialize the accumulator.
                if (_reduceOperator._seedIsSpecified)
                {
                    // If the seed is specified, initialize accumulator to the seed value.
                    accumulator = _reduceOperator._seedFactory == null
                                      ? _reduceOperator._seed
                                      : _reduceOperator._seedFactory();
                }
                else
                {
                    // If the seed is not specified, then we take the first element as the seed.
                    // Seed may be unspecified only if TInput is the same as TIntermediate.
                    Debug.Assert(typeof(TInput) == typeof(TIntermediate));

                    TInput acc          = default(TInput);
                    TKey   accKeyUnused = default(TKey);
                    if (!_source.MoveNext(ref acc, ref accKeyUnused))
                    {
                        return(false);
                    }
                    hadNext     = true;
                    accumulator = (TIntermediate)((object)acc);
                }

                // Scan through the source and accumulate the result.
                TInput input     = default(TInput);
                TKey   keyUnused = default(TKey);
                int    i         = 0;

                while (_source.MoveNext(ref input, ref keyUnused))
                {
                    if ((i++ & CancellationState.POLL_INTERVAL) == 0)
                    {
                        CancellationState.ThrowIfCanceled(_cancellationToken);
                    }
                    hadNext     = true;
                    accumulator = _reduceOperator._intermediateReduce(accumulator, input);
                }

                if (hadNext)
                {
                    currentElement = accumulator;
                    currentKey     = _partitionIndex; // A reduction's "index" is just its partition number.
                    return(true);
                }

                return(false);
            }
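            //---------------------------------------------------------------------------------------
            // A simplified sequential sketch (not part of the original source) of what the method
            // above computes for one partition: fold the partition's elements into an accumulator,
            // starting either from an explicit seed or, when no seed was specified, from the first
            // element. The sketch collapses TInput and TIntermediate into a single type parameter,
            // and FoldPartition is a hypothetical name; System and System.Collections.Generic are
            // assumed to be in scope.
            //

            internal static class IntermediateReduceSketch
            {
                internal static bool FoldPartition<T>(IEnumerable<T> partition, Func<T, T, T> reduce,
                                                      bool seedIsSpecified, T seed, out T result)
                {
                    bool hadNext     = false;
                    T    accumulator = seed;

                    foreach (T element in partition)
                    {
                        // Without a seed, the first element acts as the seed; every later element
                        // is folded in with the intermediate reduce function.
                        accumulator = (!seedIsSpecified && !hadNext) ? element : reduce(accumulator, element);
                        hadNext     = true;
                    }

                    result = accumulator;
                    return hadNext;   // an empty partition contributes no intermediate result
                }
            }

            // For example, folding { 1, 2, 3 } with (a, b) => a + b and an explicit seed of 0
            // leaves the result at 6 and returns true; an empty partition returns false.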
Example n. 31
        private int m_queryId; // Id of this query execution.


        //-----------------------------------------------------------------------------------
        // Creates a new shared bit of state among tasks.
        //

        internal QueryTaskGroupState(CancellationState cancellationState, int queryId)
        {
            m_cancellationState = cancellationState;
            m_queryId = queryId;
        }
Example n. 32
            //---------------------------------------------------------------------------------------
            // Straightforward IEnumerator<T> methods.
            //

            internal override bool MoveNext(ref TSource currentElement, ref int currentKey)
            {
                Contract.Assert(m_source != null);

                if (m_alreadySearched)
                {
                    return(false);
                }

                // Look for the greatest element.
                TSource candidate      = default(TSource);
                int     candidateIndex = -1;

                try
                {
                    TSource current   = default(TSource);
                    int     key       = default(int);
                    int     loopCount = 0; //counter to help with cancellation
                    while (m_source.MoveNext(ref current, ref key))
                    {
                        if ((loopCount & CancellationState.POLL_INTERVAL) == 0)
                        {
                            CancellationState.ThrowIfCanceled(m_cancellationToken);
                        }

                        // If the predicate is null or the current element satisfies it, we will remember
                        // it as the current partition's candidate for the last element, and move on.
                        if (m_predicate == null || m_predicate(current))
                        {
                            candidate      = current;
                            candidateIndex = key;
                        }

                        loopCount++;
                    }

                    // If we found a candidate element, try to publish it, so long as it's greater.
                    if (candidateIndex != -1)
                    {
                        int observedSharedIndex;
                        do
                        {
                            observedSharedIndex = m_sharedLastCandidate.Value;
                        }while ((observedSharedIndex == -1 || candidateIndex > observedSharedIndex) &&
                                Interlocked.CompareExchange(ref m_sharedLastCandidate.Value, candidateIndex, observedSharedIndex) != observedSharedIndex);
                    }
                }
                finally
                {
                    // No matter whether we exit due to an exception or normal completion, we must ensure
                    // that we signal other partitions that we have completed.  Otherwise, we can cause deadlocks.
                    m_sharedBarrier.Signal();
                }

                m_alreadySearched = true;

                // Only if we have a candidate do we wait.
                if (candidateIndex != -1)
                {
                    m_sharedBarrier.Wait(m_cancellationToken);

                    // Now re-read the shared index. If it's the same as ours, we won and return true.
                    if (m_sharedLastCandidate.Value == candidateIndex)
                    {
                        currentElement = candidate;
                        currentKey     = 0; // 1st (and only) element, so we hardcode the output index to 0.
                        return(true);
                    }
                }

                // If we got here, we didn't win. Return false.
                return(false);
            }
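            //---------------------------------------------------------------------------------------
            // A minimal sketch (not part of the original source) of the lock-free publish step used
            // above: each partition tries to install its candidate index into a shared slot, retrying
            // only while its index is greater than whatever is currently published (or while the slot
            // still holds the unset value of -1). TryPublishGreater is a hypothetical name, and
            // System.Threading is assumed to be in scope.
            //

            internal static class LastCandidateSketch
            {
                internal static void TryPublishGreater(ref int sharedLastIndex, int candidateIndex)
                {
                    int observed;
                    do
                    {
                        observed = Volatile.Read(ref sharedLastIndex);
                        if (observed != -1 && candidateIndex <= observed)
                        {
                            return;   // another partition already published a later candidate
                        }
                    }
                    while (Interlocked.CompareExchange(ref sharedLastIndex, candidateIndex, observed) != observed);
                }
            }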