public virtual IList<ClassificationSpan> GetClassificationSpans(SnapshotSpan span)
        {
            Contract.Ensures(Contract.Result<IList<ClassificationSpan>>() != null);

            List<ClassificationSpan> classificationSpans = new List<ClassificationSpan>();

            // a previous lock timeout disabled this classifier; report no spans
            if (_failedTimeout)
            {
                return classificationSpans;
            }

            bool spanExtended = false;

            int           extendMultilineSpanToLine = 0;
            SnapshotSpan  extendedSpan = span;
            ITextSnapshot snapshot     = span.Snapshot;

            ClassifierState classifierState = _lineStatesCache.GetValue(snapshot, CreateClassifierState);

            using (_lock.UpgradableReadLock(TimeSpan.FromMilliseconds(250)))
            {
                Span   requestedSpan = span;
                TState startState    = AdjustParseSpan(classifierState, ref span);

                ICharStream input = CreateInputStream(span);
                ITokenSourceWithState<TState> lexer = CreateLexer(input, startState);

                IToken previousToken         = null;
                bool   previousTokenEndsLine = false;

                /* this is held outside the loop because only tokens which end at the end of a line
                 * impact its value.
                 */
                bool lineStateChanged = false;

                while (true)
                {
                    IToken token = lexer.NextToken();

                    // The latter is true for EOF token with span.End at the end of the document
                    bool inBounds = token.StartIndex < span.End.Position ||
                                    token.StopIndex < span.End.Position;

                    int startLineCurrent;
                    if (token.Type == CharStreamConstants.EndOfFile)
                    {
                        startLineCurrent = span.Snapshot.LineCount - 1;
                    }
                    else
                    {
                        startLineCurrent = token.Line - 1;
                    }

                    // endLinePrevious is the line number the previous token ended on
                    int endLinePrevious;
                    if (previousToken != null)
                    {
                        Contract.Assert(previousToken.StopIndex >= previousToken.StartIndex, "previousToken can't be EOF");
                        endLinePrevious = span.Snapshot.GetLineNumberFromPosition(previousToken.StopIndex);
                    }
                    else
                    {
                        endLinePrevious = span.Snapshot.GetLineNumberFromPosition(span.Start) - 1;
                    }

                    if (startLineCurrent > endLinePrevious + 1 || (startLineCurrent == endLinePrevious + 1 && !previousTokenEndsLine))
                    {
                        int firstMultilineLine = endLinePrevious;
                        if (previousToken == null || previousTokenEndsLine)
                        {
                            firstMultilineLine++;
                        }

                        for (int i = firstMultilineLine; i < startLineCurrent; i++)
                        {
                            if (!classifierState._lineStates[i].MultilineToken || lineStateChanged)
                            {
                                extendMultilineSpanToLine = i + 1;
                            }

                            SetLineState(classifierState, i, LineStateInfo.Multiline);
                        }
                    }

                    if (IsMultilineToken(span.Snapshot, lexer, token))
                    {
                        int startLine = span.Snapshot.GetLineNumberFromPosition(token.StartIndex);
                        int stopLine  = span.Snapshot.GetLineNumberFromPosition(Math.Max(token.StartIndex, token.StopIndex));
                        for (int i = startLine; i < stopLine; i++)
                        {
                            if (!classifierState._lineStates[i].MultilineToken)
                            {
                                extendMultilineSpanToLine = i + 1;
                            }

                            SetLineState(classifierState, i, LineStateInfo.Multiline);
                        }
                    }

                    bool tokenEndsLine = TokenEndsAtEndOfLine(span.Snapshot, lexer, token);
                    if (tokenEndsLine)
                    {
                        TState stateAtEndOfLine = lexer.GetCurrentState();
                        int    line             = span.Snapshot.GetLineNumberFromPosition(Math.Max(token.StartIndex, token.StopIndex));
                        lineStateChanged =
                            classifierState._lineStates[line].MultilineToken ||
                            !_stateComparer.Equals(classifierState._lineStates[line].EndLineState, stateAtEndOfLine);

                        // even if the state didn't change, we call SetLineState to make sure the _first/_lastChangedLine values get updated.
                        SetLineState(classifierState, line, new LineStateInfo(stateAtEndOfLine));

                        if (lineStateChanged)
                        {
                            if (line < span.Snapshot.LineCount - 1)
                            {
                                /* update the span's end position or the line state change won't be reflected
                                 * in the editor
                                 */
                                int endPosition = span.Snapshot.GetLineFromLineNumber(line + 1).EndIncludingLineBreak;
                                if (endPosition > extendedSpan.End)
                                {
                                    spanExtended = true;
                                    extendedSpan = new SnapshotSpan(extendedSpan.Snapshot, Span.FromBounds(extendedSpan.Start, endPosition));
                                }
                            }
                        }
                    }

                    if (token.Type == CharStreamConstants.EndOfFile)
                    {
                        break;
                    }

                    if (token.StartIndex >= span.End.Position)
                    {
                        break;
                    }

                    previousToken         = token;
                    previousTokenEndsLine = tokenEndsLine;

                    if (token.StopIndex < requestedSpan.Start)
                    {
                        continue;
                    }

                    var tokenClassificationSpans = GetClassificationSpansForToken(token, span.Snapshot);
                    if (tokenClassificationSpans != null)
                    {
                        classificationSpans.AddRange(tokenClassificationSpans);
                    }

                    if (!inBounds)
                    {
                        break;
                    }
                }
            }

            if (extendMultilineSpanToLine > 0)
            {
                int endPosition = extendMultilineSpanToLine < span.Snapshot.LineCount
                    ? span.Snapshot.GetLineFromLineNumber(extendMultilineSpanToLine).EndIncludingLineBreak
                    : span.Snapshot.Length;
                if (endPosition > extendedSpan.End)
                {
                    spanExtended = true;
                    extendedSpan = new SnapshotSpan(extendedSpan.Snapshot, Span.FromBounds(extendedSpan.Start, endPosition));
                }
            }

            if (spanExtended)
            {
                /* Subtract 1 from each of these because the spans include the line break on their last
                 * line, forcing it to appear as the first position on the following line.
                 */
                int firstLine = extendedSpan.Snapshot.GetLineNumberFromPosition(span.End);
                int lastLine  = extendedSpan.Snapshot.GetLineNumberFromPosition(extendedSpan.End) - 1;
                // when considering the last line of a document, span and extendedSpan may end on the same line
                ForceReclassifyLines(classifierState, firstLine, Math.Max(firstLine, lastLine));
            }

            return classificationSpans;
        }
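
        // Usage sketch (added for illustration; not part of the original listing). Assuming a
        // concrete classifier derived from this class, a host can request classification spans
        // for an entire snapshot like this; the method name below is hypothetical.
        public void DumpClassificationsForIllustration(ITextSnapshot snapshot)
        {
            SnapshotSpan wholeDocument = new SnapshotSpan(snapshot, 0, snapshot.Length);
            foreach (ClassificationSpan classificationSpan in GetClassificationSpans(wholeDocument))
            {
                // each ClassificationSpan pairs a snapshot range with an IClassificationType
                Console.WriteLine("{0}: {1}", classificationSpan.ClassificationType.Classification, classificationSpan.Span.GetText());
            }
        }
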
        /// <summary>
        /// Iterates over the classifiable tokens present in the source <see cref="string"/>.
        /// </summary>
        /// <param name="source">A <see cref="string"/> to classify.</param>
        /// <returns>A lazily evaluated sequence containing every token in the source <see cref="string"/>, including unclassified characters (classifier index -1).</returns>
        public IEnumerable<LexerToken> Tokenize(string source)
        {
            var classifiers = LexerLanguage.Classifiers;

            int classifiersCount = classifiers.Length;
            var classifierStates = new ClassifierState[classifiersCount];

            for (int i = 0; i < classifiersCount; i++)
            {
                var classifier = classifiers[i];
                classifierStates[i] = new ClassifierState()
                {
                    endIndex = -1
                };
                classifier.Reset();
            }

            int startIndex = 0;

            // scan one position past the end of the source; the extra iteration feeds a
            // sentinel space so classifiers still waiting on a following character can finish
            for (int charIndex = 0; charIndex < source.Length + 1; charIndex++)
            {
                char c = charIndex != source.Length
                                        ? source[charIndex]
                                        : ' ';

                bool anyContinuing = false;
                for (int i = 0; i < classifiersCount; i++)
                {
                    var classifierState = classifierStates[i];
                    if (classifierState.hasGivenUp)
                    {
                        continue;
                    }

                    var classifier = classifiers[i];
                    var result     = classifier.NextCharacter(c);
                    switch (result.Action)
                    {
                    case ClassifierActionType.GiveUp:
                    {
                        classifierState.hasGivenUp = true;
                        classifierStates[i]        = classifierState;
                        break;
                    }

                    case ClassifierActionType.TokenizeFromLast:
                    {
                        classifierState.endIndex = charIndex - 1;
                        classifierStates[i]      = classifierState;
                        break;
                    }

                    case ClassifierActionType.TokenizeImmediately:
                    {
                        classifierState.endIndex = charIndex;
                        classifierStates[i]      = classifierState;
                        break;
                    }

                    default:
                    case ClassifierActionType.ContinueReading:
                    {
                        anyContinuing = true;
                        break;
                    }
                    }
                }

                if (!anyContinuing)
                {
                    // no classifier wants more input; emit the longest recorded match,
                    // breaking ties by classifier order
                    int longest = -1;
                    for (int i = 0; i < classifiersCount; i++)
                    {
                        var classifierState = classifierStates[i];
                        if (classifierState.endIndex > longest)
                        {
                            longest = classifierState.endIndex;
                        }
                    }

                    bool tokenized = false;
                    for (int i = 0; i < classifiersCount; i++)
                    {
                        var classifierState = classifierStates[i];

                        if (classifierState.endIndex != -1 &&
                            classifierState.endIndex == longest)
                        {
                            int classifierEndIndex = classifierState.endIndex;

                            yield return new LexerToken(
                                startIndex: startIndex,
                                length: classifierEndIndex - startIndex + 1,
                                classifier: i);

                            // resume scanning immediately after the emitted token; the for
                            // loop's increment advances charIndex to the next unread character
                            startIndex = classifierEndIndex + 1;
                            charIndex  = classifierEndIndex;
                            tokenized  = true;
                            break;
                        }
                    }

                    if (!tokenized)
                    {
                        // no classifier matched anything; emit the current character as an
                        // unclassified token (classifier index -1), skipping the sentinel
                        if (charIndex != source.Length)
                        {
                            yield return new LexerToken(
                                startIndex: startIndex,
                                length: 1,
                                classifier: -1);
                        }

                        startIndex += 1;
                    }

                    // reset every classifier before scanning for the next token
                    for (int i = 0; i < classifiersCount; i++)
                    {
                        var classifier = classifiers[i];
                        classifierStates[i] = new ClassifierState()
                        {
                            endIndex = -1
                        };
                        classifier.Reset();
                    }
                }
            }
        }
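
        // Usage sketch (added for illustration; not part of the original listing). Assuming this
        // instance's LexerLanguage has its Classifiers populated, and assuming LexerToken exposes
        // StartIndex/Length/Classifier properties mirroring its constructor parameters, each
        // yielded token can be mapped back to its source text. The method name is hypothetical.
        public void DumpTokensForIllustration(string source)
        {
            foreach (LexerToken token in Tokenize(source))
            {
                // a classifier index of -1 marks a character no classifier claimed
                string text = source.Substring(token.StartIndex, token.Length);
                Console.WriteLine("classifier {0}: '{1}'", token.Classifier, text);
            }
        }
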
        /// <summary>
        /// Carries the per-line lexer state forward across a text change: line state entries are
        /// inserted or removed to match the new line count, affected lines are marked dirty, and
        /// the first/last changed line trackers are updated for the new snapshot.
        /// </summary>
        protected virtual void HandleTextBufferChangedHighPriority(object sender, TextContentChangedEventArgs e)
        {
            try
            {
                using (_lock.WriteLock(TimeSpan.FromSeconds(1)))
                {
                    ClassifierState beforeState = _lineStatesCache.GetValue(e.Before, CreateClassifierState);

                    ClassifierState afterState = _lineStatesCache.GetValue(e.After, CreateClassifierState);
                    afterState._firstChangedLine = beforeState._firstChangedLine;
                    afterState._lastChangedLine  = beforeState._lastChangedLine;
                    afterState._firstDirtyLine   = beforeState._firstDirtyLine;
                    afterState._lastDirtyLine    = beforeState._lastDirtyLine;

                    List<LineStateInfo> lineStates = new List<LineStateInfo>(beforeState._lineStates);

                    foreach (ITextChange change in e.Changes)
                    {
                        // line range in the *after* snapshot touched by this change
                        int lineNumberFromPosition = e.After.GetLineNumberFromPosition(change.NewPosition);
                        int lastLineNumber         = e.After.GetLineNumberFromPosition(change.NewEnd);
                        if (change.LineCountDelta < 0)
                        {
                            lineStates.RemoveRange(lineNumberFromPosition, Math.Abs(change.LineCountDelta));
                        }
                        else if (change.LineCountDelta > 0)
                        {
                            TState        endLineState = lineStates[lineNumberFromPosition].EndLineState;
                            LineStateInfo element      = new LineStateInfo(endLineState);
                            lineStates.InsertRange(lineNumberFromPosition, Enumerable.Repeat(element, change.LineCountDelta));
                        }

                        if (afterState._lastDirtyLine > lineNumberFromPosition)
                        {
                            afterState._lastDirtyLine += change.LineCountDelta;
                        }

                        if (afterState._lastChangedLine > lineNumberFromPosition)
                        {
                            afterState._lastChangedLine += change.LineCountDelta;
                        }

                        // mark every line touched by this change as dirty so it will be reclassified
                        for (int i = lineNumberFromPosition; i <= lastLineNumber; i++)
                        {
                            TState        dirtyEndState = lineStates[i].EndLineState;
                            LineStateInfo dirtyInfo     = new LineStateInfo(dirtyEndState, true);
                            lineStates[i] = dirtyInfo;
                        }

                        afterState._firstChangedLine = Math.Min(afterState._firstChangedLine ?? lineNumberFromPosition, lineNumberFromPosition);
                        afterState._lastChangedLine  = Math.Max(afterState._lastChangedLine ?? lastLineNumber, lastLineNumber);
                    }

                    Contract.Assert(lineStates.Count == afterState._lineStates.Length);
                    lineStates.CopyTo(afterState._lineStates);
                }
            }
            catch (TimeoutException)
            {
                // if the write lock cannot be acquired in time, disable this classifier:
                // GetClassificationSpans will return no spans from now on
                _failedTimeout = true;
                UnsubscribeEvents();
            }
        }
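
        // Wiring sketch (added for illustration; not part of the original listing). The handler's
        // signature matches EventHandler<TextContentChangedEventArgs>, so it can be attached to
        // ITextBuffer.ChangedHighPriority, which is raised before the normal-priority Changed
        // event; the method below and its parameter are illustrative assumptions.
        protected void SubscribeToBufferChangesForIllustration(ITextBuffer textBuffer)
        {
            textBuffer.ChangedHighPriority += HandleTextBufferChangedHighPriority;
        }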