Code Example #1
// Wraps an existing token list; length is presumably the length of the
// underlying source text.
internal TokenStream(TokenList tokens, int length)
{
    Tokens  = tokens;
    _length = length;
}
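A TokenStream simply wraps a previously computed TokenList: example #2 below shows one way such a list is produced, and example #3 shows a convenience overload of this constructor.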
Code Example #2
/// <summary>
/// Global method to compute the Result of a token change
/// </summary>
public static Result TokenizeChange(
    ICssTokenizerFactory tokenizerFactory,
    TokenList oldTokens,
    ITextProvider oldText,
    ITextProvider newText,
    int changeStart,
    int deletedLength,
    int insertedLength)
{
    Result result            = new Result();
    char   firstInsertedChar = (insertedLength > 0) ? newText[changeStart] : '\0';

    result.NewTokens          = new TokenList();
    result.OldTokens          = oldTokens;
    result.OldTokenStart      = FindTokenToStart(oldTokens, changeStart, firstInsertedChar);
    result.OldTokenCount      = oldTokens.Count - result.OldTokenStart; // assume delete to EOF
    result.OldTokenTextOffset = insertedLength - deletedLength;
    result.TokenizationStart  = changeStart;

    if (result.OldTokenStart < oldTokens.Count)
    {
        // The first old token may start before the actual text change.
        // Adjust where tokenization starts:
        result.TokenizationStart = Math.Min(result.TokenizationStart, oldTokens[result.OldTokenStart].Start);
    }

    // Tokenize until EOF or until the new tokens start matching the old tokens
    bool tokenizeUntilEOF = (oldTokens.Count == 0);

    // Create and init a streaming tokenizer
    ICssTokenizer tokenizer       = tokenizerFactory.CreateTokenizer();
    int           estimatedLength = (tokenizeUntilEOF ? newText.Length - result.TokenizationStart : insertedLength);

    tokenizer.InitStream(newText, result.TokenizationStart, estimatedLength, keepWhiteSpace: false);

    for (CssToken token = tokenizer.StreamNextToken(); true; token = tokenizer.StreamNextToken())
    {
        if (token.TokenType != CssTokenType.EndOfFile && !tokenizeUntilEOF &&
            token.Start >= changeStart + insertedLength)
        {
            // This could be a good token for stopping, see if it matches an old token

            int oldTokenStart = token.Start - result.OldTokenTextOffset;
            int oldTokenIndex = oldTokens.FindInsertIndex(oldTokenStart, beforeExisting: true);

            if (oldTokenIndex == oldTokens.Count)
            {
                tokenizeUntilEOF = true;
            }
            else
            {
                CssToken oldToken = oldTokens[oldTokenIndex];

                if (oldToken.Start == oldTokenStart && CssToken.CompareTokens(token, oldToken, newText, oldText))
                {
                    result.OldTokenCount = oldTokenIndex - result.OldTokenStart;
                    break;
                }
            }
        }

        result.NewTokens.Add(token);

        if (token.TokenType == CssTokenType.EndOfFile)
        {
            break;
        }
    }

    return result;
}
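For orientation, here is a hedged sketch of a call site. The helper name, the concrete offsets, and the assumption that TokenizeChange is in scope (e.g., the helper lives in the same class) are mine, not the source's; ICssTokenizerFactory, ITextProvider, and TokenList come from the surrounding CSS editor codebase. The offsets model replacing "red" (3 characters deleted) with "blue" (4 characters inserted) at offset 11 of "a { color: red; }".

// Sketch only: assumes TokenizeChange is in scope and the editor types exist.
static Result RetokenizeColorEdit(
    ICssTokenizerFactory factory,
    TokenList oldTokens,      // tokens previously computed for the old text
    ITextProvider oldText,    // provider over "a { color: red; }"
    ITextProvider newText)    // provider over "a { color: blue; }"
{
    // "red" starts at offset 11; 3 chars were deleted, 4 chars ("blue") inserted.
    Result result = TokenizeChange(
        factory, oldTokens, oldText, newText,
        changeStart: 11,
        deletedLength: 3,
        insertedLength: 4);

    // result.NewTokens holds the re-tokenized span; OldTokenStart/OldTokenCount
    // identify the old tokens that span replaces in the original list.
    return result;
}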
Code Example #3
// Convenience overload; -1 presumably acts as a "length unknown" sentinel.
internal TokenStream(TokenList tokens)
    : this(tokens, -1)
{
}
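Taken together with example #1, both construction paths funnel into the two-argument constructor. A minimal sketch, assuming same-assembly access (TokenStream is internal) and a hypothetical GetTokens() helper:

TokenList tokens = GetTokens();                 // hypothetical helper
var withLength   = new TokenStream(tokens, 17); // explicit text length
var withSentinel = new TokenStream(tokens);     // equivalent to (tokens, -1)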