/// <summary>
/// Debug-only sanity check: re-tokenizes the entire new text from scratch and
/// verifies that the incrementally maintained token list matches it exactly.
/// Fails a debug assertion (does not throw) on any mismatch.
/// </summary>
private void VerifyTokensAfterIncrementalChange(ICssTokenizerFactory tokenizerFactory, ITextProvider newText, TokenList newTokens)
{
    // Ground truth: a full tokenization of the whole buffer.
    ICssTokenizer tokenizer = tokenizerFactory.CreateTokenizer();
    TokenList expectedTokens = tokenizer.Tokenize(newText, 0, newText.Length, keepWhiteSpace: false);

    if (expectedTokens.Count != newTokens.Count)
    {
        Debug.Fail("The CssTree.Tokens list is bad, wrong number of tokens");
        return;
    }

    // Counts agree — compare token by token; stop at the first mismatch.
    int index = 0;
    while (index < expectedTokens.Count && index < newTokens.Count)
    {
        if (!CssToken.CompareTokens(expectedTokens[index], newTokens[index], newText, newText))
        {
            Debug.Fail("The CssTree.Tokens list is bad");
            return;
        }

        index++;
    }
}
/// <summary>
/// Global method to compute the Result of a token change.
/// Given the previous token list and a text edit (delete + insert at
/// <paramref name="changeStart"/>), tokenizes only as much of the new text as
/// needed: it starts at (or just before) the change and stops as soon as a
/// freshly produced token lines up with an old token shifted by the net text
/// delta, or at end of file.
/// </summary>
/// <param name="tokenizerFactory">Factory used to create the streaming tokenizer.</param>
/// <param name="oldTokens">Token list from before the edit.</param>
/// <param name="oldText">Text provider for the pre-edit text (used to compare old tokens).</param>
/// <param name="newText">Text provider for the post-edit text.</param>
/// <param name="changeStart">Offset in the new text where the edit begins.</param>
/// <param name="deletedLength">Number of characters removed by the edit.</param>
/// <param name="insertedLength">Number of characters inserted by the edit.</param>
/// <returns>
/// A Result describing the replaced span of old tokens (start index and count),
/// the newly produced tokens, and the offset to apply to surviving old tokens.
/// </returns>
public static Result TokenizeChange(
    ICssTokenizerFactory tokenizerFactory,
    TokenList oldTokens,
    ITextProvider oldText,
    ITextProvider newText,
    int changeStart,
    int deletedLength,
    int insertedLength)
{
    Result result = new Result();

    // First inserted character is used as a hint when picking the starting old
    // token (presumably so FindTokenToStart can handle token-merging cases —
    // its semantics are not visible here; confirm against its implementation).
    char firstInsertedChar = (insertedLength > 0) ? newText[changeStart] : '\0';

    result.NewTokens = new TokenList();
    result.OldTokens = oldTokens;
    result.OldTokenStart = FindTokenToStart(oldTokens, changeStart, firstInsertedChar);
    result.OldTokenCount = oldTokens.Count - result.OldTokenStart; // assume delete to EOF

    // Net shift applied to old token positions at/after the change.
    result.OldTokenTextOffset = insertedLength - deletedLength;

    result.TokenizationStart = changeStart;

    if (result.OldTokenStart < oldTokens.Count)
    {
        // The first old token may start before the actual text change.
        // Adjust where tokenization starts:
        result.TokenizationStart = Math.Min(result.TokenizationStart, oldTokens[result.OldTokenStart].Start);
    }

    // Tokenize until EOF or until the new tokens start matching the old tokens
    bool tokenizeUntilEOF = (oldTokens.Count == 0);

    // Create and init a streaming tokenizer
    ICssTokenizer tokenizer = tokenizerFactory.CreateTokenizer();
    // estimatedLength is only a hint for the stream; actual tokenization may
    // run past it until the stop condition below is met.
    int estimatedLength = (tokenizeUntilEOF ? newText.Length - result.TokenizationStart : insertedLength);
    tokenizer.InitStream(newText, result.TokenizationStart, estimatedLength, keepWhiteSpace: false);

    for (CssToken token = tokenizer.StreamNextToken(); true; token = tokenizer.StreamNextToken())
    {
        // Only consider stopping once we are past the inserted text.
        if (token.TokenType != CssTokenType.EndOfFile && !tokenizeUntilEOF && token.Start >= changeStart + insertedLength)
        {
            // This could be a good token for stopping, see if it matches an old token
            int oldTokenStart = token.Start - result.OldTokenTextOffset;
            int oldTokenIndex = oldTokens.FindInsertIndex(oldTokenStart, beforeExisting: true);

            if (oldTokenIndex == oldTokens.Count)
            {
                // No old token at or after this position — everything to EOF is new.
                tokenizeUntilEOF = true;
            }
            else
            {
                CssToken oldToken = oldTokens[oldTokenIndex];

                if (oldToken.Start == oldTokenStart && CssToken.CompareTokens(token, oldToken, newText, oldText))
                {
                    // New token matches a shifted old token: old tokens from here on
                    // survive, so the replaced range ends just before oldTokenIndex.
                    // Note: the matching token itself is NOT added to NewTokens.
                    result.OldTokenCount = oldTokenIndex - result.OldTokenStart;
                    break;
                }
            }
        }

        result.NewTokens.Add(token);

        if (token.TokenType == CssTokenType.EndOfFile)
        {
            break;
        }
    }

    return (result);
}
/// <summary>
/// CSS parser constructor.
/// </summary>
/// <param name="tokenizerFactory">Tokenizer factory. Null causes the parser to use
/// the default tokenizer factory (<see cref="DefaultTokenizerFactory"/>).</param>
/// <param name="itemFactory">Item factory. Null causes the parser to use the
/// default item factory. You can provide a different factory if you are extending the
/// CSS parser, such as when implementing LESS CSS support.</param>
internal CssParser(ICssTokenizerFactory tokenizerFactory, ICssItemFactory itemFactory)
{
    TokenizerFactory = tokenizerFactory ?? new DefaultTokenizerFactory();
    ExternalItemFactory = itemFactory;
}