Code example #1
        /// <summary>
        /// Parse CSS from a text provider
        /// </summary>
        /// <param name="text">Text provider that supplies text (may be null; treated as empty text)</param>
        /// <param name="insertComments">True if the stylesheet should include comment items</param>
        /// <returns>Stylesheet object</returns>
        public StyleSheet Parse(ITextProvider text, bool insertComments)
        {
            DateTime startTime = DateTime.UtcNow;

            ICssTokenizer tokenizer    = CreateTokenizer();

            // Guard against a null provider so tokenization always has a valid source
            ITextProvider textProvider = text ?? new StringTextProvider(string.Empty);
            TokenList     tokens       = tokenizer.Tokenize(textProvider, 0, textProvider.Length, keepWhiteSpace: false);

            LastTokenizeMilliseconds = (int)(DateTime.UtcNow - startTime).TotalMilliseconds;

            // Pass the null-coalesced provider (not the raw 'text' argument) so that a
            // null input cannot reach the downstream parse.
            return Parse(textProvider, tokens, insertComments);
        }
Code example #2
        /// <summary>
        /// Internal function to parse a new StyleSheet object
        /// </summary>
        private void ParseNewStyleSheet(ITextProvider textProvider, TokenList tokens)
        {
            ICssParser parser = _parserFactory.CreateParser();

            if (tokens == null)
            {
                ICssTokenizer tokenizer = parser.TokenizerFactory.CreateTokenizer();
                tokens = tokenizer.Tokenize(textProvider, 0, textProvider.Length, keepWhiteSpace: false);
            }

            using (CreateWriteLock())
            {
                Tokens     = tokens;
                StyleSheet = parser.Parse(textProvider, tokens, insertComments: true);
            }

            TreeUpdated?.Invoke(this, new CssTreeUpdateEventArgs(this));
        }
Code example #3
        /// <summary>
        /// Parse text as an inline style (e.g. the contents of a style="" attribute).
        /// </summary>
        /// <param name="text">Text provider that supplies text (may be null; treated as empty text)</param>
        /// <param name="insertComments">True if comment items should be inserted into the parse tree</param>
        /// <returns>The parsed InlineStyle, or null when parsing fails</returns>
        internal InlineStyle ParseInlineStyle(ITextProvider text, bool insertComments)
        {
            InlineStyle inlineStyle = new InlineStyle();

            ICssTokenizer tokenizer    = CreateTokenizer();

            // Guard against a null provider so tokenization always has a valid source
            ITextProvider textProvider = text ?? new StringTextProvider(string.Empty);
            TokenList     tokens       = tokenizer.Tokenize(textProvider, 0, textProvider.Length, keepWhiteSpace: false);

            // Use the null-coalesced provider (not the raw 'text' argument) so that a
            // null input cannot reach the comment extraction.
            IList <Comment> comments    = ExtractComments(textProvider, tokens, 0, tokens.Count);
            TokenStream     tokenStream = new TokenStream(tokens)
            {
                SkipComments = true
            };

            ItemFactory itemFactory = new ItemFactory(ExternalItemFactory, textProvider, tokenStream);

            if (!inlineStyle.Parse(itemFactory, textProvider, tokenStream))
            {
                // Parsing failed; signal this to the caller with a null result
                inlineStyle = null;
            }
            else
            {
                if (insertComments)
                {
                    foreach (ParseItem comment in comments)
                    {
                        inlineStyle.InsertChildIntoSubtree(comment);
                    }
                }

                // There must be a StyleSheet root object, so create one.
                // NOTE(review): the local styleSheet is never returned — presumably
                // Children.Add establishes the parent link on inlineStyle; verify.
                StyleSheet styleSheet = new StyleSheet
                {
                    TextProvider = textProvider
                };

                styleSheet.Children.Add(inlineStyle);
            }

            return inlineStyle;
        }
Code example #4
        /// <summary>
        /// Debug-only sanity check: re-tokenizes the whole text from scratch and
        /// verifies that the incrementally-updated token list matches it exactly.
        /// </summary>
        private void VerifyTokensAfterIncrementalChange(ICssTokenizerFactory tokenizerFactory, ITextProvider newText, TokenList newTokens)
        {
            ICssTokenizer tokenizer   = tokenizerFactory.CreateTokenizer();
            TokenList     validTokens = tokenizer.Tokenize(newText, 0, newText.Length, keepWhiteSpace: false);

            // A count mismatch means the incremental update dropped or added tokens
            if (validTokens.Count != newTokens.Count)
            {
                Debug.Fail("The CssTree.Tokens list is bad, wrong number of tokens");
                return;
            }

            // Counts match; compare each token pairwise against the ground truth
            for (int i = 0; i < validTokens.Count; i++)
            {
                if (!CssToken.CompareTokens(validTokens[i], newTokens[i], newText, newText))
                {
                    Debug.Fail("The CssTree.Tokens list is bad");
                    return;
                }
            }
        }
Code example #5
        /// <summary>
        /// Global method to compute the Result of a token change.
        /// Incrementally re-tokenizes only the changed region of text, reusing the
        /// unchanged old tokens before and after the change where possible.
        /// </summary>
        /// <param name="tokenizerFactory">Factory used to create the streaming tokenizer</param>
        /// <param name="oldTokens">Token list from before the text change</param>
        /// <param name="oldText">Text provider for the pre-change text</param>
        /// <param name="newText">Text provider for the post-change text</param>
        /// <param name="changeStart">Character offset where the change begins</param>
        /// <param name="deletedLength">Number of characters deleted at changeStart</param>
        /// <param name="insertedLength">Number of characters inserted at changeStart</param>
        /// <returns>A Result describing which old tokens to replace and the new tokens</returns>
        public static Result TokenizeChange(
            ICssTokenizerFactory tokenizerFactory,
            TokenList oldTokens,
            ITextProvider oldText,
            ITextProvider newText,
            int changeStart,
            int deletedLength,
            int insertedLength)
        {
            Result result            = new Result();
            // First inserted character helps decide which old token re-tokenization
            // must start from; '\0' when nothing was inserted.
            char   firstInsertedChar = (insertedLength > 0) ? newText[changeStart] : '\0';

            result.NewTokens          = new TokenList();
            result.OldTokens          = oldTokens;
            result.OldTokenStart      = FindTokenToStart(oldTokens, changeStart, firstInsertedChar);
            result.OldTokenCount      = oldTokens.Count - result.OldTokenStart; // assume delete to EOF
            // Net character delta; shifts old token positions to new-text coordinates.
            result.OldTokenTextOffset = insertedLength - deletedLength;
            result.TokenizationStart  = changeStart;

            if (result.OldTokenStart < oldTokens.Count)
            {
                // The first old token may start before the actual text change.
                // Adjust where tokenization starts:
                result.TokenizationStart = Math.Min(result.TokenizationStart, oldTokens[result.OldTokenStart].Start);
            }

            // Tokenize until EOF or until the new tokens start matching the old tokens
            bool tokenizeUntilEOF = (oldTokens.Count == 0);

            // Create and init a streaming tokenizer
            ICssTokenizer tokenizer       = tokenizerFactory.CreateTokenizer();
            int           estimatedLength = (tokenizeUntilEOF ? newText.Length - result.TokenizationStart : insertedLength);

            tokenizer.InitStream(newText, result.TokenizationStart, estimatedLength, keepWhiteSpace: false);

            // Stream tokens one at a time; loop exits via the break statements below.
            for (CssToken token = tokenizer.StreamNextToken(); true; token = tokenizer.StreamNextToken())
            {
                // Once past the inserted text, try to resynchronize with the old token list.
                if (token.TokenType != CssTokenType.EndOfFile && !tokenizeUntilEOF &&
                    token.Start >= changeStart + insertedLength)
                {
                    // This could be a good token for stopping, see if it matches an old token

                    // Map the new token's position back into old-text coordinates.
                    int oldTokenStart = token.Start - result.OldTokenTextOffset;
                    int oldTokenIndex = oldTokens.FindInsertIndex(oldTokenStart, beforeExisting: true);

                    if (oldTokenIndex == oldTokens.Count)
                    {
                        // No old token at or after this position; must tokenize the rest.
                        tokenizeUntilEOF = true;
                    }
                    else
                    {
                        CssToken oldToken = oldTokens[oldTokenIndex];

                        // Resynchronized: the stream matches an unchanged old token,
                        // so everything from here on can be reused from oldTokens.
                        if (oldToken.Start == oldTokenStart && CssToken.CompareTokens(token, oldToken, newText, oldText))
                        {
                            result.OldTokenCount = oldTokenIndex - result.OldTokenStart;
                            break;
                        }
                    }
                }

                result.NewTokens.Add(token);

                if (token.TokenType == CssTokenType.EndOfFile)
                {
                    break;
                }
            }

            return(result);
        }