/// <summary>
/// Tokenizes the entire text of the given provider, discarding whitespace tokens.
/// </summary>
/// <param name="textProvider">The text to tokenize.</param>
/// <returns>The token list produced by the CSS tokenizer.</returns>
public static TokenList MakeTokens(ITextProvider textProvider)
{
    CssTokenizer tokenizer = new CssTokenizer();

    // Tokenize the whole range [0, Length); whitespace tokens are dropped.
    TokenList tokens = tokenizer.Tokenize(textProvider, 0, textProvider.Length, keepWhiteSpace: false);

    return tokens;
}
public void CalcTest4()
{
    // Drain every token from a calc() expression.
    var tokenizer = new CssTokenizer(new SourceReader("calc(100% / 3 - 2 * 1em - 2 * 1px)"));
    var tokens = new List<CssToken>();

    while (true)
    {
        tokens.Add(tokenizer.Read());

        if (tokenizer.IsEnd)
        {
            break;
        }
    }

    Assert.Equal(27, tokens.Count);

    // Spot-check the token kinds following the leading calc identifier.
    var expectedKinds = new[]
    {
        (Index: 1, Kind: TokenKind.LeftParenthesis),
        (Index: 2, Kind: TokenKind.Number),
        (Index: 3, Kind: TokenKind.Unit),
        (Index: 4, Kind: TokenKind.Whitespace),
        (Index: 5, Kind: TokenKind.Divide),
        (Index: 6, Kind: TokenKind.Whitespace),
    };

    foreach (var (index, kind) in expectedKinds)
    {
        Assert.Equal(kind, tokens[index].Kind);
    }

    /* Disabled in the original test:
    Assert.Equal(tokens[6].Kind, TokenKind.Number);
    Assert.Equal(tokens[7].Kind, TokenKind.Whitespace);
    Assert.Equal(tokens[8].Kind, TokenKind.Subtract);
    Assert.Equal(tokens[9].Kind, TokenKind.Add);
    */

    // A stylesheet containing the expression round-trips through Parse unchanged.
    var styles = "main { margin: 0.5in; width: calc(100% / 3 - 2 * 1em - 2 * 1px); }";
    Assert.Equal(@"main { margin: 0.5in; width: calc(100% / 3 - 2 * 1em - 2 * 1px); }", StyleSheet.Parse(styles).ToString());
}
/// <summary>
/// Creates a style sheet for the given response asynchronously.
/// </summary>
/// <param name="response">
/// The response with the stream representing the source of the stylesheet.
/// </param>
/// <param name="options">
/// The options with the parameters for evaluating the style.
/// </param>
/// <param name="cancel">The cancellation token.</param>
/// <returns>The task resulting in the style sheet.</returns>
public async Task<IStyleSheet> ParseStylesheetAsync(IResponse response, StyleOptions options, CancellationToken cancel)
{
    // NOTE(review): cancel is never observed here — confirm whether it should
    // be forwarded to the parse call.
    var context = options.Context;
    var configuration = context.Configuration;
    var parser = new CssParser(_options, configuration);
    var url = response.Address?.Href;

    var sheet = new CssStyleSheet(parser, url, options.Element)
    {
        IsDisabled = options.IsDisabled
    };

    var source = new TextSource(response.Content);
    var tokenizer = new CssTokenizer(source);

    // Forward tokenizer errors to the browsing context.
    tokenizer.Error += (_, ev) => context.Fire(ev);

    context.Fire(new CssParseEvent(sheet, completed: false));

    try
    {
        await parser.ParseStylesheetAsync(sheet, source).ConfigureAwait(false);
    }
    catch (Exception)
    {
        // A malformed stylesheet must not crash the caller; report it as an
        // error event instead of propagating.
        context.Fire(new CssErrorEvent(CssParseError.EOF, new TextPosition()));
    }

    context.Fire(new CssParseEvent(sheet, completed: true));
    return sheet;
}
/// <summary>
/// Tokenizes a small stylesheet (@charset rule, a comment, and one rule block)
/// and compares the token stream against the expected sequence.
/// </summary>
public void ParseDeclerationBlockTest()
{
    // NOTE(review): the whitespace inside this literal does not match the
    // WhitespaceToken.LFLF entries expected below — the literal appears to
    // have lost its original line breaks; confirm against the tokenizer.
    const string CssTestStr = @"@charset ""UTF-8""; /*! Hello World this is a CSS comment */ html { font-family: sans-serif; } ";

    // Tokenize a CSS string and make sure it spits out the correct token sequence
    CssToken[] Actual = CssTokenizer.Parse(CssTestStr);
    CssToken[] Expected = new CssToken[]
    {
        new AtToken("charset"),
        WhitespaceToken.Space,
        new StringToken("UTF-8"),
        SemicolonToken.Instance,
        WhitespaceToken.LFLF,
        WhitespaceToken.LFLF,
        new IdentToken("html"),
        WhitespaceToken.Space,
        BracketOpenToken.Instance,
        WhitespaceToken.LFLF,
        new IdentToken("font-family"),
        ColonToken.Instance,
        WhitespaceToken.Space,
        new IdentToken("sans-serif"),
        SemicolonToken.Instance,
        WhitespaceToken.LFLF,
        BracketCloseToken.Instance,
        WhitespaceToken.LFLF,
        EOFToken.Instance
    };

    // Render an HTML diff for debugging when the streams differ
    // (presumably Count > 1 indicates a difference — TODO confirm).
    var Engine = new Difference.DiffEngine<CssToken>();
    var diff = Engine.Compile(Expected, Actual);
    if (diff.Count > 1)
    {
        Engine.DisplayHTML(diff);
    }

    Assert.Equal(Expected, Actual);
}
/// <summary>
/// Tokenizes a complex two-part selector (attribute selectors, classes,
/// pseudo-classes, combinators) and compares against the expected tokens.
/// </summary>
public void ParseComplexTest()
{
    const string CssTestStr = @"input[type=""checkbox""].filter-class-cb:checked + label.filter-class-lb, input[type=""checkbox""].filter-class-cb:active + label.filter-class-lb {}";

    // Tokenize a CSS string and make sure it spits out the correct token sequence
    CssToken[] Actual = CssTokenizer.Parse(CssTestStr);
    CssToken[] Expected = new CssToken[]
    {
        // First selector: input[type="checkbox"].filter-class-cb:checked + label.filter-class-lb
        new IdentToken("input"),
        SqBracketOpenToken.Instance,
        new IdentToken("type"),
        new DelimToken('='),
        new StringToken("checkbox"),
        SqBracketCloseToken.Instance,
        new DelimToken('.'),
        new IdentToken("filter-class-cb"),
        ColonToken.Instance,
        new IdentToken("checked"),
        WhitespaceToken.Space,
        new DelimToken('+'),
        WhitespaceToken.Space,
        new IdentToken("label"),
        new DelimToken('.'),
        new IdentToken("filter-class-lb"),
        CommaToken.Instance,
        WhitespaceToken.Space,
        // Second selector: same shape with :active instead of :checked
        new IdentToken("input"),
        SqBracketOpenToken.Instance,
        new IdentToken("type"),
        new DelimToken('='),
        new StringToken("checkbox"),
        SqBracketCloseToken.Instance,
        new DelimToken('.'),
        new IdentToken("filter-class-cb"),
        ColonToken.Instance,
        new IdentToken("active"),
        WhitespaceToken.Space,
        new DelimToken('+'),
        WhitespaceToken.Space,
        new IdentToken("label"),
        new DelimToken('.'),
        new IdentToken("filter-class-lb"),
        WhitespaceToken.Space,
        // Empty declaration block
        BracketOpenToken.Instance,
        BracketCloseToken.Instance,
        EOFToken.Instance
    };

    // Render an HTML diff for debugging when the streams differ
    // (presumably Count > 1 indicates a difference — TODO confirm).
    var Engine = new Difference.DiffEngine<CssToken>();
    var diff = Engine.Compile(Expected, Actual);
    if (diff.Count > 1)
    {
        Engine.DisplayHTML(diff);
    }

    Assert.Equal(Expected, Actual);
}
public void CssTokenizerCarriageReturnLineFeed()
{
    // A CRLF pair comes back as a single line feed in the token data.
    var source = new TextSource("\r\n");
    var tokenizer = new CssTokenizer(source);

    var token = tokenizer.Get();

    Assert.AreEqual("\n", token.Data);
}
/// <summary>
/// Creates a parser that subscribes to the given tokenizer's document
/// start/end and token events.
/// </summary>
/// <param name="tokenizer">The tokenizer whose events drive parsing.</param>
public CssParser(CssTokenizer tokenizer)
{
    this.tokenizer = tokenizer;

    // Method-group syntax replaces the redundant explicit delegate constructions.
    tokenizer.CssDocumentStart += CssDocumentStart;
    tokenizer.CssDocumentEnd += CssDocumentEnd;
    tokenizer.NewCssToken += NewCssToken;
}
public void CssTokenizerOnlyLineFeed()
{
    // The first token produced from a lone LF carries "\n" as its data.
    var tokenizer = new CssTokenizer(new TextSource("\n"));

    Assert.Equal("\n", tokenizer.NextToken().Data);
}
public void CssTokenizerOnlyLineFeed()
{
    // The first token produced from a lone LF carries "\n" as its data.
    var source = new TextSource("\n");
    var tokenizer = new CssTokenizer(source, null);

    var token = tokenizer.Get();

    Assert.AreEqual("\n", token.Data);
}
public void CssTokenizerOnlyCarriageReturn()
{
    // A lone CR comes back as a line feed in the token data.
    var tokenizer = new CssTokenizer(new TextSource("\r"));

    Assert.Equal("\n", tokenizer.NextToken().Data);
}
public void CssParserAtRule()
{
    // The first token of an @media rule is an at-keyword.
    var tokenizer = new CssTokenizer(new TextSource("@media { background: blue; }"));

    var token = tokenizer.NextToken();

    Assert.Equal(CssTokenType.AtKeyword, token.Type);
}
public void CssParserIdentifier()
{
    // The first token of "h1 { ... }" is an identifier.
    var source = new TextSource("h1 { background: blue; }");
    var tokenizer = new CssTokenizer(source, null);

    var token = tokenizer.Get();

    Assert.AreEqual(CssTokenType.Ident, token.Type);
}
public void Consume_NumberTest(object expected, string input, ENumericTokenType tokenType)
{
    var stream = new DataConsumer<char>(input.AsMemory());

    // Consume the numeric token and compare its parsed value and classification.
    CssTokenizer.Consume_Number(stream, out var consumed, out var number, out var actualTokenType);

    Assert.Equal(tokenType, actualTokenType);
    Assert.Equal(expected, number);
}
/// <summary>
/// The first token of "h1 { ... }" is an identifier.
/// </summary>
public void CssParserIdentifier()
{
    var teststring = "h1 { background: blue; }";
    var tokenizer = new CssTokenizer(new TextSource(teststring));

    // Removed leftover debug line: typeof(string).GetType().GetField("") was
    // unused and had no effect on the test.
    var token = tokenizer.NextToken();

    Assert.Equal(CssTokenType.Ident, token.Type);
}
public void CssParserUrlSingleQuoted()
{
    // The URL token's data is the unquoted URL.
    const string url = "http://someurl";
    var tokenizer = new CssTokenizer(new TextSource("url('" + url + "')"));

    var token = tokenizer.NextToken();

    Assert.Equal(url, token.Data);
}
public void CssParserUrlDoubleQuoted()
{
    // The URL token's data is the unquoted URL.
    const string url = "http://someurl";
    var source = new TextSource("url(\"" + url + "\")");
    var tokenizer = new CssTokenizer(source, null);

    var token = tokenizer.Get();

    Assert.AreEqual(url, token.Data);
}
/// <summary>
/// Parses the given text into a CSS value list.
/// </summary>
/// <param name="text">The CSS value text.</param>
/// <returns>The parsed value.</returns>
public static CssValue Parse(string text)
{
    using (var reader = new SourceReader(new StringReader(text)))
    {
        var parser = new CssParser(new CssTokenizer(reader, LexicalMode.Value));

        return parser.ReadValueList();
    }
}
/// <summary>
/// Tokenizes the given text, returning every token including the final one.
/// </summary>
private static List<CssToken> GetTokens(string text)
{
    var tokenizer = new CssTokenizer(new SourceReader(new StringReader(text)));
    var tokens = new List<CssToken>();

    // Consume at least once, then stop as soon as the tokenizer reports the end.
    while (true)
    {
        tokens.Add(tokenizer.Consume());

        if (tokenizer.IsEnd)
        {
            return tokens;
        }
    }
}
public void CssParserIdentifier()
{
    // The first token of "h1 { ... }" is an identifier.
    var tokenizer = new CssTokenizer(new TextSource("h1 { background: blue; }"));

    // Grab only the first token from the stream.
    CssToken token = null;
    foreach (var candidate in tokenizer.Tokens)
    {
        token = candidate;
        break;
    }

    Assert.AreEqual(CssTokenType.Ident, token.Type);
}
public void CssParserAtRule()
{
    // The first token of an @media rule is an at-keyword.
    var tokenizer = new CssTokenizer(new TextSource("@media { background: blue; }"));

    // Grab only the first token from the stream.
    CssToken token = null;
    foreach (var candidate in tokenizer.Tokens)
    {
        token = candidate;
        break;
    }

    Assert.AreEqual(CssTokenType.AtKeyword, token.Type);
}
/// <summary>
/// Verifies tokenization of url(...) with a quoted argument (and, when
/// SUPPORT_ENCODED_CSS is defined, escaped and unicode-encoded url keywords).
/// </summary>
public void TokenizerTest_EncodedUrl()
{
    // Removed an unused local CssTokenizer; Helpers.MakeTokens creates its own.

    // Base case: No encoding:
    ITextProvider text = new StringTextProvider(@"url('foo.jpg')");
    TokenList actual = Helpers.MakeTokens(text);

    TokenizeFilesTest.CompareTokenArrays(
        new CssToken[]
        {
            new CssToken(CssTokenType.Url, 0, 4),
            new CssToken(CssTokenType.String, 4, 9),
            new CssToken(CssTokenType.CloseFunctionBrace, 13, 1),
            CssToken.EndOfFileToken(text)
        },
        actual);

#if SUPPORT_ENCODED_CSS
    // Escape characters:
    text = new StringTextProvider(@"\u\r\l('foo.jpg')");
    actual = Helpers.MakeTokens(text.GetText(0, text.Length));

    TokenizeFilesTest.CompareTokenArrays(
        new CssToken[]
        {
            new CssToken(CssTokenType.Url, 0, 7),
            new CssToken(CssTokenType.String, 7, 9),
            new CssToken(CssTokenType.CloseFunctionBrace, 16, 1),
            CssToken.EndOfFileToken(text)
        },
        actual);

    // Unicode encode and escape characters:
    text = new StringTextProvider(@"u\52 \l(foo)");
    actual = Helpers.MakeTokens(text.GetText(0, text.Length));

    TokenizeFilesTest.CompareTokenArrays(
        new CssToken[]
        {
            new CssToken(CssTokenType.Url, 0, 8),
            new CssToken(CssTokenType.UnquotedUrlString, 8, 3),
            new CssToken(CssTokenType.CloseFunctionBrace, 11, 1),
            CssToken.EndOfFileToken(text)
        },
        actual);
#endif
}
public void CssParserUrlSingleQuoted()
{
    var url = "http://someurl";
    var tokenizer = new CssTokenizer(new TextSource("url('" + url + "')"));

    // Grab only the first token and view it as a string token.
    CssStringToken token = null;
    foreach (var candidate in tokenizer.Tokens)
    {
        token = candidate as CssStringToken;
        break;
    }

    Assert.AreEqual(url, token.Data);
}
/// <summary>
/// Takes a string and transforms it into a selector object.
/// </summary>
/// <param name="selector">The string to parse.</param>
/// <returns>The Selector object.</returns>
public static Selector ParseSelector(String selector)
{
    var tokenizer = new CssTokenizer(new SourceManager(selector));

    // Selector constructors are pooled; return this one after use.
    var constructor = Pool.NewSelectorConstructor();

    foreach (var token in tokenizer.Tokens)
    {
        constructor.Apply(token);
    }

    var result = constructor.Result;
    constructor.ToPool();
    return result;
}
/// <summary>
/// Parses the given text into a CSS value.
/// </summary>
/// <param name="text">The CSS value text; must not be empty.</param>
/// <returns>The parsed value.</returns>
/// <exception cref="ArgumentException">Thrown when <paramref name="text"/> is empty.</exception>
public static CssValue Parse(string text)
{
    if (text.Length == 0)
    {
        throw new ArgumentException("Must not be empty", nameof(text));
    }

    // Fast path: plain numbers and measurements skip the full tokenizer.
    if (char.IsDigit(text[0]) && TryParseNumberOrMeasurement(text, out var value))
    {
        return value!;
    }

    // Dispose the reader as well as the tokenizer; SourceReader is IDisposable
    // and was previously leaked here.
    using var reader = new SourceReader(new StringReader(text));
    using var tokenizer = new CssTokenizer(reader, LexicalMode.Value);

    var parser = new CssParser(tokenizer);
    return parser.ReadValueList();
}
public void CssNormalizeTest()
{
    // Every escaped variant of url(javascript:alert('XSS')) must normalize to
    // the same number of parts as the plain form.
    var plainUrlParts = new CssTokenizer(@"background-image:url(javascript:alert('XSS'))").Normalize().ToArray();

    var escapedUrlVariants = new[]
    {
        @"background-image:\0075\0072\006C\0028'\006a\0061\0076\0061\0073\0063\0072\0069\0070\0074\003a\0061\006c\0065\0072\0074\0028\0027\0058\0053\0053\0027\0029'\0029",
        @"background-image:\0075\0072\006C\0028\006a\0061\0076\0061\0073\0063\0072\0069\0070\0074\003a\0061\006c\0065\0072\0074\0028\0027\0058\0053\0053\0027\0029\0029",
        @"background-image:\0075r\006C\0028'\006a\0061\0076\0061\0073\0063\0072\0069\0070\0074\003a\0061\006c\0065\0072\0074\0028\0027\0058\0053\0053\0027\0029'\0029",
    };

    foreach (var variant in escapedUrlVariants)
    {
        Assert.Equal(plainUrlParts.Length, new CssTokenizer(variant).Normalize().ToArray().Length);
    }

    // Comment-split "expression" must normalize like the plain keyword.
    var plainExpressionParts = new CssTokenizer(@"xss:expression(alert('XSS'))").Normalize().ToArray();

    var commentSplitVariants = new[]
    {
        @"xss:expr/*XSS*/ession(alert('XSS'))",
        @"xss:expr/*XSS*/ess/*XSS*/ion(alert('XSS'))",
    };

    foreach (var variant in commentSplitVariants)
    {
        Assert.Equal(plainExpressionParts.Length, new CssTokenizer(variant).Normalize().ToArray().Length);
    }
}
/// <summary>
/// Tokenizes a rule with two comma-separated selectors and compares the
/// token stream against the expected sequence.
/// </summary>
public void ParseMutliSpecifierTest()
{
    const string css = @"a:active, a:hover { outline: 0; }";

    var expected = new CssToken[]
    {
        new IdentToken("a"),
        ColonToken.Instance,
        new IdentToken("active"),
        CommaToken.Instance,
        WhitespaceToken.LFLF,
        new IdentToken("a"),
        ColonToken.Instance,
        new IdentToken("hover"),
        WhitespaceToken.Space,
        BracketOpenToken.Instance,
        WhitespaceToken.LFLF,
        new IdentToken("outline"),
        ColonToken.Instance,
        WhitespaceToken.Space,
        new NumberToken(ENumericTokenType.Integer, "0", 0),
        SemicolonToken.Instance,
        WhitespaceToken.Space,
        BracketCloseToken.Instance,
        EOFToken.Instance
    };

    var actual = CssTokenizer.Parse(css);

    // Render an HTML diff for debugging when the sequences differ.
    var engine = new Difference.DiffEngine<CssToken>();
    var delta = engine.Compile(expected, actual);
    if (delta.Count > 1)
    {
        engine.DisplayHTML(delta);
    }

    Assert.Equal(expected, actual);
}
/// <summary>
/// Tokenizes a selector containing :not(...), an attribute selector with a
/// value, and a ::after pseudo-element, and compares against the expected tokens.
/// </summary>
public void ParsePseudoTest2()
{
    const string CssTestStr = @"input[type=""checkbox""].filter-class-cb:not(:checked) + label.filter-class-lb[data-class-idx=""0""]::after {}";

    // Tokenize a CSS string and make sure it spits out the correct token sequence
    CssToken[] Actual = CssTokenizer.Parse(CssTestStr);
    CssToken[] Expected = new CssToken[]
    {
        new IdentToken("input"),
        SqBracketOpenToken.Instance,
        new IdentToken("type"),
        new DelimToken('='),
        new StringToken("checkbox"),
        SqBracketCloseToken.Instance,
        new DelimToken('.'),
        new IdentToken("filter-class-cb"),
        // :not(:checked) — "not(" is a single function-name token.
        ColonToken.Instance,
        new FunctionNameToken("not"),
        ColonToken.Instance,
        new IdentToken("checked"),
        ParenthesisCloseToken.Instance,
        WhitespaceToken.Space,
        new DelimToken('+'),
        WhitespaceToken.Space,
        new IdentToken("label"),
        new DelimToken('.'),
        new IdentToken("filter-class-lb"),
        SqBracketOpenToken.Instance,
        new IdentToken("data-class-idx"),
        new DelimToken('='),
        new StringToken("0"),
        SqBracketCloseToken.Instance,
        // ::after — two colon tokens followed by the identifier.
        ColonToken.Instance,
        ColonToken.Instance,
        new IdentToken("after"),
        WhitespaceToken.Space,
        BracketOpenToken.Instance,
        BracketCloseToken.Instance,
        EOFToken.Instance
    };

    // Render an HTML diff for debugging when the streams differ
    // (presumably Count > 1 indicates a difference — TODO confirm).
    var Engine = new Difference.DiffEngine<CssToken>();
    var diff = Engine.Compile(Expected, Actual);
    if (diff.Count > 1)
    {
        Engine.DisplayHTML(diff);
    }

    Assert.Equal(Expected, Actual);
}
/// <summary>
/// Creates the style state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssStyleState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Creates the unknown-rule state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssUnknownState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Creates the media state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssMediaState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Creates the namespace state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssNamespaceState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Creates the keyframes state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssKeyframesState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Creates a builder over the given tokenizer and parser with an empty node stack.
/// </summary>
/// <param name="tokenizer">The token source.</param>
/// <param name="parser">The parser to use.</param>
public CssBuilder(CssTokenizer tokenizer, CssParser parser)
{
    _nodes = new Stack<CssNode>();
    _tokenizer = tokenizer;
    _parser = parser;
}
/// <summary>
/// Creates a builder, storing the given tokenizer and parser for later use.
/// </summary>
/// <param name="tokenizer">The token source.</param>
/// <param name="parser">The parser to use.</param>
public CssBuilder(CssTokenizer tokenizer, CssParser parser)
{
    _tokenizer = tokenizer;
    _parser = parser;
}
/// <summary>
/// Tokenizes the given text and wraps the resulting tokens in a consumer stream.
/// </summary>
/// <param name="Text">The CSS text to tokenize.</param>
public CssParser(ReadOnlySpan<char> Text)
{
    // Parameter name kept as-is for caller compatibility with named arguments.
    var tokenizer = new CssTokenizer(Text);

    Stream = new DataConsumer<CssToken>(tokenizer.Tokens);
}
/// <summary>
/// Creates the charset state, forwarding the tokenizer and parser to the base state.
/// </summary>
/// <param name="tokenizer">The tokenizer providing the token stream.</param>
/// <param name="parser">The associated parser.</param>
public CssCharsetState(CssTokenizer tokenizer, CssParser parser) : base(tokenizer, parser) { }
/// <summary>
/// Tokenizes the CSS text and asserts that the parser yields the expected value.
/// </summary>
private static void VerifyRule<T>(Parser<CssToken, T> parser, string css, T expected)
{
    var tokens = new CssTokenizer(new CssReader(css)).Tokenize();
    var actual = parser.ParseOrThrow(tokens);

    Assert.That(actual, Is.EqualTo(expected));
}