// Entry point: tokenizes the source text and parses the resulting token queue.
public static Expression Parse(string source)
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read(source);
    var queue = new TokensQueue(tokens);
    return Parse(queue);
}
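// A minimal usage sketch for the entry point above. The containing parser class is
// not shown in these excerpts, so "ExpressionParser" is an assumed name; the
// expression types (AddExpression, UnitExpression) are the ones referenced further down.
public void ParseExpressionSketchTest()
{
    var expression = ExpressionParser.Parse("10px + 2px");
    // "10px + 2px" should come back as an addition of two unit expressions.
    Assert.IsTrue(expression is AddExpression);
}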
// A literal followed by an opening parenthesis is a function call;
// otherwise it is a plain literal value.
protected static Expression ParseLiteral(ref Token token, TokensQueue queue)
{
    if (!queue.Empty)
    {
        var preview = queue.Peek();
        if (preview.Type == TokenType.OpenParenthesis)
        {
            return Func(token, queue);
        }
    }

    return new LiteralExpression(token.StringValue);
}
public void LiteralTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("abc def qwe;");
    Assert.AreEqual(6, tokens.Count);

    var queue = new TokensQueue(tokens);
    Assert.AreEqual("abc", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("def", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("qwe", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(";", queue.Read(TokenType.Semicolon).StringValue);
}
// Reads a comma-separated argument list, stopping at the closing parenthesis
// (which is left in the queue for the caller to consume).
public static IList<Expression> ParseArguments(TokensQueue tokens)
{
    var res = new List<Expression>();
    while (!tokens.Empty)
    {
        var preview = tokens.Peek();
        if (preview.Type == TokenType.CloseParenthesis)
            return res;

        var arg = Parse(tokens);
        res.Add(arg);

        preview = tokens.Peek();
        if (preview.Type != TokenType.Comma)
            break;
        tokens.Read(TokenType.Comma);
    }

    return res;
}
// Parses a pseudo-class selector such as :hover; :not(...) is handled specially
// and must wrap a simple selector.
protected static SelectorExpression ParsePseudoSelector(Token token, TokensQueue queue)
{
    var next = queue.Read(TokenType.Literal);
    if (next.StringValue == "not")
    {
        var preview = queue.Peek();
        if (preview.Type == TokenType.OpenParenthesis)
        {
            queue.Read(TokenType.OpenParenthesis);
            var expr = Parse(queue);
            if (!(expr is SimpleSelector))
                throw new TokenException("simple selector expected", preview);
            queue.Read(TokenType.CloseParenthesis);
            return new NotExpression((SimpleSelector)expr);
        }
    }

    return new PseudoClassSelector(next.StringValue);
}
// Parses a function call; only round() with a single argument is supported so far.
protected static Expression Func(Token nameToken, TokensQueue tokens)
{
    tokens.Read(TokenType.OpenParenthesis);
    var args = ParseArguments(tokens);
    tokens.Read(TokenType.CloseParenthesis);

    switch (nameToken.StringValue.ToLower())
    {
        case "round":
            if (args.Count != 1)
            {
                throw new TokenException("expected 1 argument", nameToken);
            }
            return new RoundFunctionExpression(args.First());
        default:
            throw new TokenException("unknown function " + nameToken.StringValue, nameToken);
    }
}
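// A hedged sketch of the only supported function so far, under the same assumed
// "ExpressionParser" class name as above: round(4.7) parses into a
// RoundFunctionExpression wrapping its single argument.
public void RoundFunctionSketchTest()
{
    var expression = ExpressionParser.Parse("round(4.7)");
    Assert.IsTrue(expression is RoundFunctionExpression);
}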
// Parses an attribute selector: [attr] tests for existence, [attr=value] for equality.
private static SelectorExpression ParseAttributeSelector(Token token, TokensQueue tokens)
{
    var attrName = tokens.Read(TokenType.Literal).StringValue;
    var operation = tokens.Read();
    if (operation.Type == TokenType.CloseSquareBracket)
        return new AttributeExistsSelector(attrName);

    if (operation.Type == TokenType.Equal)
    {
        var val = tokens.Read();
        if (val.Type != TokenType.Literal && val.Type != TokenType.String)
        {
            throw new TokenException("expected literal or string token", val);
        }
        tokens.Read(TokenType.CloseSquareBracket);
        return new AttributeEqualsSelector(attrName, val.StringValue);
    }

    throw new TokenException("unknown attribute operator", operation);
}
public void MultilineCommentTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("/* comment\r\none*/\r\n/* comment\r\ntwo*/");
    Assert.AreEqual(3, tokens.Count);

    var queue = new TokensQueue(tokens);
    var token = queue.Read();
    Assert.AreEqual(TokenType.MultiLineComment, token.Type);
    Assert.AreEqual("/* comment\r\none*/", token.StringValue);

    queue.Read(TokenType.Whitespace);

    token = queue.Read();
    Assert.AreEqual(TokenType.MultiLineComment, token.Type);
    Assert.AreEqual("/* comment\r\ntwo*/", token.StringValue);
}
public void CssTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("p { color: red; }");
    Assert.AreEqual(11, tokens.Count);

    var queue = new TokensQueue(tokens);
    Assert.AreEqual("p", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("{", queue.Read(TokenType.OpenCurlyBracket).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("color", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(":", queue.Read(TokenType.Colon).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("red", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(";", queue.Read(TokenType.Semicolon).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("}", queue.Read(TokenType.CloseCurlyBracket).StringValue);
}
public void NumberTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("123 456 789.52");
    Assert.AreEqual(5, tokens.Count);

    var queue = new TokensQueue(tokens);
    var token = queue.Read(TokenType.Number);
    Assert.AreEqual(123.0, token.NumberValue);

    queue.Read(TokenType.Whitespace);
    token = queue.Read(TokenType.Number);
    Assert.AreEqual(456.0, token.NumberValue);

    queue.Read(TokenType.Whitespace);
    token = queue.Read(TokenType.Number);
    Assert.AreEqual(789.52, token.NumberValue, 0.00000001);
}
// Parses a single operand: a number (optionally with a unit), a literal or function
// call, a #hex color, a unary minus, or a parenthesized sub-expression.
private static Expression ParseOperand(TokensQueue tokens)
{
    tokens.SkipWhiteAndComments();
    var token = tokens.Read();
    switch (token.Type)
    {
        case TokenType.Number:
            return ParseNumber(ref token, tokens);
        case TokenType.Literal:
            return ParseLiteral(ref token, tokens);
        case TokenType.Hash:
            return ParseHashColor(ref token, tokens);
        case TokenType.Minus:
            return new NegateExpression(ParseOperand(tokens));
        case TokenType.OpenParenthesis:
            var inner = Parse(tokens);
            tokens.Read(TokenType.CloseParenthesis);
            return inner;
        default:
            throw new TokenException("unexpected token " + token.StringValue, token);
    }
}
public void VendorPropertyTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("-webkit-property: value;");
    Assert.AreEqual(5, tokens.Count);

    var queue = new TokensQueue(tokens);
    Assert.AreEqual("-webkit-property", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(":", queue.Read(TokenType.Colon).StringValue);
    Assert.AreEqual(" ", queue.Read(TokenType.Whitespace).StringValue);
    Assert.AreEqual("value", queue.Read(TokenType.Literal).StringValue);
    Assert.AreEqual(";", queue.Read(TokenType.Semicolon).StringValue);
}
public void SingleCommentTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("//comment one\r\n//comment two");
    Assert.AreEqual(2, tokens.Count);

    var queue = new TokensQueue(tokens);
    var token = queue.Read();
    Assert.AreEqual(TokenType.SingleLineComment, token.Type);
    Assert.AreEqual("//comment one\r\n", token.StringValue);

    token = queue.Read();
    Assert.AreEqual(TokenType.SingleLineComment, token.Type);
    Assert.AreEqual("//comment two", token.StringValue);
}
public ScssParserContext(TokensQueue tokens)
{
    Tokens = tokens;
}
// Parses #rrggbb and #rgb hex colors; in the 3-digit form each nibble is doubled,
// which is the same as multiplying it by 17 (0xA -> 0xAA = 170).
private static Expression ParseHashColor(ref Token token, TokensQueue queue)
{
    var val = queue.Read(TokenType.Literal);
    if (val.StringValue.Length == 6)
    {
        var rh = GetHexChar(ref val, 0);
        var rl = GetHexChar(ref val, 1);
        var gh = GetHexChar(ref val, 2);
        var gl = GetHexChar(ref val, 3);
        var bh = GetHexChar(ref val, 4);
        var bl = GetHexChar(ref val, 5);
        return new ColorExpression(rh * 16 + rl, gh * 16 + gl, bh * 16 + bl);
    }

    if (val.StringValue.Length == 3)
    {
        var rhl = GetHexChar(ref val, 0);
        var ghl = GetHexChar(ref val, 1);
        var bhl = GetHexChar(ref val, 2);
        return new ColorExpression(rhl * 17, ghl * 17, bhl * 17);
    }

    throw new TokenException("invalid hex color", token);
}
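// A worked sketch of the 3-digit expansion used above: each hex digit stands for
// that digit repeated, and repeating a nibble equals multiplying it by 17, so
// "#abc" names the same color as "#aabbcc".
public void ShortHexExpansionSketchTest()
{
    Assert.AreEqual(0xAA, 0xA * 17); // 170
    Assert.AreEqual(0xBB, 0xB * 17); // 187
    Assert.AreEqual(0xCC, 0xC * 17); // 204
}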
// Builds the selector node for a combinator; n-ary combinators (combine, descendant,
// group) are flattened by appending to an existing node of the same kind.
private static SelectorExpression ProcessBinaryExpression(CombinatorType type, SelectorExpression left, TokensQueue tokens)
{
    var tokenPriority = GetPriority(type);
    var other = ParseWithPriority(tokens, tokenPriority + 1);
    switch (type)
    {
        case CombinatorType.Combine:
            var combineCombinator = left as CombineCombinator;
            return combineCombinator != null
                ? combineCombinator.Add(other)
                : new CombineCombinator(left, other);
        case CombinatorType.Child:
            return new ChildCombinator(left, other);
        case CombinatorType.Sibling:
            return new SiblingCombinator(left, other);
        case CombinatorType.Descendant:
            var descendantCombinator = left as DescendantCombinator;
            return descendantCombinator != null
                ? descendantCombinator.Add(other)
                : new DescendantCombinator(left, other);
        case CombinatorType.Group:
            var groupCombinator = left as GroupCombinator;
            return groupCombinator != null
                ? groupCombinator.Add(other)
                : new GroupCombinator(left, other);
        default:
            throw new TokenException("unexpected operator", tokens.LastReadToken);
    }
}
// Determines which combinator separates two selectors. Explicit operators (+, >, ,)
// win; otherwise plain whitespace means a descendant combinator and direct adjacency
// means a compound (combine) selector.
private static CombinatorType ReadCombinatorType(TokensQueue queue)
{
    var hasWhite = false;
    while (!queue.Empty)
    {
        queue.SkipComments();
        var preview = queue.Peek();
        switch (preview.Type)
        {
            case TokenType.Whitespace:
                queue.Read();
                hasWhite = true;
                break;
            case TokenType.Plus:
                queue.Read();
                return CombinatorType.Sibling;
            case TokenType.Greater:
                queue.Read();
                return CombinatorType.Child;
            case TokenType.Comma:
                queue.Read();
                return CombinatorType.Group;
            case TokenType.OpenCurlyBracket:
            case TokenType.CloseParenthesis:
                return CombinatorType.Stop;
            default:
                return hasWhite ? CombinatorType.Descendant : CombinatorType.Combine;
        }
    }

    return CombinatorType.Stop;
}
public static SelectorExpression Parse(TokensQueue tokens, SelectorExpression parent = null)
{
    return ParseWithPriority(tokens, 0, parent);
}
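// A minimal usage sketch for the selector entry point above. The containing class
// name is not shown in these excerpts, so "SelectorParser" is an assumed name; the
// combinator types are the ones constructed by ProcessBinaryExpression.
public void ChildCombinatorSketchTest()
{
    var tokens = new TokensQueue(new Tokenizer().Read("ul > li"));
    var selector = SelectorParser.Parse(tokens);
    Assert.IsTrue(selector is ChildCombinator);
}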
// Builds the expression node for a binary operator; whitespace and commas act as
// grouping operators and are flattened into existing group nodes.
private static Expression ProcessBinaryExpression(Token opToken, Expression left, TokensQueue tokens)
{
    var tokenPriority = GetPriority(opToken.Type);
    var other = ParseWithPriority(tokens, tokenPriority + 1);
    switch (opToken.Type)
    {
        case TokenType.Plus:
            return new AddExpression(left, other);
        case TokenType.Minus:
            return new SubExpression(left, other);
        case TokenType.Multiply:
            return new MulExpression(left, other);
        case TokenType.Divide:
            return new DivExpression(left, other);
        case TokenType.Percentage:
            return new ModExpression(left, other);
        case TokenType.Whitespace:
            if (left is SpaceGroupExpression)
            {
                return ((SpaceGroupExpression)left).Add(other);
            }
            return new SpaceGroupExpression(left, other);
        case TokenType.Comma:
            if (left is CommaGroupExpression)
            {
                return ((CommaGroupExpression)left).Add(other);
            }
            return new CommaGroupExpression(left, other);
        default:
            throw new TokenException("unexpected operator " + opToken.Type, opToken);
    }
}
// Peeks at the upcoming combinator without consuming tokens by reading from a snapshot of the queue.
private static CombinatorType PeekCombinatorType(TokensQueue queue)
{
    return ReadCombinatorType(queue.Moment());
}
protected static SelectorExpression ParseIdSelector(Token token, TokensQueue queue)
{
    var next = queue.Read(TokenType.Literal);
    return new IdSelector(next.StringValue);
}
// Parses a number and, if it is immediately followed by a unit (px, em, %, ...),
// wraps it in a UnitExpression.
private static Expression ParseNumber(ref Token token, TokensQueue queue)
{
    var inner = new NumberExpression(token.NumberValue);
    if (!queue.Empty)
    {
        var preview = queue.Peek();
        if (preview.Type == TokenType.Literal || preview.Type == TokenType.Percentage)
        {
            var unitToken = queue.Read();
            var unit = ParseUnit(ref unitToken);
            return new UnitExpression(inner, unit);
        }
    }

    return inner;
}
protected static SelectorExpression ParseTypeSelector(Token token, TokensQueue queue)
{
    return new TypeSelector(token.StringValue);
}
// Parses a single simple selector: type, .class, #id, :pseudo, [attribute] or the
// parent reference &.
private static SelectorExpression ParseOperand(TokensQueue tokens, SelectorExpression parent = null)
{
    tokens.SkipWhiteAndComments();
    var token = tokens.Read();
    switch (token.Type)
    {
        case TokenType.Literal:
            return ParseTypeSelector(token, tokens);
        case TokenType.Dot:
            return ParseClassSelector(token, tokens);
        case TokenType.Hash:
            return ParseIdSelector(token, tokens);
        case TokenType.Colon:
            return ParsePseudoSelector(token, tokens);
        case TokenType.OpenSquareBracket:
            return ParseAttributeSelector(token, tokens);
        case TokenType.Ampersand:
            return new ParentSelector(parent);
        default:
            throw new TokenException("unexpected token " + token.StringValue, token);
    }
}
// Precedence-climbing loop for value expressions: keeps consuming operators whose
// priority is at least the requested one. Whitespace is remembered and only treated
// as a grouping operator if no explicit operator follows it.
private static Expression ParseWithPriority(TokensQueue tokens, int priority)
{
    var left = ParseOperand(tokens);
    Token? whiteToken = null;
    while (!tokens.Empty)
    {
        tokens.SkipComments();
        var preview = tokens.Peek();
        switch (preview.Type)
        {
            case TokenType.Semicolon:
            case TokenType.CloseParenthesis:
            case TokenType.ExclamationPoint:
                return left;
        }

        var tokenPriority = GetPriority(preview.Type);
        if (tokenPriority < priority)
        {
            return left;
        }

        switch (preview.Type)
        {
            case TokenType.Plus:
            case TokenType.Minus:
            case TokenType.Multiply:
            case TokenType.Divide:
            case TokenType.Percentage:
            case TokenType.LeftShift:
            case TokenType.RightShift:
            case TokenType.Comma:
                var token = tokens.Read();
                left = ProcessBinaryExpression(token, left, tokens);
                whiteToken = null;
                break;
            case TokenType.Whitespace:
                whiteToken = tokens.Read();
                break;
            default:
                if (whiteToken.HasValue)
                {
                    left = ProcessBinaryExpression(whiteToken.Value, left, tokens);
                    whiteToken = null;
                    break;
                }
                throw new TokenException("unexpected token " + preview.StringValue, preview);
        }
    }

    return left;
}
public static Expression Parse(TokensQueue tokens)
{
    // Start below every operator priority so the top-level call accepts any operator.
    return ParseWithPriority(tokens, -10);
}
public void NumberUnitTest()
{
    var tokenizer = new Tokenizer();
    var tokens = tokenizer.Read("123px 456em");
    Assert.AreEqual(5, tokens.Count);

    var queue = new TokensQueue(tokens);
    Assert.AreEqual(123.0, queue.Read(TokenType.Number).NumberValue);
    Assert.AreEqual("px", queue.Read(TokenType.Literal).StringValue);
    queue.Read(TokenType.Whitespace);
    Assert.AreEqual(456.0, queue.Read(TokenType.Number).NumberValue);
    Assert.AreEqual("em", queue.Read(TokenType.Literal).StringValue);
}
// Precedence-climbing loop for selectors: peeks at the upcoming combinator and only
// consumes it if its priority is at least the requested one.
private static SelectorExpression ParseWithPriority(TokensQueue tokens, int priority, SelectorExpression parent = null)
{
    var left = ParseOperand(tokens, parent);
    while (!tokens.Empty)
    {
        tokens.SkipComments();
        var preview = tokens.Peek();
        switch (preview.Type)
        {
            case TokenType.CloseParenthesis:
            case TokenType.OpenCurlyBracket:
                return left;
        }

        var combinatorType = PeekCombinatorType(tokens);
        var tokenPriority = GetPriority(combinatorType);
        if (tokenPriority < priority)
        {
            return left;
        }

        combinatorType = ReadCombinatorType(tokens);
        if (combinatorType == CombinatorType.Stop)
            return left;

        left = ProcessBinaryExpression(combinatorType, left, tokens);
    }

    return left;
}
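// A hedged sketch of the whitespace rule implemented by ReadCombinatorType, using
// the same assumed "SelectorParser" name as above: adjacent simple selectors form a
// compound (combine) selector, while whitespace between them means descendant.
public void CombinatorWhitespaceSketchTest()
{
    var compound = SelectorParser.Parse(new TokensQueue(new Tokenizer().Read("a.link")));
    Assert.IsTrue(compound is CombineCombinator);

    var descendant = SelectorParser.Parse(new TokensQueue(new Tokenizer().Read("a .link")));
    Assert.IsTrue(descendant is DescendantCombinator);
}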