/// <summary>Escape state: the input "\S" must be tokenized as a <c>NonWhitespaceToken</c>.</summary>
public void EscapeStateTokenizeNonWhitespace()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new NonWhitespaceToken();

    // Act
    var tokens = lexer.Tokenize("\\S");

    // Assert — Assert.AreEqual takes (expected, actual); the original call had
    // them swapped, which produces misleading failure messages.
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Escape state: the input "\d" must be tokenized as a <c>NumericToken</c>.</summary>
public void EscapeStateTokenizeNumeric()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new NumericToken();

    // Act
    var tokens = lexer.Tokenize("\\d");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Begin-set state: "[^" must end with a <c>NotToken</c> for the negation marker.</summary>
public void BeginSetStateTokenizeNot()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new NotToken();

    // Act
    var tokens = lexer.Tokenize("[^");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Escape state: "\z" has no special meaning, so it must fall back to a literal 'z'.</summary>
public void EscapeStateTokenizeLiteral()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new LiteralToken { Character = 'z' };

    // Act
    var tokens = lexer.Tokenize("\\z");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Escape state: the input "\w" must be tokenized as a <c>WordToken</c>.</summary>
public void EscapeStateTokenizeWord()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new WordToken();

    // Act
    var tokens = lexer.Tokenize("\\w");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Set state: the '-' in "[a-" must produce a <c>RangeToken</c>.</summary>
public void SetStateTokenizeRange()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new RangeToken();

    // Act
    var tokens = lexer.Tokenize("[a-");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Set state: the closing ']' in "[a]" must produce a <c>BracketRightToken</c>.</summary>
public void SetStateTokenizeBracketRight()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new BracketRightToken();

    // Act
    var tokens = lexer.Tokenize("[a]");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.Last());
}
/// <summary>Repetition state: a quantifier with a duplicated comma ("{1,,5}") must be rejected.</summary>
public void RepetitionStateTokenizeToManyCommas()
{
    // Arrange — "{1,,5}" is malformed: only one comma is allowed in a quantifier.
    var lexer = new Lexer();

    // Act/Assert — ToList forces enumeration of the lazy token stream,
    // which is where the ArgumentException is expected to surface.
    void Act() => lexer.Tokenize("{1,,5}").ToList();
    Assert.Throws<ArgumentException>(Act);
}
/// <summary>Literal state: the input "[" must start with a <c>BracketLeftToken</c>.</summary>
public void LiteralStateTokenizeBracketLeft()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new BracketLeftToken();

    // Act
    var tokens = lexer.Tokenize("[");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.First());
}
/// <summary>Repetition state: an empty quantifier ("{}") must be rejected.</summary>
public void RepetitionStateTokenizeMissingValues()
{
    // Arrange — "{}" gives the quantifier neither a minimum nor a maximum.
    var lexer = new Lexer();

    // Act/Assert — ToList forces enumeration of the lazy token stream,
    // which is where the ArgumentException is expected to surface.
    void Act() => lexer.Tokenize("{}").ToList();
    Assert.Throws<ArgumentException>(Act);
}
/// <summary>Repetition state: a non-numeric bound ("{1,a}") must be rejected.</summary>
public void RepetitionStateTokenizeInvalid()
{
    // Arrange — 'a' is not a valid numeric bound inside a quantifier.
    var lexer = new Lexer();

    // Act/Assert — ToList forces enumeration of the lazy token stream,
    // which is where the ArgumentException is expected to surface.
    void Act() => lexer.Tokenize("{1,a}").ToList();
    Assert.Throws<ArgumentException>(Act);
}
/// <summary>Literal state: "?" must produce a repetition of zero-or-one occurrences.</summary>
public void LiteralStateTokenizeZeroOrOne()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new RepetitionToken { MinOccurs = 0, MaxOccurs = 1 };

    // Act
    var tokens = lexer.Tokenize("?");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.First());
}
/// <summary>Literal state: ")" must produce a <c>ParenthesisRightToken</c>.</summary>
public void LiteralStateTokenizeParenthesisRight()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new ParenthesisRightToken();

    // Act
    var tokens = lexer.Tokenize(")");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.First());
}
/// <summary>Literal state: "abc" must tokenize to three literal tokens, in order.</summary>
public void LiteralStateTokenizeLiterals()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new List<IToken>
    {
        new LiteralToken { Character = 'a' },
        new LiteralToken { Character = 'b' },
        new LiteralToken { Character = 'c' },
    };

    // Act
    var tokens = lexer.Tokenize("abc");

    // Assert — CollectionAssert.AreEqual reports the first differing element on
    // failure, unlike Assert.IsTrue(SequenceEqual) which only says "false".
    CollectionAssert.AreEqual(expected, tokens.ToList());
}
/// <summary>Repetition state: the short form "{7}" means exactly seven occurrences (min == max).</summary>
public void RepetitionStateTokenizeShort()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new RepetitionToken { MinOccurs = 7, MaxOccurs = 7 };

    // Act
    var tokens = lexer.Tokenize("{7}");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.First());
}
/// <summary>Literal state: "." must produce an <c>AnyToken</c> (match-any wildcard).</summary>
public void LiteralStateTokenizeAny()
{
    // Arrange
    var lexer = new Lexer();
    var expected = new AnyToken();

    // Act
    var tokens = lexer.Tokenize(".");

    // Assert — fixed argument order: Assert.AreEqual expects (expected, actual).
    Assert.AreEqual(expected, tokens.First());
}
/// <summary>
/// Tokenizes <paramref name="expression"/> with a fresh <see cref="Lexer"/> and
/// hands the token stream to the token-based <c>Parse</c> overload.
/// </summary>
/// <param name="expression">The regular-expression text to parse.</param>
/// <returns>The root node of the resulting syntax tree.</returns>
public INode Parse(string expression)
{
    var lexer = new Lexer();
    return Parse(lexer.Tokenize(expression));
}