/// <summary>
/// Extends the generic lexer with a SYMBOL token: registers an FSM path that
/// matches any run of characters not in the exclusion set, starting with a
/// non-digit, and tags the resulting match as <c>LispLexer.SYMBOL</c>.
/// </summary>
/// <param name="token">The token id being extended; only SYMBOL is handled.</param>
/// <param name="lexem">Lexeme metadata for the token (unused here).</param>
/// <param name="lexer">The generic lexer whose FSM is being extended.</param>
public static void SymbolExtension(LispLexer token, LexemeAttribute lexem, GenericLexer<LispLexer> lexer)
{
    if (token == LispLexer.SYMBOL)
    {
        // Callback on the "end_symbol" node: stores the derived token id in the
        // FSMMatch object so it is later returned by GenericLexer.Tokenize.
        NodeCallback<GenericToken> callback = (FSMMatch<GenericToken> match) =>
        {
            match.Properties[GenericLexer<LispLexer>.DerivedToken] = LispLexer.SYMBOL;
            return match;
        };

        var fsmBuilder = lexer.FSMBuilder;

        // Characters that terminate / may not start a symbol: list delimiters,
        // reader macros, digits (a symbol may not BEGIN with one), dot,
        // whitespace, quote, and line breaks.
        var symbolCharExclusions = new char[]
        {
            '(', ')', '|', '#',
            '1', '2', '3', '4', '5', '6', '7', '8', '9', '0',
            '.', ' ', '\'', '\r', '\n'
        };

        fsmBuilder.GoTo(GenericLexer<LispLexer>.start)
                  .ExceptTransition(symbolCharExclusions)
                  .Mark("end_symbol")
                  .ExceptTransitionTo(symbolCharExclusions, "end_symbol")
                  .End(GenericToken.Extension)
                  .CallBack(callback);

        // NOTE: removed an unused `var graph = fsmBuilder.Fsm.ToGraphViz();`
        // debug leftover and the stray empty statement that followed it.
    }
}
/// <summary>
/// Builds the AST for <paramref name="input"/>: wraps the text in a lexer,
/// feeds the lexer through a token source into a parser, and returns the
/// parsed program nodes.
/// </summary>
private static IEnumerable<IAstNode> GetProgramAst(string input)
{
    var tokens = new TokenSource<LispToken, LispTokenKind>(new LispLexer(new TextSource(input)));
    var parser = new LispParser(tokens);
    return parser.ParseProgram();
}
// Nested and sibling brackets each produce their own "(" / ")" token.
public void Can_tokenize_multiple_brackets()
{
    var sut = new LispLexer();

    var tokens = sut.GetTokenList("(() (()()()())");

    var expectedTokens = new[]
    {
        "(", "(", ")", "(", "(", ")", "(", ")", "(", ")", "(", ")", ")"
    };
    CollectionAssert.AreEqual(expectedTokens, tokens);
}
// Integers of varying lengths are emitted as single tokens.
public void Can_tokenize_integer()
{
    var sut = new LispLexer();

    var tokens = sut.GetTokenList("(1 22 3456789 )");

    var expectedTokens = new[] { "(", "1", "22", "3456789", ")" };
    CollectionAssert.AreEqual(expectedTokens, tokens);
}
// Double-quoted strings are tokenized with their surrounding quotes stripped.
public void Can_tokenize_quoted_strings()
{
    var sut = new LispLexer();

    var tokens = sut.GetTokenList("(test \"neki string\" \"drugi string\")");

    var expectedTokens = new[] { "(", "test", "neki string", "drugi string", ")" };
    CollectionAssert.AreEqual(expectedTokens, tokens);
}
// Empty input yields an empty token list.
public void Can_tokenize_empty_string()
{
    var sut = new LispLexer();

    var tokens = sut.GetTokenList("");

    var expectedTokens = new string[0];
    CollectionAssert.AreEqual(expectedTokens, tokens);
}
// "()" tokenizes as exactly one open and one close paren.
public void Can_tokenize_empty_list()
{
    var sut = new LispLexer();

    var tokens = sut.GetTokenList("()");

    var expectedTokens = new[] { "(", ")" };
    CollectionAssert.AreEqual(expectedTokens, tokens);
}
// A quoted string yields a single String token whose value excludes the quotes.
internal void GetTokens_String_ParsedCorrectly()
{
    var sut = new LispLexer(new TextSource("\"foobar\""));

    var actual = sut.GetTokens().Single();

    Assert.Equal("foobar", actual.Value);
    Assert.Equal(LispTokenKind.String, actual.TokenKind);
}
// Everything after ';' is a comment: only the leading symbol is tokenized.
internal void GetTokens_TokenWithComment()
{
    var sut = new LispLexer(new TextSource("foo;bar"));

    var actual = sut.GetTokens().Single();

    Assert.Equal(LispTokenKind.Symbol, actual.TokenKind);
    Assert.Equal("foo", actual.Value);
}
// Parametrized: a single-token input round-trips its text and is classified
// with the expected token kind.
internal void GetTokens_SingleTokenInputs(string input, LispTokenKind expectedTokenKind)
{
    var sut = new LispLexer(new TextSource(input));

    var actual = sut.GetTokens().Single();

    Assert.Equal(expectedTokenKind, actual.TokenKind);
    Assert.Equal(input, actual.Value);
}
// A nested expression produces the full token stream in source order.
internal void GetTokens_MultipleTokensInInput()
{
    const string input = "(add (mul 2 2) 2)";
    var sut = new LispLexer(new TextSource(input));

    var actual = sut.GetTokens().ToArray();

    var expected = new[]
    {
        new LispToken(LispTokenKind.LParen, "("),
        new LispToken(LispTokenKind.Symbol, "add"),
        new LispToken(LispTokenKind.LParen, "("),
        new LispToken(LispTokenKind.Symbol, "mul"),
        new LispToken(LispTokenKind.Number, "2"),
        new LispToken(LispTokenKind.Number, "2"),
        new LispToken(LispTokenKind.RParen, ")"),
        new LispToken(LispTokenKind.Number, "2"),
        new LispToken(LispTokenKind.RParen, ")"),
    };
    AssertTokens(expected, actual);
}