public void TestLexerError()
{
    // Build a lexer that only accepts alphanumeric/dash identifiers.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<AlphaNumDashId>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result;

    // '2' is not a valid identifier start, so tokenizing must fail there.
    var source = "hello world 2 + 2 ";
    var lexed = lexer.Tokenize(source);
    Assert.True(lexed.IsError);

    var error = lexed.Error;
    Assert.Equal(0, error.Line);
    Assert.Equal(13, error.Column);
    Assert.Equal('2', error.UnexpectedChar);
    Assert.Contains("Unrecognized symbol", error.ToString());
}
public void TestSingleQuotedString()
{
    // Single-quoted string lexer: a backslash escapes an embedded quote.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<SingleQuotedString>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result;

    var source = "hello \\'world ";
    var expected = "hello 'world ";

    var lexed = lexer.Tokenize($"'{source}'");
    Assert.True(lexed.IsOk);
    Assert.Equal(2, lexed.Tokens.Count);

    var token = lexed.Tokens[0];
    Assert.Equal(SingleQuotedString.SingleString, token.TokenID);
    // StringWithoutQuotes drops the delimiters and resolves the escape.
    Assert.Equal(expected, token.StringWithoutQuotes);
}
public static void TestTokenCallBacks()
{
    // Build the lexer and attach token callbacks, then dump the tokens of "aaa bbb".
    // Fix: the original wrapped the body in "if (!res.IsError)", so a broken lexer
    // build made the test silently pass; assert the build instead. Also removes a
    // stray empty statement (";;").
    var res = LexerBuilder.BuildLexer(new BuildResult<ILexer<CallbackTokens>>());
    Assert.False(res.IsError);

    var lexer = res.Result as GenericLexer<CallbackTokens>;
    CallBacksBuilder.BuildCallbacks<CallbackTokens>(lexer);

    var tokens = lexer.Tokenize("aaa bbb").ToList();
    foreach (var token in tokens)
    {
        Console.WriteLine($"{token.TokenID} - {token.Value}");
    }
}
public void TestStringDelimiters()
{
    // A doubled delimiter ('') inside the string stands for a literal quote.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<StringDelimiters>>());
    Assert.True(buildResult.IsOk);
    var lexer = buildResult.Result;

    var lexed = lexer.Tokenize("'that''s it'");
    Assert.True(lexed.IsOk);
    Assert.NotNull(lexed.Tokens);
    Assert.NotEmpty(lexed.Tokens);
    Assert.Equal(2, lexed.Tokens.Count);

    var token = lexed.Tokens[0];
    Assert.Equal("'that's it'", token.Value);
    Assert.Equal("that's it", token.StringWithoutQuotes);
}
public void TestTokenCallbacks()
{
    // Callbacks rewrite token values to upper case and retag the second token as SKIP.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<CallbackTokens>>());
    Assert.False(buildResult.IsError);

    var lexer = buildResult.Result as GenericLexer<CallbackTokens>;
    CallBacksBuilder.BuildCallbacks<CallbackTokens>(lexer);

    var tokens = lexer.Tokenize("aaa bbb").ToList();
    Assert.Equal(3, tokens.Count);
    Assert.Equal("AAA", tokens[0].Value);
    Assert.Equal("BBB", tokens[1].Value);
    Assert.Equal(CallbackTokens.SKIP, tokens[1].TokenID);
}
// Lexes two ints, a /* ... */ comment spanning two lines, and a double, asserting
// each token's exact (line, column) position and value.
// NOTE(review): the verbatim string literals below span multiple physical lines in
// the real file; the line breaks were collapsed in this flattened view, which is why
// the position asserts (lines 1-2) appear not to match the literal — confirm against
// the original file before editing.
public void TestGenericMultiLineComment() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult <ILexer <CommentsToken> >()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer <CommentsToken>; var dump = lexer.ToString(); var code = @"1 2 /* multi line comment on 2 lines */ 3.0"; var r = lexer.Tokenize(code); Assert.True(r.IsOk); var tokens = r.Tokens; Assert.Equal(5, tokens.Count); var intToken1 = tokens[0]; var intToken2 = tokens[1]; var multiLineCommentToken = tokens[2]; var doubleToken = tokens[3]; Assert.Equal(CommentsToken.INT, intToken1.TokenID); Assert.Equal("1", intToken1.Value); Assert.Equal(0, intToken1.Position.Line); Assert.Equal(0, intToken1.Position.Column); Assert.Equal(CommentsToken.INT, intToken2.TokenID); Assert.Equal("2", intToken2.Value); Assert.Equal(1, intToken2.Position.Line); Assert.Equal(0, intToken2.Position.Column); Assert.Equal(CommentsToken.COMMENT, multiLineCommentToken.TokenID); Assert.Equal(@" multi line comment on 2 lines ", multiLineCommentToken.Value); Assert.Equal(1, multiLineCommentToken.Position.Line); Assert.Equal(2, multiLineCommentToken.Position.Column); Assert.Equal(CommentsToken.DOUBLE, doubleToken.TokenID); Assert.Equal("3.0", doubleToken.Value); Assert.Equal(2, doubleToken.Position.Line); Assert.Equal(22, doubleToken.Position.Column); }
public void TestGenericMultiLineCommentWithSingleLineComment()
{
    // A lexer configured with only single-line comments must reject multi-line
    // comment syntax; the error reports '*' as the unexpected character.
    var lexerRes = LexerBuilder.BuildLexer(new BuildResult<ILexer<SingleLineCommentsToken>>());
    Assert.False(lexerRes.IsError);

    var lexer = lexerRes.Result as GenericLexer<SingleLineCommentsToken>;
    // Fix: assert the cast succeeded up front. The original used "lexer?.Tokenize(...)"
    // and then dereferenced the result unconditionally, which would have surfaced a
    // failed cast as a NullReferenceException instead of a clear test failure.
    Assert.NotNull(lexer);
    var dump = lexer.ToString();

    // The input verbatim string spans three lines in the real file (see the
    // companion multi-line comment test); reconstructed here with real newlines.
    var r = lexer.Tokenize(@"1
2 /* multi line
comment on 2 lines */ 3.0");
    Assert.True(r.IsError);
    Assert.Equal('*', r.Error.UnexpectedChar);
}
// A lexer configured with only multi-line comments must reject "//" syntax;
// this variant expects a thrown LexerException whose error reports '/' as the
// unexpected character.
// NOTE(review): the verbatim input literal spans multiple physical lines in the
// real file; line breaks were collapsed in this flattened view.
public void TestGenericSingleLineComment() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult <ILexer <MultiLineCommentsToken> >()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer <MultiLineCommentsToken>; var dump = lexer.ToString(); var error = Assert.Throws <LexerException>(() => { lexer.Tokenize(@"1 2 // single line comment 3.0"); }); Assert.Equal('/', error.Error.UnexpectedChar); }
public void TestCustomId()
{
    // Custom identifier pattern: '-' and '_' are legal identifier characters.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<CustomId>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result;

    var lexed = lexer.Tokenize("a_-Bc ZyX-_");
    Assert.True(lexed.IsOk);
    Assert.Equal(3, lexed.Tokens.Count);

    var first = lexed.Tokens[0];
    Assert.Equal(CustomId.ID, first.TokenID);
    Assert.Equal("a_-Bc", first.Value);

    var second = lexed.Tokens[1];
    Assert.Equal(CustomId.ID, second.TokenID);
    Assert.Equal("ZyX-_", second.Value);
}
public void MultipleAttributes()
{
    // Each CommentsTokenError type declares one comment attribute too many;
    // the builder must report exactly one FATAL error with the matching message.
    // (The original iterated a single-element "expectedErrors" array; the loop
    // is collapsed into a direct assertion for each case.)

    var lexerRes6 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError6>>());
    Assert.True(lexerRes6.IsError);
    Assert.Single(lexerRes6.Errors);
    Assert.True(lexerRes6.Errors.Exists(x =>
        x.Level == ErrorLevel.FATAL && x.Message == "too many comment lexem"));

    var lexerRes5 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError5>>());
    Assert.True(lexerRes5.IsError);
    Assert.Single(lexerRes5.Errors);
    Assert.True(lexerRes5.Errors.Exists(x =>
        x.Level == ErrorLevel.FATAL && x.Message == "too many multi-line comment lexem"));

    var lexerRes4 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError4>>());
    Assert.True(lexerRes4.IsError);
    Assert.Single(lexerRes4.Errors);
    Assert.True(lexerRes4.Errors.Exists(x =>
        x.Level == ErrorLevel.FATAL && x.Message == "too many single-line comment lexem"));
}
public void TestKeyWordIgnoreCase()
{
    // Case-insensitive keyword matching: both spellings lex as KEYWORD,
    // but each token keeps its original source casing as Value.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<KeyWordIgnoreCase>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result;

    var lexed = lexer.Tokenize("keyword KeYwOrD");
    Assert.True(lexed.IsOk);
    Assert.Equal(3, lexed.Tokens.Count);

    var lower = lexed.Tokens[0];
    Assert.Equal(KeyWordIgnoreCase.KEYWORD, lower.TokenID);
    Assert.Equal("keyword", lower.Value);

    var mixed = lexed.Tokens[1];
    Assert.Equal(KeyWordIgnoreCase.KEYWORD, mixed.TokenID);
    Assert.Equal("KeYwOrD", mixed.Value);
}
// Older variant of the multi-line comment test: Tokenize here returns an
// enumerable (hence .ToList()) rather than a result object. Asserts the same
// INT/INT/COMMENT/DOUBLE sequence with exact positions.
// NOTE(review): the verbatim string literals below span multiple physical lines
// in the real file; line breaks were collapsed in this flattened view.
public void TestGenericMultiLineComment() { var lexerRes = LexerBuilder.BuildLexer <CommentsToken>(new BuildResult <ILexer <CommentsToken> >()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer <CommentsToken>; string dump = lexer.ToString(); string code = @"1 2 /* multi line comment on 2 lines */ 3.0"; var tokens = lexer.Tokenize(code).ToList(); Assert.Equal(5, tokens.Count); var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; var token4 = tokens[3]; Assert.Equal(CommentsToken.INT, token1.TokenID); Assert.Equal("1", token1.Value); Assert.Equal(0, token1.Position.Line); Assert.Equal(0, token1.Position.Column); Assert.Equal(CommentsToken.INT, token2.TokenID); Assert.Equal("2", token2.Value); Assert.Equal(1, token2.Position.Line); Assert.Equal(0, token2.Position.Column); Assert.Equal(CommentsToken.COMMENT, token3.TokenID); Assert.Equal(@" multi line comment on 2 lines ", token3.Value); Assert.Equal(1, token3.Position.Line); Assert.Equal(2, token3.Position.Column); Assert.Equal(CommentsToken.DOUBLE, token4.TokenID); Assert.Equal("3.0", token4.Value); Assert.Equal(2, token4.Position.Line); Assert.Equal(22, token4.Position.Column); }
public void MixedErrors()
{
    // Token types mixing several invalid comment-attribute combinations must
    // report every corresponding FATAL error, in order, starting at index 1.
    // Fix: the expected message for CommentsTokenError7 was a string literal
    // broken across a physical line break in the original source (a syntax
    // error); restored to the message asserted elsewhere in this file.

    // Error10: all four problems at once.
    var lexerRes10 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError10>>());
    Assert.True(lexerRes10.IsError);
    Assert.Equal(5, lexerRes10.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, lexerRes10.Errors[1].Level);
    Assert.Equal("too many comment lexem", lexerRes10.Errors[1].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes10.Errors[2].Level);
    Assert.Equal("too many multi-line comment lexem", lexerRes10.Errors[2].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes10.Errors[3].Level);
    Assert.Equal("too many single-line comment lexem", lexerRes10.Errors[3].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes10.Errors[4].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", lexerRes10.Errors[4].Message);

    // Error9: duplicate multi-line and duplicate single-line comment lexems.
    var lexerRes9 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError9>>());
    Assert.True(lexerRes9.IsError);
    Assert.Equal(3, lexerRes9.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, lexerRes9.Errors[1].Level);
    Assert.Equal("too many multi-line comment lexem", lexerRes9.Errors[1].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes9.Errors[2].Level);
    Assert.Equal("too many single-line comment lexem", lexerRes9.Errors[2].Message);

    // Error8: duplicate multi-line lexem plus the comment/multi-line conflict.
    var lexerRes8 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError8>>());
    Assert.True(lexerRes8.IsError);
    Assert.Equal(3, lexerRes8.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, lexerRes8.Errors[1].Level);
    Assert.Equal("too many multi-line comment lexem", lexerRes8.Errors[1].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes8.Errors[2].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", lexerRes8.Errors[2].Message);

    // Error7: duplicate single-line lexem plus the comment conflict.
    var lexerRes7 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError7>>());
    Assert.True(lexerRes7.IsError);
    Assert.Equal(3, lexerRes7.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, lexerRes7.Errors[1].Level);
    Assert.Equal("too many single-line comment lexem", lexerRes7.Errors[1].Message);
    Assert.Equal(ErrorLevel.FATAL, lexerRes7.Errors[2].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", lexerRes7.Errors[2].Message);
}
// Single-line "//" comment variant: expects INT, INT, COMMENT, DOUBLE (+EOS) with
// exact positions; the comment Value is compared after stripping \r and \n because
// the trailing line ending is platform-dependent.
// NOTE(review): the verbatim input literal spans multiple physical lines in the
// real file; line breaks were collapsed in this flattened view.
public void TestGenericSingleLineComment() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult <ILexer <CommentsToken> >()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer <CommentsToken>; var dump = lexer.ToString(); var r = lexer.Tokenize(@"1 2 // single line comment 3.0"); Assert.True(r.IsOk); var tokens = r.Tokens; Assert.Equal(5, tokens.Count); var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; var token4 = tokens[3]; Assert.Equal(CommentsToken.INT, token1.TokenID); Assert.Equal("1", token1.Value); Assert.Equal(0, token1.Position.Line); Assert.Equal(0, token1.Position.Column); Assert.Equal(CommentsToken.INT, token2.TokenID); Assert.Equal("2", token2.Value); Assert.Equal(1, token2.Position.Line); Assert.Equal(0, token2.Position.Column); Assert.Equal(CommentsToken.COMMENT, token3.TokenID); Assert.Equal(" single line comment", token3.Value.Replace("\r", "").Replace("\n", "")); Assert.Equal(1, token3.Position.Line); Assert.Equal(2, token3.Position.Column); Assert.Equal(CommentsToken.DOUBLE, token4.TokenID); Assert.Equal("3.0", token4.Value); Assert.Equal(2, token4.Position.Line); Assert.Equal(0, token4.Position.Column); }
public void TestIssue106()
{
    // Regression for issue 106: "1." must lex as an integer followed by a
    // period token, not fail as a malformed double.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<Issue106>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result as GenericLexer<Issue106>;

    var lexed = lexer.Tokenize("1.");
    Assert.True(lexed.IsOk);

    var tokens = lexed.Tokens;
    Assert.NotNull(tokens);
    Assert.Equal(3, tokens.Count);

    Assert.Equal(Issue106.Integer, tokens[0].TokenID);
    Assert.Equal(1, tokens[0].IntValue);
    Assert.Equal(Issue106.Period, tokens[1].TokenID);
}
public void TestExtensions()
{
    // A lexer extension adds a DATE token; plain doubles must still work.
    var buildResult = LexerBuilder.BuildLexer(
        new BuildResult<ILexer<Extensions>>(),
        ExtendedGenericLexer.AddExtension);
    Assert.False(buildResult.IsError);

    var lexer = buildResult.Result as GenericLexer<Extensions>;
    Assert.NotNull(lexer);

    var lexed = lexer.Tokenize("20.02.2018 3.14");
    Assert.True(lexed.IsOk);
    Assert.Equal(3, lexed.Tokens.Count);

    Assert.Equal(Extensions.DATE, lexed.Tokens[0].TokenID);
    Assert.Equal("20.02.2018", lexed.Tokens[0].Value);
    Assert.Equal(Extensions.DOUBLE, lexed.Tokens[1].TokenID);
    Assert.Equal("3.14", lexed.Tokens[1].Value);
}
// An unterminated /* comment at end of input is tolerated: lexing succeeds and
// the comment token carries everything after the opener.
// NOTE(review): the verbatim input literal spans two physical lines in the real
// file (hence the line-1 position asserts); line breaks were collapsed in this view.
public void NotEndingMultiComment() { var lexerRes = LexerBuilder.BuildLexer(new BuildResult <ILexer <MultiLineCommentsToken> >()); Assert.False(lexerRes.IsError); var lexer = lexerRes.Result as GenericLexer <MultiLineCommentsToken>; var dump = lexer.ToString(); var code = @"1 2 /* not ending comment"; var r = lexer.Tokenize(code); Assert.True(r.IsOk); var tokens = r.Tokens; Assert.Equal(4, tokens.Count); var token1 = tokens[0]; var token2 = tokens[1]; var token3 = tokens[2]; Assert.Equal(MultiLineCommentsToken.INT, token1.TokenID); Assert.Equal("1", token1.Value); Assert.Equal(0, token1.Position.Line); Assert.Equal(0, token1.Position.Column); Assert.Equal(MultiLineCommentsToken.INT, token2.TokenID); Assert.Equal("2", token2.Value); Assert.Equal(1, token2.Position.Line); Assert.Equal(0, token2.Position.Column); Assert.Equal(MultiLineCommentsToken.COMMENT, token3.TokenID); Assert.Equal(@" not ending comment", token3.Value); Assert.Equal(1, token3.Position.Line); Assert.Equal(2, token3.Position.Column); }
public void TestIssue177()
{
    // Regression for issue 177: tokens on later lines must carry correct
    // (line, column) positions. Fixes: removed two stray empty statements and
    // a dead commented-out second scenario (which also asserted the wrong
    // variables, res/result instead of res2/result2). The three-line verbatim
    // input is reconstructed from the position asserts below.
    var res = LexerBuilder.BuildLexer(new BuildResult<ILexer<Issue177Generic>>());
    Assert.False(res.IsError);
    var lexer = res.Result;

    var result = lexer.Tokenize(@"1 2
2 3
4 5");
    Assert.True(result.IsOk);
    Assert.Equal(9, result.Tokens.Count);

    // Checks one token's value and position in a single call.
    Action<Token<Issue177Generic>, int, int, int> assertToken =
        (token, expectedLine, expectedColumn, expectedValue) =>
        {
            Assert.Equal(expectedValue, token.IntValue);
            Assert.Equal(expectedLine, token.Position.Line);
            Assert.Equal(expectedColumn, token.Position.Column);
        };

    assertToken(result.Tokens[0], 0, 0, 1);
    assertToken(result.Tokens[1], 0, 2, 2);
    assertToken(result.Tokens[3], 1, 0, 2);
    assertToken(result.Tokens[4], 1, 2, 3);
    assertToken(result.Tokens[6], 2, 0, 4);
    assertToken(result.Tokens[7], 2, 2, 5);
}
public void TestMixedEOLComment()
{
    // Mixed line endings (\n, \r\n and a bare \r inside the comment) must all
    // advance the line counter consistently.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsToken>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result as GenericLexer<CommentsToken>;
    var dump = lexer.ToString();

    var code = "1\n2\r\n/* multi line \rcomment on 2 lines */ 3.0";
    var lexed = lexer.Tokenize(code);
    Assert.True(lexed.IsOk);

    var tokens = lexed.Tokens;
    Assert.Equal(5, tokens.Count);

    var firstInt = tokens[0];
    Assert.Equal(CommentsToken.INT, firstInt.TokenID);
    Assert.Equal("1", firstInt.Value);
    Assert.Equal(0, firstInt.Position.Line);
    Assert.Equal(0, firstInt.Position.Column);

    var secondInt = tokens[1];
    Assert.Equal(CommentsToken.INT, secondInt.TokenID);
    Assert.Equal("2", secondInt.Value);
    Assert.Equal(1, secondInt.Position.Line);
    Assert.Equal(0, secondInt.Position.Column);

    var comment = tokens[2];
    Assert.Equal(CommentsToken.COMMENT, comment.TokenID);
    Assert.Equal(" multi line \rcomment on 2 lines ", comment.Value);
    Assert.Equal(2, comment.Position.Line);
    Assert.Equal(0, comment.Position.Column);

    var dbl = tokens[3];
    Assert.Equal(CommentsToken.DOUBLE, dbl.TokenID);
    Assert.Equal("3.0", dbl.Value);
    Assert.Equal(3, dbl.Position.Line);
    Assert.Equal(22, dbl.Position.Column);
}
public void TestLexerBuildErrors()
{
    // BadTokens declares one invalid regex (reported as ERROR) and one enum
    // member with no lexeme attribute (reported as WARN).
    // Fixes: removed a stray empty statement and collapsed the redundant
    // two-step "declare then reassign" of the build result.
    var result = LexerBuilder.BuildLexer<BadTokens>(new BuildResult<ILexer<BadTokens>>());
    Assert.True(result.IsError);
    Assert.Equal(2, result.Errors.Count);

    var errors = result.Errors.Where(e => e.Level == ErrorLevel.ERROR).ToList();
    var warnings = result.Errors.Where(e => e.Level == ErrorLevel.WARN).ToList();

    Assert.Single(errors);
    var errorMessage = errors[0].Message;
    Assert.True(errorMessage.Contains(BadTokens.BadRegex.ToString()) && errorMessage.Contains("BadRegex"));

    Assert.Single(warnings);
    var warnMessage = warnings[0].Message;
    Assert.True(warnMessage.Contains(BadTokens.MissingLexeme.ToString()) && warnMessage.Contains("not have Lexeme"));
}
public void TestSelfEscapedString()
{
    // Self-escaped strings: a doubled quote stands for a literal quote.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<SelfEscapedString>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result as GenericLexer<SelfEscapedString>;
    Assert.NotNull(lexer);

    var tokens = lexer.Tokenize("'that''s it'").ToList();
    Assert.Equal(2, tokens.Count);
    Assert.Equal(SelfEscapedString.STRING, tokens[0].TokenID);
    Assert.Equal("'that's it'", tokens[0].Value);

    // Non-ASCII content must round-trip unchanged.
    // NOTE(review): the literal below looks mojibake'd ("voilĂ " for "voilà") in
    // the original source; reproduced byte-for-byte — confirm against the file.
    tokens = lexer.Tokenize("'et voilĂ '").ToList();
    Assert.Equal(2, tokens.Count);
    Assert.Equal(SelfEscapedString.STRING, tokens[0].TokenID);
    Assert.Equal("'et voilĂ '", tokens[0].Value);
}
public void TestManyString()
{
    // One lexer accepting two string flavors: backslash-escaped double-quoted
    // and self-escaped single-quoted.
    var buildResult = LexerBuilder.BuildLexer(new BuildResult<ILexer<ManyString>>());
    Assert.False(buildResult.IsError);
    var lexer = buildResult.Result;

    var doubleQuoted = "\"hello \\\"world \"";
    var singleQuoted = "'that''s it'";

    var tokens = lexer.Tokenize($"{doubleQuoted} {singleQuoted}").ToList();
    Assert.Equal(3, tokens.Count);

    Assert.Equal(ManyString.STRING, tokens[0].TokenID);
    Assert.Equal(doubleQuoted, tokens[0].Value);
    Assert.Equal(ManyString.STRING, tokens[1].TokenID);
    Assert.Equal(singleQuoted, tokens[1].Value);
}
public void TestCharTokens()
{
    // Char literal lexing: a plain char, an escaped quote, and a \uXXXX escape.
    // Fixes: Assert.NotNull(lexer) moved before the first dereference (the
    // original called lexer.ToString() first, so a failed cast would NRE);
    // removed the misspelled unused local "grpah" and the unused "dump" and
    // "lastToken" locals (the dump calls are kept, results discarded, so the
    // diagnostic paths still execute).
    var res = LexerBuilder.BuildLexer(new BuildResult<ILexer<CharTokens>>());
    Assert.False(res.IsError);
    var lexer = res.Result as GenericLexer<CharTokens>;
    Assert.NotNull(lexer);
    _ = lexer.ToString();
    _ = lexer.ToGraphViz();

    // Plain character.
    var res1 = lexer.Tokenize("'c'");
    Assert.False(res1.IsError);
    Assert.Equal(2, res1.Tokens.Count);
    var token = res1.Tokens[0];
    Assert.Equal('c', token.CharValue);
    Assert.Equal(CharTokens.MyChar, token.TokenID);

    // Escaped single quote.
    var source = "'\\''";
    var res2 = lexer.Tokenize(source);
    Assert.False(res2.IsError);
    Assert.Equal(2, res2.Tokens.Count);
    token = res2.Tokens[0];
    Assert.Equal(source, token.Value);
    Assert.Equal(CharTokens.MyChar, token.TokenID);

    // Unicode escape.
    var sourceU = "'\\u0066'";
    var res3 = lexer.Tokenize(sourceU);
    Assert.False(res3.IsError);
    Assert.Equal(2, res3.Tokens.Count);
    token = res3.Tokens[0];
    Assert.Equal(sourceU, token.Value);
    Assert.Equal(CharTokens.MyChar, token.TokenID);
}
public void MultipleAttributes()
{
    // Variant of the duplicate-comment-attribute checks: this lexer version
    // reports two errors per type, with the FATAL duplicate error at index 1.

    var res6 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError6>>());
    Assert.True(res6.IsError);
    Assert.Equal(2, res6.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res6.Errors[1].Level);
    Assert.Equal("too many comment lexem", res6.Errors[1].Message);

    var res5 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError5>>());
    Assert.True(res5.IsError);
    Assert.Equal(2, res5.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res5.Errors[1].Level);
    Assert.Equal("too many multi-line comment lexem", res5.Errors[1].Message);

    var res4 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError4>>());
    Assert.True(res4.IsError);
    Assert.Equal(2, res4.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res4.Errors[1].Level);
    Assert.Equal("too many single-line comment lexem", res4.Errors[1].Message);
}
public void RedundantAttributes()
{
    // Combining [Comment] with [SingleLineComment]/[MultiLineComment] is
    // rejected: each variant yields the same FATAL conflict error at index 1.

    var res3 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError3>>());
    Assert.True(res3.IsError);
    Assert.Equal(2, res3.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res3.Errors[1].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", res3.Errors[1].Message);

    var res2 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError2>>());
    Assert.True(res2.IsError);
    Assert.Equal(2, res2.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res2.Errors[1].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", res2.Errors[1].Message);

    var res1 = LexerBuilder.BuildLexer(new BuildResult<ILexer<CommentsTokenError1>>());
    Assert.True(res1.IsError);
    Assert.Equal(2, res1.Errors.Count);
    Assert.Equal(ErrorLevel.FATAL, res1.Errors[1].Level);
    Assert.Equal("comment lexem can't be used together with single-line or multi-line comment lexems", res1.Errors[1].Message);
}
public Interpreter()
{
    InitEvaluator();
    evaluator.PreEvaluateFunction += AddUserFunctions;

    var built = LexerBuilder.BuildLexer<TokenType>();

    // Anything stronger than a warning makes the lexer unusable.
    if (built.Errors.Exists(x => x.Level != sly.buildresult.ErrorLevel.WARN))
    {
        WriteLineColor("Could not build lexer!", Red);
        foreach (var err in built.Errors)
        {
            if (err.Level == sly.buildresult.ErrorLevel.WARN)
            {
                continue;
            }
            WriteLineColor($"\t{err.GetType()} level {err.Level}: {err.Message}", Red);
        }
        lexer = null;
    }
    else
    {
        lexer = built.Result;
    }
}
public Interpreter()
{
    // Build the lexer from the TokenType enum definition.
    var built = LexerBuilder.BuildLexer<TokenType>();

    // Non-warning errors are fatal: report them and leave the lexer unset.
    if (built.Errors.Exists(x => x.Level != sly.buildresult.ErrorLevel.WARN))
    {
        WriteLineColor("Could not build lexer!", Red);
        foreach (var e in built.Errors)
        {
            if (e.Level == sly.buildresult.ErrorLevel.WARN)
            {
                continue;
            }
            WriteLineColor($"\t{e.GetType()} level {e.Level}: {e.Message}", Red);
        }
        lexer = null;
    }
    else
    {
        // Lexer built successfully.
        lexer = built.Result;
    }
}
protected virtual BuildResult<ILexer<IN>> BuildLexer(BuildExtension<IN> extensionBuilder = null)
{
    // Delegate to the static builder, forwarding the optional lexer extension.
    return LexerBuilder.BuildLexer(new BuildResult<ILexer<IN>>(), extensionBuilder);
}
protected BuildResult<ILexer<IN>> BuildLexer()
{
    // Build a lexer for IN with no extensions.
    return LexerBuilder.BuildLexer(new BuildResult<ILexer<IN>>());
}