static void Main(string[] args)
{
    try
    {
        Console.WriteLine($"Checking filter: {Input}");

        // Build the ANTLR pipeline: input stream -> lexer -> token stream -> parser.
        AntlrInputStream inputStream = new AntlrInputStream(Input);
        QueryLexer queryLexer = new QueryLexer(inputStream);
        CommonTokenStream commonTokenStream = new CommonTokenStream(queryLexer);
        QueryParser queryParser = new QueryParser(commonTokenStream);

        // Parse the query and evaluate it via the visitor over the resulting tree.
        IParseTree context = queryParser.query();
        QueryVisitor visitor = new QueryVisitor();
        var filteredData = visitor.Visit(context) as List<Dictionary<string, string>>;

        // FIX: the original dereferenced the `as` cast without a null check, which
        // throws NullReferenceException whenever the visitor returns anything else.
        if (filteredData == null)
        {
            Console.WriteLine("No results.");
            return;
        }

        foreach (var item in filteredData)
        {
            Console.WriteLine($"Name:{item["name"]}, Age:{item["age"]}, Eye Colour:{item["eye_colour"]}");
        }
    }
    catch (Exception ex)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Error: " + ex.Message);
        // FIX: restore the console colour; the original left all later output red.
        Console.ResetColor();
    }
}
public void MultipleTermsWithPresenceAndFuzz()
{
    // "+foo~1 +bar": presence, term and edit distance for "foo"; presence and term for "bar".
    QueryLexer lexer = Lex("+foo~1 +bar");

    Assert.Equal(5, lexer.Lexemes.Count);

    // First clause: +foo~1
    Assert.Equal(LexemeType.Presence, lexer.Lexemes[0].Type);
    Assert.Equal("+", lexer.Lexemes[0].Value);
    Assert.Equal(LexemeType.Term, lexer.Lexemes[1].Type);
    Assert.Equal("foo", lexer.Lexemes[1].Value);
    Assert.Equal(LexemeType.EditDistance, lexer.Lexemes[2].Type);
    Assert.Equal("1", lexer.Lexemes[2].Value);

    // Second clause: +bar
    Assert.Equal(LexemeType.Presence, lexer.Lexemes[3].Type);
    Assert.Equal("+", lexer.Lexemes[3].Value);
    Assert.Equal(LexemeType.Term, lexer.Lexemes[4].Type);
    Assert.Equal("bar", lexer.Lexemes[4].Value);
}
/// <summary>
/// Parse and compile <paramref name="inputQuery"/> into an executable query.
/// Errors are reported through <paramref name="queryErrorListener"/>; on failure
/// this returns null rather than throwing.
/// </summary>
/// <param name="inputQuery">Reader over the query text</param>
/// <param name="queryErrorListener">Receives lexer/parser/compilation errors</param>
/// <param name="parameters">Compilation options; IsRecursiveCall suppresses the
/// Before/AfterCompilation notifications for nested compilations</param>
/// <returns>The compiled query, or null if compilation failed</returns>
internal IExecutableQuery Compile(
    TextReader inputQuery,
    IQueryErrorListener queryErrorListener,
    QueryCompilationParameters parameters)
{
    // create all necessary components to parse a query
    var input = new AntlrInputStream(inputQuery);
    var lexer = new QueryLexer(input);
    lexer.AddErrorListener(new LexerErrorListener(queryErrorListener));
    var compiler = new QueryCompilationListener(
        _queryFactory,
        this,
        queryErrorListener,
        _runtime,
        parameters);
    var tokenStream = new CommonTokenStream(lexer);
    var errorListener = new ParserErrorListener(queryErrorListener);
    var parser = new QueryParser(tokenStream);
    parser.AddErrorListener(errorListener);

    // parse and compile the query
    // Only the outermost compilation brackets the work with Before/AfterCompilation;
    // recursive (nested) calls reuse the outer bracket.
    if (!parameters.IsRecursiveCall)
    {
        queryErrorListener.BeforeCompilation();
    }

    IQuery result;
    try
    {
        // Walk the parse tree with the compilation listener, then finalize.
        var tree = parser.entry();
        ParseTreeWalker.Default.Walk(compiler, tree);
        result = compiler.Finish();
        if (result == null)
        {
            // Compilation failed; errors were already reported via the listener.
            return(null);
        }
        // Attach the original query text to the compiled result.
        result = result.WithText(input.ToString());
    }
    catch (ParseCanceledException)
    {
        // an error has already been reported
        return(null);
    }
    finally
    {
        // AfterCompilation must fire on every outermost exit path (success,
        // null result, or parse cancellation) to pair with BeforeCompilation.
        if (!parameters.IsRecursiveCall)
        {
            queryErrorListener.AfterCompilation();
        }
    }
    return(result);
}
public void TermWithFieldWithPresenceRequired()
{
    // "+title:foo" should lex as presence marker, field name, then term,
    // with Start/End spanning each token's position in the input.
    QueryLexer lexer = Lex("+title:foo");

    Assert.Equal(3, lexer.Lexemes.Count);

    Lexeme presence = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Presence, presence.Type);
    Assert.Equal("+", presence.Value);
    Assert.Equal(0, presence.Start);
    Assert.Equal(1, presence.End);

    Lexeme field = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Field, field.Type);
    Assert.Equal("title", field.Value);
    Assert.Equal(1, field.Start);
    Assert.Equal(6, field.End);

    Lexeme term = lexer.Lexemes[2];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(7, term.Start);
    Assert.Equal(10, term.End);
}
private QueryLexer Lex(string str)
{
    // Test helper: lex the whole input up front and hand back the lexer
    // so callers can inspect its Lexemes collection.
    QueryLexer result = new QueryLexer(str);
    result.Run();
    return result;
}
// Window constructor: wires up RQL syntax highlighting, the lexer/parser used for
// live validation, and a debounced re-parse whenever the editor text changes.
public MainWindow()
{
    InitializeComponent();

    // Load the RQL syntax-highlighting definition (XSHD) from an embedded resource
    // and register it with the highlighting manager for ".rql" files.
    var xshdAsString = ManifestResource.Load("RavenQuery.SyntaxTester.RQLSyntaxHighlighting.xshd");
    using (var reader = new XmlTextReader(new StringReader(xshdAsString)))
    {
        var editorSyntaxHighlighting = HighlightingLoader.Load(reader, HighlightingManager.Instance);
        HighlightingManager.Instance.RegisterHighlighting("RQL", new[] { ".rql" }, editorSyntaxHighlighting);
    }
    CodeEditor.SyntaxHighlighting = HighlightingManager.Instance.GetDefinition("RQL");

    // Lexer and parser are created once with null input; presumably ParseRQL
    // re-targets them with the current editor text on each run -- TODO confirm.
    _lexer = new QueryLexer(null);
    _parser = new QueryParser(null);
    _errorStrategy = new UserFriendlyErrorStrategy();
    _parser.ErrorHandler = _errorStrategy;
    _parser.AddErrorListener(_errorListener);

    // Debounce editor changes: only re-parse after 750 ms of inactivity,
    // marshalled back onto the UI dispatcher.
    Observable.FromEventPattern(
        ev => CodeEditor.TextChanged += ev,
        ev => CodeEditor.TextChanged -= ev)
    .Throttle(TimeSpan.FromMilliseconds(750))
    .Subscribe(_ => Dispatcher.InvokeAsync(ParseRQL));

    InitializeTextMarkerService();
}
// NOTE(review): the trailing "z" in the test name looks like a typo, but renaming
// a public test method changes the suite's visible surface, so it is kept.
public void TermWithFieldBoostAndEditDistancez()
{
    // "title:foo^10~5" lexes as field, term, boost, then edit distance.
    QueryLexer lexer = Lex("title:foo^10~5");

    Assert.Equal(4, lexer.Lexemes.Count);

    Lexeme field = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Field, field.Type);
    Assert.Equal("title", field.Value);
    Assert.Equal(0, field.Start);
    Assert.Equal(5, field.End);

    Lexeme term = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(6, term.Start);
    Assert.Equal(9, term.End);

    Lexeme boost = lexer.Lexemes[2];
    Assert.Equal(LexemeType.Boost, boost.Type);
    Assert.Equal("10", boost.Value);
    Assert.Equal(10, boost.Start);
    Assert.Equal(12, boost.End);

    Lexeme editDistance = lexer.Lexemes[3];
    Assert.Equal(LexemeType.EditDistance, editDistance.Type);
    Assert.Equal("5", editDistance.Value);
    Assert.Equal(13, editDistance.Start);
    Assert.Equal(14, editDistance.End);
}
public IStateCollector Create(string query, int position)
{
    // Lexer wrapped in a caret-aware token source: a synthetic CARET token is
    // injected at `position` among the listed operator/paren token types.
    var lexer = new QueryLexer(new AntlrInputStream(new StringReader(query)));
    var caretSource = new CaretTokenSource(lexer, position, new[]
    {
        QueryLexer.ADD_SUB,
        QueryLexer.MULT_DIV,
        QueryLexer.REL_OP,
        QueryLexer.LPAREN,
        QueryLexer.RPAREN
    });
    var tokenStream = new CommonTokenStream(caretSource);

    // we will need a random access to the input as we traverse the ATN
    var tokens = new List<IToken>();
    while (true)
    {
        var current = tokenStream.LT(1);
        tokenStream.Consume();
        tokens.Add(current);
        if (current.Type == Lexer.Eof || current.Type == CaretToken.TokenType)
        {
            break;
        }
    }

    Trace.Assert(tokens.Count >= 1, "tokens.Count >= 1"); // CARET
    Trace.Assert(
        tokens.Last().Type == CaretToken.TokenType,
        "tokens.Last().Type == CaretToken.TokenType");

    var parser = new QueryParser(tokenStream);
    return new StateCollector(tokens, parser);
}
/// <summary>
/// Check if <paramref name="identifier"/> is a simple identifier (i.e., token ID)
/// </summary>
/// <param name="identifier">Tested identifier</param>
/// <returns>
/// true iff <paramref name="identifier"/> is a simple identifier (it matches the ID token)
/// </returns>
private static bool IsSimpleIdentifier(string identifier)
{
    // Reusing the generated lexer keeps the identifier regex defined in exactly
    // one place, even if lexing a tiny string this way is a little heavyweight.
    var lexer = new QueryLexer(new AntlrInputStream(new StringReader(identifier)));

    // A simple identifier is exactly one ID token immediately followed by EOF.
    var first = lexer.NextToken().Type;
    var second = lexer.NextToken().Type;
    return first == QueryLexer.ID && second == QueryLexer.Eof;
}
public void AutoTestCase_Expr_TestPredicateTokenizer8()
{
    // A quoted string after "expr" lexes as a single TERM, not as three words.
    Value args = new Value();
    args.PushBack(Value.StringValue("expr 'foo and bar'"));

    QueryLexer tokens = new QueryLexer(args.AsSequence, false);

    var expected = new[]
    {
        QueryLexerTokenKind.TOK_EXPR,
        QueryLexerTokenKind.TERM,
        QueryLexerTokenKind.END_REACHED
    };
    foreach (var kind in expected)
    {
        Assert.Equal(kind, tokens.NextToken().Kind);
    }
}
public static QueryNode Parse(string query)
{
    // A blank query is treated as the match-everything constant "*".
    if (string.IsNullOrWhiteSpace(query))
    {
        return new ConstantNode("*");
    }

    // The lexer owns the source stream, so it must be disposed after parsing.
    using (var lexer = new QueryLexer(new SourceStreamEnumerator(query.ToStream())))
    {
        return new QueryParser(lexer).Result;
    }
}
public void SingleTermProducesOneLexeme()
{
    // A bare word lexes as a single Term covering the whole input.
    QueryLexer lexer = Lex("foo");

    Assert.Single(lexer.Lexemes);

    Lexeme only = lexer.Lexemes.First();
    Assert.Equal(LexemeType.Term, only.Type);
    Assert.Equal("foo", only.Value);
    Assert.Equal(0, only.Start);
    Assert.Equal(3, only.End);
}
public void TermEscapeCharProducesOneLexeme()
{
    // An escaped colon does not split the term: @"foo\:bar" stays one lexeme
    // whose value has the backslash removed, while Start/End span the raw input.
    QueryLexer lexer = Lex(@"foo\:bar");

    Assert.Single(lexer.Lexemes);

    Lexeme only = lexer.Lexemes.First();
    Assert.Equal(LexemeType.Term, only.Type);
    Assert.Equal("foo:bar", only.Value);
    Assert.Equal(0, only.Start);
    Assert.Equal(8, only.End);
}
public void Parse_ShouldNormalParseEmpty()
{
    // Arrange: an empty input should tokenize to an empty sequence.
    var lexer = new QueryLexer();
    var expected = new object[] { };

    // Act
    var actual = lexer.Tokenize(@"");

    // Assert
    Assert.True(expected.SequenceEqual(actual));
}
public void Parse_ShouldNormalParseText2()
{
    // Arrange: backslash escapes are consumed by the lexer, so the escaped
    // characters appear literally in the single output token.
    var lexer = new QueryLexer();
    var expected = new object[] { @"Esca\ping#.Test" };

    // Act
    var actual = lexer.Tokenize(@"Esc\a\\ping\#\.Test");

    // Assert
    Assert.True(expected.SequenceEqual(actual));
}
// Validate a query by running it through the ANTLR3 lexer/parser pipeline.
// Parsing throws on invalid input, which is the validation signal; the parsed
// statement itself is discarded.
public void ValidateQuery(String query)
{
    ANTLRStringStream string_stream = new ANTLRStringStream(query);
    QueryLexer lexer = new QueryLexer(string_stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    QueryParser parser = new QueryParser(tokens);
    QueryParser.statement_return obj = parser.statement();
    Statement StatementObject = obj.ret;
    // FIX: the original branch `if (StatementObject is CreateTable) { CreateTable
    // mod = ...; }` only assigned an unused local -- pure dead code, removed.
}
public static QueryParser.ProgramContext Parse(string query)
{
    // Replace ANTLR's console error listeners with throwing ones on both the
    // lexer and the parser, so any syntax error surfaces as an exception.
    var lexer = new QueryLexer(new AntlrInputStream(query));
    lexer.RemoveErrorListeners();
    lexer.AddErrorListener(new ThrowingErrorListener<int>());

    var parser = new QueryParser(new CommonTokenStream(lexer));
    parser.RemoveErrorListeners();
    parser.AddErrorListener(new ThrowingErrorListener<IToken>());

    return parser.program();
}
public void AutoTestCase_Expr_TestPredicateTokenizer1()
{
    // Three separate argument strings "foo" / "and" / "bar" should lex as
    // TERM, AND, TERM, then end-of-input.
    Value args = new Value();
    args.PushBack(Value.StringValue("foo"));
    args.PushBack(Value.StringValue("and"));
    args.PushBack(Value.StringValue("bar"));

    QueryLexer tokens = new QueryLexer(args.AsSequence);

    var expected = new[]
    {
        QueryLexerTokenKind.TERM,
        QueryLexerTokenKind.TOK_AND,
        QueryLexerTokenKind.TERM,
        QueryLexerTokenKind.END_REACHED
    };
    foreach (var kind in expected)
    {
        Assert.Equal(kind, tokens.NextToken().Kind);
    }
}
internal virtual T QueryParse<T>(ref string s, Func<QueryParser, T> func)
    where T : RuleContext
{
    // Lexer with the default console error listener removed; the parser uses a
    // bail-out strategy so the first syntax error aborts parsing immediately.
    var lexer = new QueryLexer(new AntlrInputStream(s));
    lexer.RemoveErrorListeners();

    var parser = new QueryParser(new CommonTokenStream(lexer))
    {
        ErrorHandler = new BailErrorStrategy()
    };
    parser.RemoveErrorListeners();

    // Delegate the actual rule invocation to the caller, then post-check.
    return Check(ref s, func(parser));
}
public void Can_find_suggestions_after_from()
{
    // After the keyword "from", the suggester should offer the four ways to
    // name a collection/index source.
    var input = "from";
    var lexer = new QueryLexer(new CaseInsensitiveInputStream(input));
    var parser = new QueryParser(new CommonTokenStream(lexer));
    var suggester = new TokenSuggester(parser);

    Assert.NotEmpty(suggester.Suggest(1)); //sanity check

    var tokenNames = suggester.Suggestions.Select(type => lexer.Vocabulary.GetSymbolicName(type)).ToArray();

    // FIX: the original asserted a count of 3 while also requiring four distinct
    // token names below -- self-contradictory; four distinct names imply count 4
    // (matching the sibling empty-input test's count/name agreement).
    Assert.Equal(4, suggester.Suggestions.Count);
    Assert.Contains("IDENTIFIER", tokenNames);
    Assert.Contains("STRING", tokenNames);
    Assert.Contains("INDEX", tokenNames);
    Assert.Contains("ALL_DOCS", tokenNames);
}
public void Can_find_suggestions_at_empty_input()
{
    // With no input at all, the suggester should offer the four query openers.
    var lexer = new QueryLexer(new CaseInsensitiveInputStream(string.Empty));
    var parser = new QueryParser(new CommonTokenStream(lexer));
    var suggester = new TokenSuggester(parser);

    Assert.NotEmpty(suggester.Suggest(0)); //sanity check

    var tokenNames = suggester.Suggestions
        .Select(type => lexer.Vocabulary.GetSymbolicName(type))
        .ToArray();

    Assert.Equal(4, suggester.Suggestions.Count);
    Assert.Contains("MATCH", tokenNames);
    Assert.Contains("FROM", tokenNames);
    Assert.Contains("WITH", tokenNames);
    Assert.Contains("DECLARES_FUNCTION", tokenNames);
}
public void AutoTestCase_Expr_TestPredicateTokenizer5()
{
    // A parenthesized expression may be split across argument strings:
    // "( foo and" + "bar)" lexes as LPAREN TERM AND TERM RPAREN.
    Value args = new Value();
    args.PushBack(Value.StringValue("( foo and"));
    args.PushBack(Value.StringValue("bar)"));

    QueryLexer tokens = new QueryLexer(args.AsSequence, false);

    // FIX: this test used NUnit/MSTest-style Assert.AreEqual while its sibling
    // tokenizer tests (1 and 8) use xUnit's Assert.Equal; aligned for consistency.
    Assert.Equal(QueryLexerTokenKind.LPAREN, tokens.NextToken().Kind);
    Assert.Equal(QueryLexerTokenKind.TERM, tokens.NextToken().Kind);
    Assert.Equal(QueryLexerTokenKind.TOK_AND, tokens.NextToken().Kind);
    Assert.Equal(QueryLexerTokenKind.TERM, tokens.NextToken().Kind);
    Assert.Equal(QueryLexerTokenKind.RPAREN, tokens.NextToken().Kind);
    Assert.Equal(QueryLexerTokenKind.END_REACHED, tokens.NextToken().Kind);
}
public void Parse_ShouldNormalParseText1()
{
    // Arrange: brackets become special tokens and the numeric index is parsed
    // as an int, not a string.
    var lexer = new QueryLexer();
    var expected = new object[]
    {
        "GO",
        new QuerySpecialToken("["),
        1,
        new QuerySpecialToken("]")
    };

    // Act
    var actual = lexer.Tokenize(@"GO[1]");

    // Assert
    Assert.True(expected.SequenceEqual(actual));
}
public void SingleTermWithHyphenProducesTwoLexemes()
{
    // A hyphen splits "foo-bar" into two Term lexemes; the hyphen itself
    // occupies position 3..4 and is not part of either lexeme.
    QueryLexer lexer = Lex("foo-bar");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme first = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Term, first.Type);
    Assert.Equal("foo", first.Value);
    Assert.Equal(0, first.Start);
    Assert.Equal(3, first.End);

    Lexeme second = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, second.Type);
    Assert.Equal("bar", second.Value);
    Assert.Equal(4, second.Start);
    Assert.Equal(7, second.End);
}
public void TermWithPresenceProhibited()
{
    // A leading "-" lexes as a Presence (prohibited) marker before the term.
    QueryLexer lexer = Lex("-foo");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme presence = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Presence, presence.Type);
    Assert.Equal("-", presence.Value);
    Assert.Equal(0, presence.Start);
    Assert.Equal(1, presence.End);

    Lexeme term = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(1, term.Start);
    Assert.Equal(4, term.End);
}
public void TermWithEditDistance()
{
    // "foo~2" lexes as a Term followed by an EditDistance lexeme; the "~"
    // separator at position 3..4 belongs to neither lexeme.
    QueryLexer lexer = Lex("foo~2");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme term = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(0, term.Start);
    Assert.Equal(3, term.End);

    Lexeme distance = lexer.Lexemes[1];
    Assert.Equal(LexemeType.EditDistance, distance.Type);
    Assert.Equal("2", distance.Value);
    Assert.Equal(4, distance.Start);
    Assert.Equal(5, distance.End);
}
public void TermWithBoost()
{
    // "foo^10" lexes as a Term followed by a Boost lexeme; the "^" separator
    // at position 3..4 belongs to neither lexeme.
    QueryLexer lexer = Lex("foo^10");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme term = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(0, term.Start);
    Assert.Equal(3, term.End);

    Lexeme boost = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Boost, boost.Type);
    Assert.Equal("10", boost.Value);
    Assert.Equal(4, boost.Start);
    Assert.Equal(6, boost.End);
}
public void MultipleTermsProduceTwoLexemes()
{
    // Whitespace-separated words each produce their own Term lexeme.
    QueryLexer lexer = Lex("foo bar");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme first = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Term, first.Type);
    Assert.Equal("foo", first.Value);
    Assert.Equal(0, first.Start);
    Assert.Equal(3, first.End);

    Lexeme second = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, second.Type);
    Assert.Equal("bar", second.Value);
    Assert.Equal(4, second.Start);
    Assert.Equal(7, second.End);
}
public void TermWithField()
{
    // "title:foo" lexes as a Field lexeme then a Term lexeme; the ":" separator
    // at position 5..6 belongs to neither.
    QueryLexer lexer = Lex("title:foo");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme field = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Field, field.Type);
    Assert.Equal("title", field.Value);
    Assert.Equal(0, field.Start);
    Assert.Equal(5, field.End);

    Lexeme term = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(6, term.Start);
    Assert.Equal(9, term.End);
}
public void TermWithFieldWithEscapeChar()
{
    // An escaped colon inside the field name does not terminate it: the field
    // value has the backslash removed while Start/End span the raw input.
    QueryLexer lexer = Lex(@"ti\:tle:foo");

    Assert.Equal(2, lexer.Lexemes.Count);

    Lexeme field = lexer.Lexemes[0];
    Assert.Equal(LexemeType.Field, field.Type);
    Assert.Equal("ti:tle", field.Value);
    Assert.Equal(0, field.Start);
    Assert.Equal(7, field.End);

    Lexeme term = lexer.Lexemes[1];
    Assert.Equal(LexemeType.Term, term.Type);
    Assert.Equal("foo", term.Value);
    Assert.Equal(8, term.Start);
    Assert.Equal(11, term.End);
}
public void Parse_ShouldNormalParseText()
{
    // Arrange: "#" and "." act as separators and are emitted as special tokens
    // interleaved with the identifier strings.
    var lexer = new QueryLexer();
    var expected = new object[]
    {
        "GameManager",
        new QuerySpecialToken("#"),
        "Camera",
        new QuerySpecialToken("."),
        "fieldOfView"
    };

    // Act
    var actual = lexer.Tokenize("GameManager#Camera.fieldOfView");

    // Assert
    Assert.True(expected.SequenceEqual(actual));
}
// Parse `query` with the ANTLR3 pipeline and return the resulting statement,
// translating every ANTLR recognition failure into a QueryParserException.
public Statement ValidateQuery(string query)
{
    try
    {
        ANTLRStringStream string_stream = new ANTLRStringStream(query);
        QueryLexer lexer = new QueryLexer(string_stream);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        QueryParser parser = new QueryParser(tokens);
        QueryParser.statement_return obj = parser.statement();
        return obj.ret;
    }
    catch (Antlr.Runtime.MismatchedTokenException ex)
    {
        // Special-cased so the message can name the token the parser expected.
        string token = m_TokenProcessor.GetTokenName(ex.Expecting);
        if (token == null)
            token = ".";
        else
            token = token + ".";
        string msg = "Error in processing query. Missing or invalid statement. Expecting: " + token;
        throw new QueryParserException(msg);
    }
    catch (QueryParserException)
    {
        // FIX: the original used `throw ex;`, which resets the stack trace;
        // a bare rethrow preserves it.
        throw;
    }
    catch (Antlr.Runtime.RecognitionException ex)
    {
        // FIX: MismatchedTreeNode/NoViableAlt/EarlyExit/FailedPredicate/
        // MismatchedRange/MismatchedSet all derive from RecognitionException and
        // were handled by seven identical catch blocks; one catch suffices.
        throw new QueryParserException(ex.Message);
    }
}