/// <summary>
/// Parser state entered after the CONSTRUCT keyword. Scans forward until the
/// WHERE keyword followed by "{" is found, then hands over to the WHERE state.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) {
        Console.WriteLine("Entering CONSTRUCT state");
    }

    do {
        if (Explain) {
            Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
        }

        if (tokenizer.Type == QueryTokenizer.TokenType.KeywordWhere) {
            // NOTE(review): the token after WHERE is consumed even when it is
            // not "{" — this matches the original behavior.
            if (tokenizer.MoveNext() && tokenizer.TokenText.Equals("{")) {
                return new WhereState();
            }
        }
    } while (tokenizer.MoveNext());

    return this;
}
/// <summary>
/// Parser state for the query prolog: consumes BASE and PREFIX declarations
/// and dispatches to the SELECT / CONSTRUCT / DESCRIBE result-form states.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) {
        Console.WriteLine("Entering PROLOG state");
    }

    do {
        if (Explain) {
            Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
        }

        switch (tokenizer.Type) {
            case QueryTokenizer.TokenType.KeywordSelect:
                query.ResultForm = Query.ResultFormType.Select;
                return new SelectState();

            case QueryTokenizer.TokenType.KeywordConstruct:
                query.ResultForm = Query.ResultFormType.Construct;
                return new ConstructState();

            case QueryTokenizer.TokenType.KeywordDescribe:
                query.ResultForm = Query.ResultFormType.Describe;
                return new DescribeState();

            case QueryTokenizer.TokenType.KeywordPrefix:
                return new PrefixesState();

            case QueryTokenizer.TokenType.KeywordBase:
                // BASE must be followed by a quoted IRI reference.
                if (!tokenizer.MoveNext()) {
                    throw new SparqlException("Error parsing base declaration at character " + tokenizer.TokenAbsolutePosition + ". The query appears to be truncated after this declaration - no further content was found.");
                }
                if (tokenizer.Type != QueryTokenizer.TokenType.QuotedIRIRef) {
                    throw new SparqlException("Error parsing base declaration at character " + tokenizer.TokenAbsolutePosition + ". The value of base should be an IRI, but parser found '" + tokenizer.TokenText + "' instead");
                }
                try {
                    query.Base = tokenizer.TokenText;
                }
                catch (UriFormatException e) {
                    // The Base setter presumably validates the IRI; surface a parse error instead.
                    throw new SparqlException("Malformed URI found in base declaration", tokenizer, e);
                }
                break;
        }
    } while (tokenizer.MoveNext());

    return this;
}
/// <summary>
/// Parser state for a run of PREFIX declarations. Registers each
/// prefix/IRI pair with the parser and transitions to the result-form
/// state when SELECT or CONSTRUCT is reached.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) {
        Console.WriteLine("Entering PREFIXES state");
    }

    do {
        if (Explain) {
            Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
        }

        switch (tokenizer.Type) {
            case QueryTokenizer.TokenType.PrefixName:
                // A prefix name must be followed by the IRI it abbreviates.
                string prefix = tokenizer.TokenText;
                if (!tokenizer.MoveNext()) {
                    throw new SparqlException("Error parsing prefix declaration '" + prefix + "' at character " + tokenizer.TokenAbsolutePosition + ". The query appears to be truncated after this declaration - no further content was found.");
                }
                if (tokenizer.Type != QueryTokenizer.TokenType.QuotedIRIRef) {
                    throw new SparqlException("Error parsing prefix declaration '" + prefix + "' at character " + tokenizer.TokenAbsolutePosition + ". The value of this prefix should be an IRI, but parser found '" + tokenizer.TokenText + "' instead");
                }
                parser.RegisterPrefix(prefix, tokenizer.TokenText);
                break;

            case QueryTokenizer.TokenType.KeywordSelect:
                query.ResultForm = Query.ResultFormType.Select;
                return new SelectState();

            case QueryTokenizer.TokenType.KeywordConstruct:
                query.ResultForm = Query.ResultFormType.Construct;
                return new ConstructState();

            case QueryTokenizer.TokenType.Comment:
                // Comments are ignored.
                break;

            case QueryTokenizer.TokenType.KeywordPrefix:
                // The PREFIX keyword itself carries no data; the PrefixName token follows.
                break;

            default:
                throw new SparqlException("Expecting a prefix declaration. Got '" + tokenizer.TokenText + "' at character " + tokenizer.TokenAbsolutePosition + ".");
        }
    } while (tokenizer.MoveNext());

    return this;
}
/// <summary>
/// Parser state entered after the DESCRIBE keyword. Collects the IRIs and
/// variables to describe, then moves to the WHERE state when "WHERE {" is seen.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) {
        Console.WriteLine("Entering DESCRIBE state");
    }

    do {
        if (Explain) {
            Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
        }

        if (tokenizer.Type == QueryTokenizer.TokenType.KeywordWhere) {
            // NOTE(review): the token after WHERE is consumed even when it is not "{".
            if (tokenizer.MoveNext() && tokenizer.TokenText.Equals("{")) {
                return new WhereState();
            }
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.QuotedIRIRef) {
            query.AddDescribeTerm(new UriRef(tokenizer.TokenText));
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.Variable) {
            // A described variable is also registered as a query variable.
            Variable describeVariable = new Variable(tokenizer.TokenText);
            query.AddVariable(describeVariable);
            query.AddDescribeTerm(describeVariable);
        }
    } while (tokenizer.MoveNext());

    return this;
}
/// <summary>
/// Parser state entered after the SELECT keyword. Collects the projected
/// variables (or the "*" wildcard) and transitions to the WHERE state when
/// either "WHERE {" or a bare "{" is reached.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) {
        Console.WriteLine("Entering SELECT state");
    }

    do {
        if (Explain) {
            Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
        }

        if (tokenizer.Type == QueryTokenizer.TokenType.KeywordWhere) {
            // NOTE(review): the token after WHERE is consumed even when it is not "{".
            if (tokenizer.MoveNext() && tokenizer.TokenText.Equals("{")) {
                return new WhereState();
            }
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.Variable) {
            query.AddVariable(new Variable(tokenizer.TokenText));
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.Wildcard) {
            // "SELECT *" projects every variable bound in the pattern.
            query.SelectAll = true;
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup) {
            // A bare "{" starts the group pattern even without the WHERE keyword.
            return new WhereState();
        }
    } while (tokenizer.MoveNext());

    return this;
}
/// <summary>
/// Parses the supplied SPARQL text into the given query by driving the
/// parser's state machine one token at a time.
/// </summary>
/// <param name="query">The query object to populate.</param>
/// <param name="sparql">The SPARQL query text to parse.</param>
public void ParseInto(Query query, String sparql) {
    InitializeParserState();
    itsState.Explain = Explain;
    itsTokenEnum = TokenizeQuery(sparql);

    while (itsTokenEnum.MoveNext()) {
        string currentToken = (string)itsTokenEnum.TokenText;
        if (Explain) {
            Console.WriteLine(" itsTokenEnum.TokenText is " + currentToken + " (" + itsTokenEnum.Type + ")");
        }
        // Each state consumes tokens and yields the next state; propagate the
        // Explain flag so tracing stays on across transitions.
        itsState = itsState.Handle(this, itsTokenEnum, query);
        itsState.Explain = Explain;
    }
}
/// <summary>
/// Consumes a parenthesised FILTER expression from the tokenizer, adding a
/// constraint to the group for each variable encountered.
/// </summary>
/// <param name="tokenizer">Token stream positioned just before the opening "(".</param>
/// <param name="group">The pattern group that receives the constraints.</param>
private void ParseFilter(QueryTokenizer tokenizer, PatternGroup group) {
    // Track parenthesis nesting so that a ")" belonging to a nested
    // sub-expression (e.g. FILTER (?x > (1 + 2))) does not terminate the
    // filter prematurely. The previous implementation returned on the first
    // EndGroup token regardless of depth.
    int depth = 0;
    while (tokenizer.MoveNext()) {
        switch (tokenizer.Type) {
            case QueryTokenizer.TokenType.BeginGroup:
                depth++;
                break;
            case QueryTokenizer.TokenType.EndGroup:
                depth--;
                if (depth <= 0) {
                    // The filter's own closing ")" has been reached.
                    return;
                }
                break;
            case QueryTokenizer.TokenType.Variable:
                // NOTE(review): only bare variable expressions are captured as
                // constraints here; operators and literals are not yet modelled.
                Constraint constraint = new Constraint(new VariableExpression(new Variable(tokenizer.TokenText)));
                group.AddConstraint(constraint);
                break;
        }
    }
}
// Parser state for the WHERE clause: collects graph patterns, nested groups,
// and OPTIONAL/UNION blocks until the outermost group is closed, then moves
// to the solution-modifier state.
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (Explain) { Console.WriteLine("Entering WHERE state"); }

    // Open the top-level pattern group and expose it on the query.
    StartPatternGroup();
    query.PatternGroup = itsPatternCollector.PatternGroup;

    do {
        if (Explain) { Console.WriteLine(" tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")"); }

        if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup) {
            // "{": flush the current term group and descend one nesting level
            // within the current collector.
            itsPatternCollector.EndTermGroup(query);
            itsPatternCollector.IncrementNestingDepth();
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.EndGroup) {
            if (itsPatternCollector.HasNestedGroups) {
                // "}": closes a nested group inside the current collector.
                itsPatternCollector.EndTermGroup(query);
                itsPatternCollector.DecrementNestingDepth();
            }
            else {
                // "}": closes the current collector's own pattern group
                // (restoring the parent collector from the stack, if any).
                EndPatternGroup(query);
                if (itsPatternCollectorStack.Count == 0) {
                    // The outermost WHERE group is closed; parse ORDER BY /
                    // LIMIT / OFFSET next.
                    return(new SolutionModifierState());
                }
                else {
                    // Resume collecting terms in the restored parent collector.
                    itsPatternCollector.StartTermGroup();
                }
            }
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordOptional) {
            // OPTIONAL must be immediately followed by "{", which is consumed here.
            if (tokenizer.MoveNext()) {
                if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup) {
                    itsPatternCollector.EndTermGroup(query);
                    StartOptionalPatternGroup();
                }
                else {
                    throw new SparqlException("Error parsing OPTIONAL declaration, expecting { but encountered '" + tokenizer.TokenText + "'", tokenizer);
                }
            }
            else {
                throw new SparqlException("Error parsing OPTIONAL declaration, expecting { but encountered end of query", tokenizer);
            }
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordUnion) {
            // UNION likewise requires an immediately following "{".
            if (tokenizer.MoveNext()) {
                if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup) {
                    itsPatternCollector.EndTermGroup(query);
                    StartAlternatePatternGroup();
                }
                else {
                    throw new SparqlException("Error parsing UNION declaration, expecting { but encountered '" + tokenizer.TokenText + "'", tokenizer);
                }
            }
            else {
                throw new SparqlException("Error parsing UNION declaration, expecting { but encountered end of query", tokenizer);
            }
        }
        else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordGraph) {
            // GRAPH patterns are not yet supported; the keyword is silently skipped.
            //TODO
        }
        else {
            // Any other token is part of a triple pattern; delegate to the collector.
            itsPatternCollector.Handle(parser, tokenizer, query);
        }
    } while (tokenizer.MoveNext());

    return(this);
}
/// <summary>
/// Parser state for the solution modifiers that may follow the WHERE clause:
/// ORDER BY, LIMIT and OFFSET. Remains in this state until the token stream ends.
/// </summary>
public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    switch (tokenizer.Type) {
        case QueryTokenizer.TokenType.KeywordOrder:
            ParseOrderBy(parser, tokenizer, query);
            break;
        case QueryTokenizer.TokenType.BeginGroup:
            // Stray "{" tokens are ignored in this state.
            break;
        case QueryTokenizer.TokenType.KeywordLimit:
            query.ResultLimit = ParseModifierValue(tokenizer, "LIMIT");
            break;
        case QueryTokenizer.TokenType.KeywordOffset:
            query.ResultOffset = ParseModifierValue(tokenizer, "OFFSET");
            break;
    }
    return this;
}

/// <summary>
/// Parses "ORDER BY [ASC|DESC] expression", setting the query's sort
/// direction (when ASC/DESC is present) and ordering expression.
/// </summary>
/// <exception cref="SparqlException">Thrown when BY or the ordering expression is missing.</exception>
private void ParseOrderBy(QueryParser parser, QueryTokenizer tokenizer, Query query) {
    if (!tokenizer.MoveNext()) {
        throw new SparqlException("Error parsing ORDER BY declaration, expecting BY keyword but encountered end of query", tokenizer);
    }
    if (tokenizer.Type != QueryTokenizer.TokenType.KeywordBy) {
        throw new SparqlException("Error parsing ORDER BY declaration, expecting BY keyword but encountered '" + tokenizer.TokenText + "'", tokenizer);
    }
    if (!tokenizer.MoveNext()) {
        throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
    }
    // Optional direction keyword; when absent the query's current direction is kept.
    if (tokenizer.Type == QueryTokenizer.TokenType.KeywordAsc) {
        query.OrderDirection = Query.SortOrder.Ascending;
        if (!tokenizer.MoveNext()) {
            throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
        }
    }
    else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordDesc) {
        query.OrderDirection = Query.SortOrder.Descending;
        if (!tokenizer.MoveNext()) {
            throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
        }
    }
    query.OrderBy = parser.ParseExpression();
}

/// <summary>
/// Parses the integer argument of a LIMIT or OFFSET declaration.
/// </summary>
/// <param name="tokenizer">Token stream positioned on the LIMIT/OFFSET keyword.</param>
/// <param name="keyword">Keyword name used in error messages ("LIMIT" or "OFFSET").</param>
/// <returns>The parsed integer value.</returns>
/// <exception cref="SparqlException">Thrown when the integer is missing or malformed.</exception>
private static int ParseModifierValue(QueryTokenizer tokenizer, string keyword) {
    if (!tokenizer.MoveNext()) {
        throw new SparqlException("Error parsing " + keyword + " declaration, expecting number but encountered end of query", tokenizer);
    }
    if (tokenizer.Type != QueryTokenizer.TokenType.NumericInteger) {
        throw new SparqlException("Error parsing " + keyword + " declaration, expecting integer but encountered '" + tokenizer.TokenText + "'", tokenizer);
    }
    // Parse with the invariant culture so the result does not depend on the
    // host locale (previously Convert.ToInt32 used the current culture, CA1305).
    return int.Parse(tokenizer.TokenText, System.Globalization.CultureInfo.InvariantCulture);
}