Example #1
 public void Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
 {
     if (tokenizer.Type == QueryTokenizer.TokenType.StatementTerminator)
     {
         // Report the pattern only when a full subject, predicate and object (more than two terms) are on the stack.
         if (CurrentTermGroup.stackCount > 2)
         {
             ReportPattern(query);
             SucceedingPatternTermsAreNotRequired();
         }
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.ValueDelimiter)
     {
         ReportPatternRetainSubjectAndPredicate(query);
         SucceedingPatternTermsAreNotRequired();
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.PredicateDelimiter)
     {
         ReportPatternRetainSubject(query);
         SucceedingPatternTermsAreNotRequired();
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.LiteralLanguage)
     {
         PatternTerm member = CurrentTermGroup.popGraphMember();
         CurrentTermGroup.pushGraphMember(new PlainLiteral(member.GetLabel(), tokenizer.TokenText));
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.LiteralDatatype)
     {
         PatternTerm member = CurrentTermGroup.popGraphMember();
         CurrentTermGroup.pushGraphMember(new TypedLiteral(member.GetLabel(), tokenizer.TokenText));
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordFilter)
     {
         EndTermGroup(query);
         ParseFilter(tokenizer, PatternGroup);
         StartTermGroup( );
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.BeginGeneratedBlankNode)
     {
         // The generated blank node is pushed as a term of the current group,
         // then pushed again as the leading term of the newly started blank-node group.
         BlankNode node = new BlankNode();
         CurrentTermGroup.pushGraphMember(node);
         StartBlankNodeGroup();
         CurrentTermGroup.pushGraphMember(node);
     }
     else if (tokenizer.Type == QueryTokenizer.TokenType.EndGeneratedBlankNode)
     {
         EndTermGroup(query);
     }
     else
     {
         PatternTerm subjectMember = parser.ParseTokenToMember(query);
         CurrentTermGroup.pushGraphMember(subjectMember);
         SucceedingPatternTermsAreRequired();
     }
 }
Example #2
        public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
        {
            if (Explain)
            {
                Console.WriteLine("Entering PREFIXES state");
            }
            do
            {
                if (Explain)
                {
                    Console.WriteLine("  tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
                }
                switch (tokenizer.Type)
                {
                case QueryTokenizer.TokenType.PrefixName:
                    string prefix = tokenizer.TokenText;
                    if (tokenizer.MoveNext())
                    {
                        if (tokenizer.Type == QueryTokenizer.TokenType.QuotedIRIRef)
                        {
                            parser.RegisterPrefix(prefix, tokenizer.TokenText);
                        }
                        else
                        {
                            throw new SparqlException("Error parsing prefix declaration '" + prefix + "' at character " + tokenizer.TokenAbsolutePosition + ". The value of this prefix should be an IRI, but parser found '" + tokenizer.TokenText + "' instead");
                        }
                    }
                    else
                    {
                        throw new SparqlException("Error parsing prefix declaration '" + prefix + "' at character " + tokenizer.TokenAbsolutePosition + ". The query appears to be truncated after this declaration - no further content was found.");
                    }
                    break;

                case QueryTokenizer.TokenType.KeywordSelect:
                    query.ResultForm = Query.ResultFormType.Select;
                    return(new SelectState());

                case QueryTokenizer.TokenType.KeywordConstruct:
                    query.ResultForm = Query.ResultFormType.Construct;
                    return(new ConstructState());

                case QueryTokenizer.TokenType.Comment:
                    break;

                case QueryTokenizer.TokenType.KeywordPrefix:
                    break;

                default:
                    throw new SparqlException("Expecting a prefix declaration. Got '" + tokenizer.TokenText + "' at character " + tokenizer.TokenAbsolutePosition + ".");
                }
            } while (tokenizer.MoveNext());
            return(this);
        }
Example #3
        public void ParseInto(Query query, String sparql)
        {
            InitializeParserState();
            itsState.Explain = Explain;

            itsTokenEnum = TokenizeQuery(sparql);
            while (itsTokenEnum.MoveNext())
            {
                string token = itsTokenEnum.TokenText;   // TokenText is already a string, so no cast is needed
                if (Explain)
                {
                    Console.WriteLine("  itsTokenEnum.TokenText is " + token + " (" + itsTokenEnum.Type + ")");
                }

                itsState         = itsState.Handle(this, itsTokenEnum, query);
                itsState.Explain = Explain;
            }
        }
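
For orientation, ParseInto above is the entry point that drives the State machine shown in the other examples: it tokenizes the query, hands each token to the current State's Handle method, and adopts whatever State that call returns. The lines below are a minimal usage sketch, not taken from the project itself; they assume QueryParser and Query expose parameterless constructors and that Explain is publicly settable on the parser.

        // Hypothetical usage sketch (the constructors and the public Explain setter are assumptions).
        QueryParser parser = new QueryParser();
        parser.Explain = true;   // echoes each token and state transition to the console
        Query query = new Query();
        parser.ParseInto(query, "SELECT ?name WHERE { ?person <http://xmlns.com/foaf/0.1/name> ?name }");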
Example #4
            private void ParseFilter(QueryTokenizer tokenizer, PatternGroup group)
            {
                while (tokenizer.MoveNext())
                {
                    switch (tokenizer.Type)
                    {
                    case QueryTokenizer.TokenType.BeginGroup:
                        break;

                    case QueryTokenizer.TokenType.EndGroup:
                        return;

                    case QueryTokenizer.TokenType.Variable:
                        Constraint constraint = new Constraint(new VariableExpression(new Variable(tokenizer.TokenText)));
                        group.AddConstraint(constraint);
                        break;
                    }
                }
            }
Example #5
        public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
        {
            if (Explain)
            {
                Console.WriteLine("Entering DESCRIBE state");
            }
            do
            {
                if (Explain)
                {
                    Console.WriteLine("  tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
                }
                switch (tokenizer.Type)
                {
                case QueryTokenizer.TokenType.KeywordWhere:
                    if (tokenizer.MoveNext())
                    {
                        if (tokenizer.TokenText.Equals("{"))
                        {
                            return(new WhereState());
                        }
                    }
                    break;

                case QueryTokenizer.TokenType.QuotedIRIRef:
                    query.AddDescribeTerm(new UriRef(tokenizer.TokenText));
                    break;

                case QueryTokenizer.TokenType.Variable:
                    Variable var = new Variable(tokenizer.TokenText);
                    query.AddVariable(var);
                    query.AddDescribeTerm(var);
                    break;

                default:
                    break;
                }
            } while (tokenizer.MoveNext());
            return(this);
        }
Example #6
        public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
        {
            if (Explain)
            {
                Console.WriteLine("Entering SELECT state");
            }
            do
            {
                if (Explain)
                {
                    Console.WriteLine("  tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
                }
                switch (tokenizer.Type)
                {
                case QueryTokenizer.TokenType.KeywordWhere:
                    if (tokenizer.MoveNext())
                    {
                        if (tokenizer.TokenText.Equals("{"))
                        {
                            return(new WhereState());
                        }
                    }
                    break;

                case QueryTokenizer.TokenType.Variable:
                    query.AddVariable(new Variable(tokenizer.TokenText));
                    break;

                case QueryTokenizer.TokenType.Wildcard:
                    query.SelectAll = true;
                    break;

                case QueryTokenizer.TokenType.BeginGroup:
                    return(new WhereState());
                }
            } while (tokenizer.MoveNext());

            return(this);
        }
Example #7
 public SparqlException(string message, QueryTokenizer tokenizer) : base(message)
 {
     itsPositionInQuery = tokenizer.TokenAbsolutePosition;
     itsPositionInLine  = tokenizer.TokenLinePosition;
     itsLineInQuery     = tokenizer.TokenLine;
 }
Example #8
File: State.cs Project: Titan512/spiralrdf
 public abstract State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query);
Example #9
        public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
        {
            if (Explain)
            {
                Console.WriteLine("Entering WHERE state");
            }
            StartPatternGroup();
            query.PatternGroup = itsPatternCollector.PatternGroup;

            do
            {
                if (Explain)
                {
                    Console.WriteLine("  tokenizer.TokenText is " + tokenizer.TokenText + " (" + tokenizer.Type + ")");
                }

                if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup)
                {
                    itsPatternCollector.EndTermGroup(query);
                    itsPatternCollector.IncrementNestingDepth();
                }
                else if (tokenizer.Type == QueryTokenizer.TokenType.EndGroup)
                {
                    if (itsPatternCollector.HasNestedGroups)
                    {
                        itsPatternCollector.EndTermGroup(query);
                        itsPatternCollector.DecrementNestingDepth();
                    }
                    else
                    {
                        EndPatternGroup(query);
                        if (itsPatternCollectorStack.Count == 0)
                        {
                            return(new SolutionModifierState());
                        }
                        else
                        {
                            itsPatternCollector.StartTermGroup();
                        }
                    }
                }
                else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordOptional)
                {
                    if (tokenizer.MoveNext())
                    {
                        if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup)
                        {
                            itsPatternCollector.EndTermGroup(query);
                            StartOptionalPatternGroup();
                        }
                        else
                        {
                            throw new SparqlException("Error parsing OPTIONAL declaration, expecting { but encountered '" + tokenizer.TokenText + "'", tokenizer);
                        }
                    }
                    else
                    {
                        throw new SparqlException("Error parsing OPTIONAL declaration, expecting { but encountered end of query", tokenizer);
                    }
                }
                else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordUnion)
                {
                    if (tokenizer.MoveNext())
                    {
                        if (tokenizer.Type == QueryTokenizer.TokenType.BeginGroup)
                        {
                            itsPatternCollector.EndTermGroup(query);
                            StartAlternatePatternGroup();
                        }
                        else
                        {
                            throw new SparqlException("Error parsing UNION declaration, expecting { but encountered '" + tokenizer.TokenText + "'", tokenizer);
                        }
                    }
                    else
                    {
                        throw new SparqlException("Error parsing UNION declaration, expecting { but encountered end of query", tokenizer);
                    }
                }
                else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordGraph)
                {
                    //TODO
                }
                else
                {
                    itsPatternCollector.Handle(parser, tokenizer, query);
                }
            } while (tokenizer.MoveNext());

            return(this);
        }
Example #10
 public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
 {
     return(this);
 }
Example #11
        public override State Handle(QueryParser parser, QueryTokenizer tokenizer, Query query)
        {
            switch (tokenizer.Type)
            {
            case QueryTokenizer.TokenType.KeywordOrder:
                if (tokenizer.MoveNext())
                {
                    if (tokenizer.Type == QueryTokenizer.TokenType.KeywordBy)
                    {
                        if (tokenizer.MoveNext())
                        {
                            if (tokenizer.Type == QueryTokenizer.TokenType.KeywordAsc)
                            {
                                query.OrderDirection = Query.SortOrder.Ascending;
                                if (!tokenizer.MoveNext())
                                {
                                    throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
                                }
                            }
                            else if (tokenizer.Type == QueryTokenizer.TokenType.KeywordDesc)
                            {
                                query.OrderDirection = Query.SortOrder.Descending;
                                if (!tokenizer.MoveNext())
                                {
                                    throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
                                }
                            }

                            Expression orderExpression = parser.ParseExpression( );
                            query.OrderBy = orderExpression;
                        }
                        else
                        {
                            throw new SparqlException("Error parsing ORDER BY declaration, missing ordering expression", tokenizer);
                        }
                    }
                    else
                    {
                        throw new SparqlException("Error parsing ORDER BY declaration, expecting BY keyword but encountered '" + tokenizer.TokenText + "'", tokenizer);
                    }
                }
                else
                {
                    throw new SparqlException("Error parsing ORDER BY declaration, expecting BY keyword but encountered end of query", tokenizer);
                }

                break;

            case QueryTokenizer.TokenType.BeginGroup:
                break;

            case QueryTokenizer.TokenType.KeywordLimit:
                if (tokenizer.MoveNext())
                {
                    if (tokenizer.Type == QueryTokenizer.TokenType.NumericInteger)
                    {
                        query.ResultLimit = Convert.ToInt32(tokenizer.TokenText);
                        break;
                    }
                    else
                    {
                        throw new SparqlException("Error parsing LIMIT declaration, expecting integer but encountered '" + tokenizer.TokenText + "'", tokenizer);
                    }
                }
                else
                {
                    throw new SparqlException("Error parsing LIMIT declaration, expecting number but encountered end of query", tokenizer);
                }

            case QueryTokenizer.TokenType.KeywordOffset:
                if (tokenizer.MoveNext())
                {
                    if (tokenizer.Type == QueryTokenizer.TokenType.NumericInteger)
                    {
                        query.ResultOffset = Convert.ToInt32(tokenizer.TokenText);
                        break;
                    }
                    else
                    {
                        throw new SparqlException("Error parsing OFFSET declaration, expecting integer but encountered '" + tokenizer.TokenText + "'", tokenizer);
                    }
                }
                else
                {
                    throw new SparqlException("Error parsing OFFSET declaration, expecting number but encountered end of query", tokenizer);
                }
            }
            return(this);
        }