        /// <summary>
        /// Processes the queue as far as necessary to generate a fully qualified
        /// <see cref="SyntaxNode" /> based on its ruleset.
        /// </summary>
        /// <param name="tokenStream">The token stream.</param>
        /// <returns>The generated <see cref="SyntaxNode" />.</returns>
        public SyntaxNode MakeNode(TokenStream tokenStream)
        {
            tokenStream.MatchOrThrow(TokenType.CurlyBraceLeft);
            var startLocation = tokenStream.Location;

            var maker           = NodeMakerFactory.CreateMaker<InputItemCollectionNode>();
            var inputCollection = maker.MakeNode(tokenStream);

            var collection = new ComplexValueNode(startLocation);

            collection.AddChild(inputCollection);
            return collection;
        }
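
A complex value is the curly-brace delimited set of input fields supplied to an argument, for example { id: 5, name: "bob" }. As a minimal usage sketch (assuming the Lexer, SourceText and NodeMakerFactory shown in the later examples, and assuming this maker is registered under ComplexValueNode), the maker could be driven directly against freshly lexed text:

        // a minimal sketch, not part of the library: lex a complex value and hand the
        // stream to the maker above. Registering this maker under ComplexValueNode is
        // an assumption made for illustration only.
        public SyntaxNode ParseComplexValueText(ReadOnlyMemory<char> text)
        {
            var source      = new SourceText(text);
            var tokenStream = Lexer.Tokenize(source);

            // the maker expects the stream to be pointing at the opening curly brace
            var maker = NodeMakerFactory.CreateMaker<ComplexValueNode>();
            return maker.MakeNode(tokenStream);
        }
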
        /// <summary>
        /// Processes the queue as far as necessary to generate a fully qualified
        /// <see cref="SyntaxNode" /> based on its ruleset.
        /// </summary>
        /// <param name="tokenStream">The token stream.</param>
        /// <returns>The generated <see cref="SyntaxNode" />.</returns>
        public SyntaxNode MakeNode(TokenStream tokenStream)
        {
            tokenStream.MatchOrThrow<NameToken>();

            var startLocation = tokenStream.Location;
            var fieldName     = tokenStream.ActiveToken.Text;
            var fieldAlias    = fieldName;

            tokenStream.Next();
            SyntaxNode inputCollection = null;
            SyntaxNode fieldCollection = null;
            var        directives      = new List<SyntaxNode>();

            // account for a possible alias on the field name
            if (tokenStream.Match(TokenType.Colon))
            {
                tokenStream.Next();
                tokenStream.MatchOrThrow<NameToken>();

                fieldName = tokenStream.ActiveToken.Text;
                tokenStream.Next();
            }

            // account for possible collection of input values
            if (tokenStream.Match(TokenType.ParenLeft))
            {
                var maker = NodeMakerFactory.CreateMaker<InputItemCollectionNode>();
                inputCollection = maker.MakeNode(tokenStream);
            }

            // account for possible directives on this field
            while (tokenStream.Match(TokenType.AtSymbol))
            {
                var maker     = NodeMakerFactory.CreateMaker<DirectiveNode>();
                var directive = maker.MakeNode(tokenStream);
                directives.Add(directive);
            }

            // account for possible field collection on this field
            if (tokenStream.Match(TokenType.CurlyBraceLeft))
            {
                var maker = NodeMakerFactory.CreateMaker<FieldCollectionNode>();
                fieldCollection = maker.MakeNode(tokenStream);
            }

            var node = new FieldNode(startLocation, fieldAlias, fieldName);

            if (inputCollection != null)
            {
                node.AddChild(inputCollection);
            }

            foreach (var directive in directives)
            {
                node.AddChild(directive);
            }

            if (fieldCollection != null && fieldCollection.Children.Any())
            {
                node.AddChild(fieldCollection);
            }

            return node;
        }
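
For reference, an illustrative field (the query text below is hypothetical) exercising each optional branch the maker above checks for:

        //     droidName: hero(episode: JEDI) @include(if: $withHero) { id name }
        //
        // "droidName" is captured as the field alias and "hero" as the field name; the
        // parenthesized arguments feed the input collection branch, "@include" the
        // directive loop and the braced selection set the field collection branch. When
        // no alias is present, fieldAlias and fieldName simply hold the same text.
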
        /// <summary>
        /// Processes the queue as far as necessary to generate a fully qualified
        /// <see cref="SyntaxNode" /> based on its ruleset.
        /// </summary>
        /// <param name="tokenStream">The token stream.</param>
        /// <returns>The generated <see cref="SyntaxNode" />.</returns>
        public SyntaxNode MakeNode(TokenStream tokenStream)
        {
            tokenStream.MatchOrThrow(TokenType.SpreadOperator);
            var startLocation = tokenStream.Location;

            tokenStream.Next();

            SyntaxNode            node;
            SyntaxNode            collection       = null;
            ReadOnlyMemory<char>  fragmentName     = ReadOnlyMemory<char>.Empty;
            ReadOnlyMemory<char>  restrictedToType = ReadOnlyMemory<char>.Empty;
            var directives = new List<SyntaxNode>();

            // check for inline fragment first "on Type"
            if (tokenStream.Match(KEYWORDS.On))
            {
                tokenStream.Next();
                tokenStream.MatchOrThrow<NameToken>();
                restrictedToType = tokenStream.ActiveToken.Text;
                tokenStream.Next();
            }

            // might be a named fragment?
            if (tokenStream.Match<NameToken>())
            {
                fragmentName = tokenStream.ActiveToken.Text;
                tokenStream.Next();
            }

            // account for possible directives on this field
            while (tokenStream.Match(TokenType.AtSymbol))
            {
                var dirMaker  = NodeMakerFactory.CreateMaker<DirectiveNode>();
                var directive = dirMaker.MakeNode(tokenStream);
                directives.Add(directive);
            }

            // may contain a field set
            if (tokenStream.Match(TokenType.CurlyBraceLeft))
            {
                var fieldColMaker = NodeMakerFactory.CreateMaker<FieldCollectionNode>();
                collection = fieldColMaker.MakeNode(tokenStream);
            }

            if (fragmentName.IsEmpty && restrictedToType.IsEmpty && directives.Count == 0 && collection == null)
            {
                throw new GraphQLSyntaxException(
                          startLocation,
                          "Invalid fragment syntax. No fragment could be created from the supplied block.");
            }

            if (!fragmentName.IsEmpty)
            {
                node = new FragmentSpreadNode(startLocation, fragmentName);
            }
            else
            {
                node = new FragmentNode(startLocation, restrictedToType);
            }

            if (collection != null && collection.Children.Any())
            {
                node.AddChild(collection);
            }

            foreach (var directive in directives)
            {
                node.AddChild(directive);
            }

            return node;
        }
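
The two spread forms this maker distinguishes can be illustrated as follows (the fragment and type names are made up):

        //     ...droidFields @include(if: $flag)    ->  FragmentSpreadNode (a named fragment is referenced)
        //     ... on Droid { primaryFunction }      ->  FragmentNode restricted to the "Droid" type
        //
        // If neither a fragment name, a target type, a directive nor a field set follows
        // the spread operator, the GraphQLSyntaxException above is thrown instead.
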
        /// <summary>
        /// Takes in a raw query and converts it into an executable document according to
        /// its internal rule set. If, during parsing, an error occurs or something about
        /// the supplied query text is incorrect or unexpected, a <see cref="GraphQLSyntaxException" /> is thrown.
        /// </summary>
        /// <param name="queryText">The raw query text to be parsed.</param>
        /// <returns>The completed document.</returns>
        public ISyntaxTree ParseQueryDocument(ReadOnlyMemory <char> queryText)
        {
            // if an exception occurs during parsing just let it bubble up
            // the owner of the parse request will handle it accordingly
            // ...usually by returning an error in the query result
            //
            // ----------------------------------
            // Step 1: Parsing and Tokenization
            // -------
            // Attempt to take the text and turn it into a series of structurally valid
            // tokens according to the GraphQL specification.
            //
            // Lexing provides the following guarantees
            // - - - - - - - - - - - -
            // 1) Strings are guaranteed to be valid, properly delimited and with validly escaped chars
            //    1a) Delimiters are not removed; single-quotes or triple-quotes are still attached
            //    1b) escaped characters are still escaped and not yet converted to their real unicode characters
            // 2) Numbers are guaranteed to be in a format dictated by the specification but have not been parsed as numbers
            // 3) All control characters are vetted as valid and syntactically correct
            //    3a) matching braces ARE NOT guaranteed yet.
            // 4) Any unnecessary whitespace or characters have already been removed
            //    4a) Non-relevant "valid but useless" control characters (like commas) have not been removed
            // 5) Single line comments have been parsed but the delimiters have not been removed
            //    5a) All comments start with '#' and are always only one line
            // ----------------------------------
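            // As a rough illustration of these guarantees (token names are approximate),
            // the text:
            //     { hero(id: "R2-D2") { name } }
            // is emitted by the lexer as something like:
            //     CurlyBraceLeft, Name, ParenLeft, Name, Colon, String, ParenRight,
            //     CurlyBraceLeft, Name, CurlyBraceRight, CurlyBraceRight
            // with the string's double quotes still attached (1a) and the braces not yet
            // verified as balanced (3a).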
            var source      = new SourceText(queryText);
            var tokenStream = Lexer.Tokenize(source);
            var syntaxTree  = new SyntaxTree();

            // ----------------------------------
            // Step 2: Process the Document
            // -------
            // Analyze the tokens to ensure they form a real query and build the graph document as processing occurs.
            //
            // Examples of operations performed (not an exhaustive list)
            // * Ensure names and strings fall where they should
            // * Parens, brackets and braces are properly opened and closed
            // * received strings or numbers are paired correctly with their named values
            // * graph projections appear as they should (after an option)
            // * mutation, query and subscription definition rules are enforced
            // * variables are identified and proper reference is ensured
            // * ..and on and on
            // ----------------------------------
            while (!tokenStream.EndOfStream)
            {
                // offload processing of the queue to a specialized top-level-node "maker"
                // based on the keyword at the top of the stream.
                // Load the correct maker and let that process the stream to generate
                // the next segment from the source text, leaving the stream in a state such
                // that the next segment could be generated
                //
                // As a rule of thumb, ALL makers will ensure that the stream
                // is pointing at a token that indicates the start
                // of the node or nodes they are responsible to build
                // and that when finished, the stream is beyond the end of the
                // last known token that could be part of the node
                // the maker generates. This is usually a close brace
                // ---------
                // a fragment node MUST start with the keyword 'fragment'
                // see spec: section 2.8
                // ---------
                // a query or mutation MAY start with a keyword declaration
                // or skipping all that and variables, just be an open brace
                // see spec: section 2.3 "query shorthand"
                ISyntaxNodeMaker maker;
                if (tokenStream.Match(KEYWORDS.Fragment))
                {
                    maker = NodeMakerFactory.CreateMaker<NamedFragmentNode>();
                }
                else
                {
                    maker = NodeMakerFactory.CreateMaker<OperationTypeNode>();
                }

                var node = maker.MakeNode(tokenStream);
                syntaxTree.AddNode(node);
            }

            return syntaxTree;
        }
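
As a minimal usage sketch (the GraphQLParser class name and the query text are assumptions made for illustration; only the method shown above is relied on), the parser can be fed raw query text and hands back the completed syntax tree:

        // a minimal sketch, not part of the library: the "GraphQLParser" type name is an
        // assumption; substitute whatever class declares ParseQueryDocument.
        public static ISyntaxTree ParseSampleDocument()
        {
            var queryText = @"
                query HeroNameQuery {
                    hero(episode: JEDI) {
                        id
                        name
                    }
                }";

            var parser = new GraphQLParser();
            return parser.ParseQueryDocument(queryText.AsMemory());
        }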