public void Lexer_Tokenize_SimpleQuery_ReturnsExpectedTokens()
{
    // A small mutation document exercising names, control characters,
    // a string argument, a number argument and an inline comment.
    var queryText = @"mutation { createHero( name: ""John"", age: 23 ) { name # Queries can have comments! friends { name } } }";
    var source = new SourceText(queryText.AsMemory());

    var tokens = Lexer.Tokenize(source);

    // The lexer should yield one token per lexical element, in document
    // order, terminated by the end-of-file sentinel.
    HelperAsserts.AssertTokenChain(
        tokens,
        new NameToken("mutation".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceLeft, SourceLocation.None),
        new NameToken("createHero".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.ParenLeft, SourceLocation.None),
        new NameToken("name".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.Colon, SourceLocation.None),
        new StringToken("\"John\"".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.Comma, SourceLocation.None),
        new NameToken("age".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.Colon, SourceLocation.None),
        NumberToken.FromSourceText("23".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.ParenRight, SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceLeft, SourceLocation.None),
        new NameToken("name".AsMemory(), SourceLocation.None),
        new CommentToken("# Queries can have comments!".AsMemory(), SourceLocation.None),
        new NameToken("friends".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceLeft, SourceLocation.None),
        new NameToken("name".AsMemory(), SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceRight, SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceRight, SourceLocation.None),
        ControlToken.FromType(TokenType.CurlyBraceRight, SourceLocation.None),
        EndOfFileToken.Instance);
}
/// <summary>
/// Converts a given source text into a stream of tokens that can be consumed
/// to create a syntax tree.
/// </summary>
/// <param name="source">The source text to analyze.</param>
/// <returns>A <see cref="TokenStream"/> containing the tokens found in the source,
/// terminated by <see cref="EndOfFileToken.Instance"/>.</returns>
public static TokenStream Tokenize(SourceText source)
{
    var stream = new TokenStream(source.Text);

    // Whitespace is not significant; skip it before the first token and
    // again after each token is consumed.
    source.SkipWhitespace();
    while (source.HasData)
    {
        SourceLocation location;

        if (source.CheckCursor(SR.IsCommentGlyph))
        {
            // Comments
            var phrase = source.NextComment(out location);
            stream.Enqueue(new CommentToken(phrase, location));
        }
        else if (source.CheckCursor(SR.IsControlGlyph))
        {
            // Flow control characters (non-text entities)
            var phrase = source.NextControlPhrase(out location);
            stream.Enqueue(new ControlToken(phrase.ToTokenType(), phrase, location));
        }
        else if (source.CheckCursor(SR.IsStartOfNameGlyph))
        {
            // Named fields
            var phrase = source.NextName(out location);
            stream.Enqueue(Lexer.CharactersToToken(phrase, location));
        }
        else if (source.CheckCursor(SR.IsStartOfNumberGlyph))
        {
            // Numbers
            var phrase = source.NextNumber(out location);
            stream.Enqueue(NumberToken.FromSourceText(phrase, location));
        }
        else if (source.CheckCursor(SR.IsStringDelimiterGlyph))
        {
            // Strings
            var phrase = source.NextString(out location);
            stream.Enqueue(new StringToken(phrase, location));
        }
        else
        {
            // Unrecognized character; surface a syntax error at the current position.
            location = source.RetrieveCurrentLocation();
            throw new GraphQLSyntaxException(
                location,
                $"Unexpected character: '{source.Peek()}'");
        }

        source.SkipWhitespace();
    }

    stream.Enqueue(EndOfFileToken.Instance);
    return stream;
}