public TSQLCaseExpression Parse(ITSQLTokenizer tokenizer)
{
	// Parses a CASE ... END expression, starting at the CASE keyword.
	TSQLCaseExpression expression = new TSQLCaseExpression();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.CASE))
	{
		throw new InvalidOperationException("CASE expected.");
	}

	expression.Tokens.Add(tokenizer.Current);

	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		expression,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.END
		},
		lookForStatementStarts: false);

	// Unlike the clause parsers, the END stop word is part of this
	// expression itself rather than belonging to the next expression
	// or clause, so consume it here.
	expression.Tokens.Add(tokenizer.Current);

	return expression;
}
public TSQLGroupByClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the GROUP BY clause, starting at the GROUP keyword.
	TSQLGroupByClause clause = new TSQLGroupByClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.GROUP))
	{
		throw new InvalidOperationException("GROUP expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// the helper also handles subqueries, so keywords inside parens
	// are not mistaken for stop words
	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.HAVING,
			TSQLKeywords.UNION,
			TSQLKeywords.EXCEPT,
			TSQLKeywords.INTERSECT,
			TSQLKeywords.ORDER,
			TSQLKeywords.FOR,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLIntoClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses an INTO clause, starting at the INTO keyword, collecting
	// the (possibly multi-part, possibly aliased) target name along
	// with surrounding whitespace and comments.
	TSQLIntoClause into = new TSQLIntoClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.INTO))
	{
		throw new InvalidOperationException("INTO expected.");
	}

	into.Tokens.Add(tokenizer.Current);

	while (tokenizer.MoveNext())
	{
		TSQLToken token = tokenizer.Current;

		// USING is a stop word, but it tokenizes as a plain
		// TSQLTokenType.Identifier, so it has to be excluded explicitly
		if (token.IsFutureKeyword(TSQLFutureKeywords.USING))
		{
			break;
		}

		bool partOfTarget =
			token.Type == TSQLTokenType.Identifier ||
			token.IsCharacter(TSQLCharacters.Period) ||
			token.Type == TSQLTokenType.Whitespace ||
			token.Type == TSQLTokenType.SingleLineComment ||
			token.Type == TSQLTokenType.MultilineComment ||
			token.IsKeyword(TSQLKeywords.AS);

		if (!partOfTarget)
		{
			break;
		}

		into.Tokens.Add(token);
	}

	return into;
}
public TSQLOrderByClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the ORDER BY clause, starting at the ORDER keyword.
	TSQLOrderByClause clause = new TSQLOrderByClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.ORDER))
	{
		throw new InvalidOperationException("ORDER expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OFFSET
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.FOR,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	// OFFSET gets its own parser because it can contain FETCH, which
	// would otherwise be treated as the start of a new statement
	// instead of still being contained within OFFSET.
	if (tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
	{
		TSQLOffsetClause offset = new TSQLOffsetClauseParser().Parse(tokenizer);

		clause.Tokens.AddRange(offset.Tokens);
	}

	return clause;
}
public TSQLOnClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses an ON clause (join or MERGE condition), starting at the
	// ON keyword.
	TSQLOnClause clause = new TSQLOnClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.ON))
	{
		throw new InvalidOperationException("ON expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// TODO: tighten logic to handle tables named OUTPUT, but still
	// handle ON usage in MERGE
	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT,
			TSQLFutureKeywords.USING
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.INNER,
			TSQLKeywords.OUTER,
			TSQLKeywords.JOIN,
			TSQLKeywords.WHEN
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLIntoClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses an INTO clause, starting at the INTO keyword, collecting
	// the (possibly multi-part) target name along with surrounding
	// whitespace and comments.
	TSQLIntoClause into = new TSQLIntoClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.INTO))
	{
		// was ApplicationException, which the .NET Framework Design
		// Guidelines discourage throwing; InvalidOperationException
		// matches every other parser in this project
		throw new InvalidOperationException("INTO expected.");
	}

	into.Tokens.Add(tokenizer.Current);

	while (
		tokenizer.MoveNext() &&
		(
			tokenizer.Current.Type == TSQLTokenType.Identifier ||
			tokenizer.Current.IsCharacter(TSQLCharacters.Period) ||
			tokenizer.Current.Type == TSQLTokenType.Whitespace ||
			tokenizer.Current.Type == TSQLTokenType.SingleLineComment ||
			tokenizer.Current.Type == TSQLTokenType.MultilineComment
		))
	{
		into.Tokens.Add(tokenizer.Current);
	}

	return into;
}
public TSQLUnknownStatement Parse(ITSQLTokenizer tokenizer)
{
	// Collects tokens for a statement this library does not recognize,
	// reading everything up to (and including) the next semicolon, or
	// to the end of the token stream.
	TSQLUnknownStatement statement = new TSQLUnknownStatement();

	statement.Tokens.Add(tokenizer.Current);

	while (tokenizer.MoveNext())
	{
		TSQLToken token = tokenizer.Current;

		bool isSemicolon =
			token is TSQLCharacter &&
			token.AsCharacter.Character == TSQLCharacters.Semicolon;

		if (isSemicolon)
		{
			break;
		}

		statement.Tokens.Add(token);
	}

	// if we stopped on a semicolon (rather than running out of
	// tokens), the terminator belongs to this statement as well
	if (
		tokenizer.Current is TSQLCharacter &&
		tokenizer.Current.AsCharacter.Character == TSQLCharacters.Semicolon)
	{
		statement.Tokens.Add(tokenizer.Current);
	}

	return statement;
}
public TSQLOffsetClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses an OFFSET clause, starting at the OFFSET token (a future
	// keyword, not a reserved keyword).
	TSQLOffsetClause clause = new TSQLOffsetClause();

	if (!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
	{
		throw new InvalidOperationException("OFFSET expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.FETCH
		},
		lookForStatementStarts: true);

	// a trailing FETCH belongs to this OFFSET clause, so fold its
	// tokens in rather than leaving it for the caller
	if (tokenizer.Current.IsKeyword(TSQLKeywords.FETCH))
	{
		TSQLFetchClause fetch = new TSQLFetchClauseParser().Parse(tokenizer);

		clause.Tokens.AddRange(fetch.Tokens);
	}

	return clause;
}
public TSQLOrderByClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the ORDER BY clause, starting at the ORDER keyword.
	TSQLOrderByClause clause = new TSQLOrderByClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.ORDER))
	{
		throw new InvalidOperationException("ORDER expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// the helper also handles subqueries, so keywords inside parens
	// are not mistaken for stop words
	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.FOR,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLValuesExpression Parse(ITSQLTokenizer tokenizer)
{
	// Parses a VALUES expression, starting at the VALUES keyword.
	TSQLValuesExpression expression = new TSQLValuesExpression();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.VALUES))
	{
		throw new InvalidOperationException("VALUES expected.");
	}

	expression.Tokens.Add(tokenizer.Current);

	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		expression,
		// these stop words come from usage in MERGE
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.ON,
			TSQLKeywords.WHEN
		},
		// statement starts also end the expression,
		// e.g. INSERT INTO ... VALUES ... SELECT
		lookForStatementStarts: true);

	return expression;
}
public TSQLDeleteClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the DELETE clause, starting at the DELETE keyword.
	TSQLDeleteClause clause = new TSQLDeleteClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.DELETE))
	{
		throw new InvalidOperationException("DELETE expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.FROM,
			TSQLKeywords.WHERE,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLMergeClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the MERGE clause, starting at the MERGE keyword.
	TSQLMergeClause clause = new TSQLMergeClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.MERGE))
	{
		throw new InvalidOperationException("MERGE expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// the clause can contain TOP(), which the helper handles via
	// paren recursion
	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT,
			TSQLFutureKeywords.USING
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.INTO,
			TSQLKeywords.AS,
			TSQLKeywords.ON,
			TSQLKeywords.WHEN
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLInsertClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the INSERT clause, starting at the INSERT keyword and
	// stopping at whatever supplies the inserted rows
	// (SELECT / EXECUTE / VALUES / DEFAULT).
	TSQLInsertClause clause = new TSQLInsertClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.INSERT))
	{
		throw new InvalidOperationException("INSERT expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.SELECT,
			TSQLKeywords.EXECUTE,
			TSQLKeywords.VALUES,
			TSQLKeywords.DEFAULT
		},
		lookForStatementStarts: false);

	return clause;
}
public TSQLUpdateClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the UPDATE clause, starting at the UPDATE keyword and
	// stopping at SET.
	TSQLUpdateClause clause = new TSQLUpdateClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.UPDATE))
	{
		throw new InvalidOperationException("UPDATE expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.SET
		},
		lookForStatementStarts: false);

	return clause;
}
public TSQLWhereClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the WHERE clause, starting at the WHERE keyword.
	TSQLWhereClause clause = new TSQLWhereClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.WHERE))
	{
		throw new InvalidOperationException("WHERE expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// the helper also handles subqueries, so keywords inside parens
	// are not mistaken for stop words
	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.GROUP,
			TSQLKeywords.HAVING,
			TSQLKeywords.UNION,
			TSQLKeywords.EXCEPT,
			TSQLKeywords.INTERSECT,
			TSQLKeywords.ORDER,
			TSQLKeywords.FOR,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	return clause;
}
public TSQLHavingClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the HAVING clause, starting at the HAVING keyword.
	TSQLHavingClause clause = new TSQLHavingClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.HAVING))
	{
		throw new InvalidOperationException("HAVING expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// the helper also handles subqueries, so keywords inside parens
	// are not mistaken for stop words
	TSQLSubqueryHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>(),
		new List<TSQLKeywords>()
		{
			TSQLKeywords.ORDER,
			TSQLKeywords.UNION,
			TSQLKeywords.EXCEPT,
			TSQLKeywords.INTERSECT,
			TSQLKeywords.FOR,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: true);

	return clause;
}
/// <summary>
/// Delegates to the tokenizer-only constructor (not visible here),
/// then attaches a previously parsed WITH (common table expression)
/// clause to the statement, including its tokens.
/// </summary>
public TSQLMergeStatementParser(TSQLWithClause with, ITSQLTokenizer tokenizer) : this(tokenizer)
{
	// NOTE(review): the chained constructor runs first; assumes it
	// initializes Statement before these assignments — confirm.
	Statement.With = with;
	Statement.Tokens.AddRange(with.Tokens);
}
public TSQLForClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses a FOR clause, e.g. FOR XML AUTO, starting at the FOR
	// keyword.
	TSQLForClause forClause = new TSQLForClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.FOR))
	{
		throw new InvalidOperationException("FOR expected.");
	}

	forClause.Tokens.Add(tokenizer.Current);

	while (tokenizer.MoveNext())
	{
		// a semicolon always terminates the clause
		if (tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon))
		{
			break;
		}

		// OPTION or the start of a new statement also terminates it
		if (tokenizer.Current.Type == TSQLTokenType.Keyword)
		{
			TSQLKeywords keyword = tokenizer.Current.AsKeyword.Keyword;

			if (
				keyword.In(TSQLKeywords.OPTION) ||
				keyword.IsStatementStart())
			{
				break;
			}
		}

		forClause.Tokens.Add(tokenizer.Current);
	}

	return forClause;
}
public TSQLWhenClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses a MERGE WHEN clause, starting at the WHEN keyword.
	TSQLWhenClause clause = new TSQLWhenClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.WHEN))
	{
		throw new InvalidOperationException("WHEN expected.");
	}

	clause.Tokens.Add(tokenizer.Current);

	// We don't have to worry about accidentally running into the next
	// statement, so lookForStatementStarts is false:
	// https://docs.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql
	// "The MERGE statement requires a semicolon (;) as a statement
	// terminator. Error 10713 is raised when a MERGE statement is run
	// without the terminator."
	TSQLTokenParserHelper.ReadUntilStop(
		tokenizer,
		clause,
		new List<TSQLFutureKeywords>()
		{
			TSQLFutureKeywords.OUTPUT
		},
		new List<TSQLKeywords>()
		{
			TSQLKeywords.WHEN,
			TSQLKeywords.OPTION
		},
		lookForStatementStarts: false);

	return clause;
}
/// <summary>
/// Parses a MERGE USING clause, starting at the USING token (a future
/// keyword), recursing through balanced parentheses so that keywords
/// inside derived tables are not mistaken for stop words.
/// </summary>
public TSQLUsingClause Parse(ITSQLTokenizer tokenizer)
{
	TSQLUsingClause usingClause = new TSQLUsingClause();

	if (!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.USING))
	{
		throw new InvalidOperationException("USING expected.");
	}

	usingClause.Tokens.Add(tokenizer.Current);

	/* can contain:
	 *
	 * <table_source> ::=
	 * {
	 *     table_or_view_name [ [ AS ] table_alias ] [ <tablesample_clause> ]
	 *         [ WITH ( table_hint [ [ , ]...n ] ) ]
	 *     | rowset_function [ [ AS ] table_alias ]
	 *         [ ( bulk_column_alias [ ,...n ] ) ]
	 *     | user_defined_function [ [ AS ] table_alias ]
	 *     | OPENXML <openxml_clause>
	 *     | derived_table [ AS ] table_alias [ ( column_alias [ ,...n ] ) ]
	 *     | <joined_table>
	 *     | <pivoted_table>
	 *     | <unpivoted_table>
	 * }
	 * https://docs.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql?view=sql-server-ver15#syntax
	 */

	// tracks paren depth; RecurseParens updates it as it consumes tokens
	int nestedLevel = 0;

	while (
		tokenizer.MoveNext() &&
		// a semicolon always ends the clause
		!tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
		// an unbalanced close paren at depth 0 belongs to an enclosing
		// expression, so stop before consuming it
		!(
			nestedLevel == 0 &&
			tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
		) &&
		(
			// inside parens, keywords never stop the clause
			nestedLevel > 0 ||
			tokenizer.Current.Type != TSQLTokenType.Keyword ||
			(
				tokenizer.Current.Type == TSQLTokenType.Keyword &&
				!tokenizer.Current.AsKeyword.Keyword.In
				(
					// ON is required in MERGE statement after USING
					TSQLKeywords.ON
				)
			)
		))
	{
		TSQLTokenParserHelper.RecurseParens(
			tokenizer,
			usingClause,
			ref nestedLevel);
	}

	return usingClause;
}
/// <summary>
/// Convenience overload: collects comment and whitespace tokens
/// directly into the given element's token list.
/// </summary>
public static void ReadCommentsAndWhitespace(
	ITSQLTokenizer tokenizer,
	TSQLElement element)
{
	ReadCommentsAndWhitespace(
		tokenizer,
		element.Tokens);
}
/// <summary>
/// Parses the SELECT clause, starting at the SELECT keyword, stopping
/// at a semicolon, an unbalanced close paren, a clause keyword such as
/// FROM, or the start of a new statement.
/// </summary>
public TSQLSelectClause Parse(ITSQLTokenizer tokenizer)
{
	TSQLSelectClause select = new TSQLSelectClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.SELECT))
	{
		throw new InvalidOperationException("SELECT expected.");
	}

	select.Tokens.Add(tokenizer.Current);

	// can contain ALL, DISTINCT, TOP, PERCENT, WITH TIES, AS

	// ends with FROM, semicolon, or keyword other than those listed
	// above, when used outside of parens

	// recursively walk down and back up parens;
	// nestedLevel tracks the current paren depth and is updated by
	// RecurseParens as it consumes tokens
	int nestedLevel = 0;

	while (
		tokenizer.MoveNext() &&
		!tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
		// a close paren at depth 0 belongs to an enclosing expression
		!(
			nestedLevel == 0 &&
			tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
		) &&
		(
			// inside parens, keywords never stop the clause
			nestedLevel > 0 ||
			tokenizer.Current.Type != TSQLTokenType.Keyword ||
			(
				tokenizer.Current.Type == TSQLTokenType.Keyword &&
				!tokenizer.Current.AsKeyword.Keyword.In
				(
					TSQLKeywords.INTO,
					TSQLKeywords.FROM,
					TSQLKeywords.WHERE,
					TSQLKeywords.GROUP,
					TSQLKeywords.HAVING,
					TSQLKeywords.ORDER,
					TSQLKeywords.UNION,
					TSQLKeywords.EXCEPT,
					TSQLKeywords.INTERSECT,
					TSQLKeywords.FOR,
					TSQLKeywords.OPTION
				) &&
				!tokenizer.Current.AsKeyword.Keyword.IsStatementStart()
			)
		))
	{
		TSQLSubqueryHelper.RecurseParens(
			tokenizer,
			select,
			ref nestedLevel);
	}

	return select;
}
public TSQLFromClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the FROM clause, starting at the FROM keyword, recursing
	// through balanced parens so that derived tables and table-valued
	// functions do not trigger the stop keywords.
	TSQLFromClause from = new TSQLFromClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.FROM))
	{
		throw new InvalidOperationException("FROM expected.");
	}

	from.Tokens.Add(tokenizer.Current);

	// tracks paren depth; RecurseParens updates it as it consumes tokens
	int nestedLevel = 0;

	while (
		tokenizer.MoveNext() &&
		// use the IsCharacter helper instead of the verbose
		// Type/AsCharacter comparison, for consistency with the other
		// clause parsers in this project
		!tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
		// a close paren at depth 0 belongs to an enclosing expression
		!(
			nestedLevel == 0 &&
			tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
		) &&
		(
			// inside parens, keywords never stop the clause
			nestedLevel > 0 ||
			tokenizer.Current.Type != TSQLTokenType.Keyword ||
			(
				tokenizer.Current.Type == TSQLTokenType.Keyword &&
				!tokenizer.Current.AsKeyword.Keyword.In
				(
					TSQLKeywords.WHERE,
					TSQLKeywords.GROUP,
					TSQLKeywords.HAVING,
					TSQLKeywords.ORDER,
					TSQLKeywords.UNION,
					TSQLKeywords.EXCEPT,
					TSQLKeywords.INTERSECT,
					TSQLKeywords.FOR,
					TSQLKeywords.OPTION
				) &&
				!tokenizer.Current.AsKeyword.Keyword.IsStatementStart()
			)
		))
	{
		TSQLSubqueryHelper.RecurseParens(
			tokenizer,
			from,
			ref nestedLevel);
	}

	return from;
}
/// <summary>
/// Parses an UPDATE SET clause, starting at the SET keyword, stopping
/// at a semicolon, an unbalanced close paren, OUTPUT, a clause keyword
/// such as FROM/WHERE/OPTION, or the start of a new statement.
/// </summary>
public TSQLSetClause Parse(ITSQLTokenizer tokenizer)
{
	TSQLSetClause set = new TSQLSetClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.SET))
	{
		throw new InvalidOperationException("SET expected.");
	}

	set.Tokens.Add(tokenizer.Current);

	// TODO:
	// parse this rare but valid horror scenario
	// update output
	// set output.output = 1
	// output deleted.*
	// maybe create assignment expression parser?

	// tracks paren depth; RecurseParens updates it as it consumes tokens
	int nestedLevel = 0;

	while (
		tokenizer.MoveNext() &&
		!tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
		// a close paren at depth 0 belongs to an enclosing expression
		!(
			nestedLevel == 0 &&
			tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
		) &&
		(
			// inside parens, nothing stops the clause
			nestedLevel > 0 ||
			// OUTPUT is not a reserved keyword, so at depth 0 a
			// non-keyword token only continues the clause if it is
			// also not the OUTPUT future keyword
			(
				tokenizer.Current.Type != TSQLTokenType.Keyword &&
				!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OUTPUT)
			) ||
			(
				tokenizer.Current.Type == TSQLTokenType.Keyword &&
				!tokenizer.Current.AsKeyword.Keyword.In
				(
					TSQLKeywords.FROM,
					TSQLKeywords.WHERE,
					TSQLKeywords.OPTION
				) &&
				!tokenizer.Current.AsKeyword.Keyword.IsStatementStart()
			)
		))
	{
		TSQLSubqueryHelper.RecurseParens(
			tokenizer,
			set,
			ref nestedLevel);
	}

	return set;
}
/// <summary>
/// Collects tokens into savedTokens for as long as the current token
/// is whitespace or a comment, leaving the tokenizer positioned on the
/// first token that is neither. Starts with the CURRENT token, unlike
/// ReadThroughAnyCommentsOrWhitespace, which advances first.
/// </summary>
public static void ReadCommentsAndWhitespace(
	ITSQLTokenizer tokenizer,
	List<TSQLToken> savedTokens)
{
	while (true)
	{
		TSQLToken token = tokenizer.Current;

		if (!token.IsWhitespace() &&
			!token.IsComment())
		{
			return;
		}

		savedTokens.Add(token);

		tokenizer.MoveNext();
	}
}
public TSQLArgumentList Parse(ITSQLTokenizer tokenizer)
{
	// Parses an argument list of the form "<expression> AS <data type>"
	// (e.g. the argument of CAST), returning a single-element argument
	// list holding a value-as-type expression.
	List<TSQLToken> savedTokens = new List<TSQLToken>();

	// consume leading trivia before the argument loop so an argument
	// list consisting only of whitespace and comments is still handled
	TSQLTokenParserHelper.ReadCommentsAndWhitespace(
		tokenizer,
		savedTokens);

	TSQLValueAsTypeExpression valueAsType = new TSQLValueAsTypeExpression();

	TSQLExpression valueExpression = new TSQLValueExpressionParser().Parse(tokenizer);

	valueAsType.Expression = valueExpression;
	savedTokens.AddRange(valueExpression.Tokens);

	TSQLTokenParserHelper.ReadCommentsAndWhitespace(
		tokenizer,
		savedTokens);

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.AS))
	{
		throw new InvalidOperationException("AS expected.");
	}

	savedTokens.Add(tokenizer.Current);

	TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
		tokenizer,
		savedTokens);

	// the token following AS is the target data type
	valueAsType.DataType = tokenizer.Current.AsIdentifier;
	savedTokens.Add(tokenizer.Current);

	// advance up to the closing paren of the argument list
	TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
		tokenizer,
		savedTokens);

	TSQLArgumentList arguments = new TSQLArgumentList(
		new List<TSQLExpression>()
		{
			valueAsType
		});

	arguments.Tokens.AddRange(savedTokens);

	return arguments;
}
/// <summary>
/// Advances the tokenizer past any run of whitespace and comment
/// tokens, collecting them into savedTokens. Always advances at least
/// once, unlike ReadCommentsAndWhitespace, which starts with the
/// current token.
/// </summary>
public static void ReadThroughAnyCommentsOrWhitespace(
	ITSQLTokenizer tokenizer,
	List<TSQLToken> savedTokens)
{
	while (tokenizer.MoveNext())
	{
		if (!tokenizer.Current.IsWhitespace() &&
			!tokenizer.Current.IsComment())
		{
			break;
		}

		savedTokens.Add(tokenizer.Current);
	}
}
/// <summary>
/// Parses the next expression, then, if the tokenizer stopped on a
/// binary or compound-assignment operator, continues parsing the rest
/// of the expression with the appropriate operator parser; otherwise
/// returns the expression as-is.
/// </summary>
public TSQLExpression Parse(ITSQLTokenizer tokenizer)
{
	TSQLExpression expression = ParseNext(tokenizer);

	if (
		tokenizer.Current != null &&
		// a bare * is multiplication's token but here means "all
		// columns", so it never continues an expression
		tokenizer.Current.Text != "*" &&
		tokenizer.Current.Type.In(
			TSQLTokenType.Operator) &&
		// check for operator =, when expression type is column, and don't parse operator if found
		// e.g. IsFinishedGoods = p.FinishedGoodsFlag
		(
			tokenizer.Current.Text != "=" ||
			expression?.Type != TSQLExpressionType.Column
		))
	{
		if (
			expression?.Type == TSQLExpressionType.Variable &&
			// https://docs.microsoft.com/en-us/sql/t-sql/language-elements/compound-operators-transact-sql
			new string[] { "=", "+=", "-=", "*=", "/=", "%=", "&=", "|=" }.Contains(tokenizer.Current.AsOperator.Text))
		{
			// @variable followed by an assignment operator:
			// parse as a variable assignment
			return(new TSQLVariableAssignmentExpressionParser().Parse(
				tokenizer,
				expression.AsVariable));
		}
		else
		{
			// otherwise parse as a binary operator expression with the
			// already-parsed expression as its left side
			return(new TSQLOperatorExpressionParser().Parse(
				tokenizer,
				expression));
		}
	}
	else
	{
		return(expression);
	}
}
public TSQLWhereClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses the WHERE clause, starting at the WHERE keyword, recursing
	// through balanced parens so subquery keywords are not mistaken for
	// stop words.
	TSQLWhereClause where = new TSQLWhereClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.WHERE))
	{
		throw new InvalidOperationException("WHERE expected.");
	}

	where.Tokens.Add(tokenizer.Current);

	// tracks paren depth; RecurseParens updates it as it consumes tokens
	int parenDepth = 0;

	while (tokenizer.MoveNext())
	{
		TSQLToken token = tokenizer.Current;

		// a semicolon always ends the clause
		if (token.IsCharacter(TSQLCharacters.Semicolon))
		{
			break;
		}

		// a close paren at depth 0 belongs to an enclosing expression
		if (parenDepth == 0 &&
			token.IsCharacter(TSQLCharacters.CloseParentheses))
		{
			break;
		}

		// at depth 0, a clause keyword or a statement start ends the
		// clause; inside parens, keywords never stop it
		if (parenDepth == 0 &&
			token.Type == TSQLTokenType.Keyword)
		{
			TSQLKeywords keyword = token.AsKeyword.Keyword;

			if (
				keyword.In
				(
					TSQLKeywords.GROUP,
					TSQLKeywords.HAVING,
					TSQLKeywords.UNION,
					TSQLKeywords.EXCEPT,
					TSQLKeywords.INTERSECT,
					TSQLKeywords.ORDER,
					TSQLKeywords.FOR,
					TSQLKeywords.OPTION
				) ||
				keyword.IsStatementStart())
			{
				break;
			}
		}

		TSQLSubqueryHelper.RecurseParens(
			tokenizer,
			where,
			ref parenDepth);
	}

	return where;
}
public TSQLCommonTableExpressionClause Parse(ITSQLTokenizer tokenizer)
{
	// Parses a single common table expression clause, starting at its
	// name identifier, and leaves the tokenizer on the following token.
	TSQLCommonTableExpressionClause cte = new TSQLCommonTableExpressionClause();

	if (tokenizer.Current.Type != Tokens.TSQLTokenType.Identifier)
	{
		// was ApplicationException, which the .NET Framework Design
		// Guidelines discourage throwing; InvalidOperationException
		// matches every other parser in this project
		throw new InvalidOperationException("Identifier expected.");
	}

	cte.Name = tokenizer.Current.AsIdentifier.Name;

	tokenizer.MoveNext();

	return cte;
}