/// <summary>
/// Parses a HAVING clause starting at the current token, which must be HAVING.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not HAVING.</exception>
public TSQLHavingClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.HAVING))
    {
        throw new InvalidOperationException("HAVING expected.");
    }

    TSQLHavingClause clause = new TSQLHavingClause();
    clause.Tokens.Add(tokenizer.Current);

    // the predicate may contain subqueries; consume tokens until a
    // clause keyword that can follow HAVING is reached
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.ORDER,
            TSQLKeywords.UNION,
            TSQLKeywords.EXCEPT,
            TSQLKeywords.INTERSECT,
            TSQLKeywords.FOR,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses the DELETE clause of a DELETE statement starting at the current token.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not DELETE.</exception>
public TSQLDeleteClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.DELETE))
    {
        throw new InvalidOperationException("DELETE expected.");
    }

    TSQLDeleteClause clause = new TSQLDeleteClause();
    clause.Tokens.Add(tokenizer.Current);

    // consume tokens until a clause that can follow DELETE begins
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.FROM,
            TSQLKeywords.WHERE,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses a CASE expression starting at the current token, which must be CASE.
/// Unlike the clause parsers, the stop word (END) is part of the expression
/// itself, so it is consumed into the expression's tokens.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not CASE.</exception>
public TSQLCaseExpression Parse(ITSQLTokenizer tokenizer)
{
    TSQLCaseExpression caseExpression = new TSQLCaseExpression();

    if (!tokenizer.Current.IsKeyword(TSQLKeywords.CASE))
    {
        throw new InvalidOperationException("CASE expected.");
    }

    caseExpression.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        caseExpression,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.END
        },
        lookForStatementStarts: false);

    // this is different than the other clauses because the
    // stop word is still part of the expression instead of
    // being part of the next expression or clause like in
    // the other parsers.
    // guard against a truncated token stream that ends before END is found;
    // previously this would add a null token and then call MoveNext anyway
    if (tokenizer.Current != null)
    {
        caseExpression.Tokens.Add(tokenizer.Current);
        tokenizer.MoveNext();
    }

    return caseExpression;
}
/// <summary>
/// Parses the MERGE clause of a MERGE statement starting at the current token.
/// The clause can contain TOP().
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not MERGE.</exception>
public TSQLMergeClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.MERGE))
    {
        throw new InvalidOperationException("MERGE expected.");
    }

    TSQLMergeClause clause = new TSQLMergeClause();
    clause.Tokens.Add(tokenizer.Current);

    // consume tokens until a clause that can follow the MERGE target begins
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT,
            TSQLFutureKeywords.USING
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.INTO,
            TSQLKeywords.AS,
            TSQLKeywords.ON,
            TSQLKeywords.WHEN
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses an OFFSET clause, folding in a trailing FETCH clause when present,
/// since FETCH belongs to OFFSET rather than starting a new clause.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not OFFSET.</exception>
public TSQLOffsetClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
    {
        throw new InvalidOperationException("OFFSET expected.");
    }

    TSQLOffsetClause clause = new TSQLOffsetClause();
    clause.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.FETCH
        },
        lookForStatementStarts: true);

    // FETCH ... ROWS ONLY is part of the OFFSET clause
    if (tokenizer.Current.IsKeyword(TSQLKeywords.FETCH))
    {
        TSQLFetchClause fetch = new TSQLFetchClauseParser().Parse(tokenizer);
        clause.Tokens.AddRange(fetch.Tokens);
    }

    return clause;
}
/// <summary>
/// Parses a GROUP BY clause starting at the current token, which must be GROUP.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not GROUP.</exception>
public TSQLGroupByClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.GROUP))
    {
        throw new InvalidOperationException("GROUP expected.");
    }

    TSQLGroupByClause clause = new TSQLGroupByClause();
    clause.Tokens.Add(tokenizer.Current);

    // grouping expressions may contain subqueries; consume until a
    // clause keyword that can follow GROUP BY is reached
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.HAVING,
            TSQLKeywords.UNION,
            TSQLKeywords.EXCEPT,
            TSQLKeywords.INTERSECT,
            TSQLKeywords.ORDER,
            TSQLKeywords.FOR,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses an ON clause (join condition, or the match condition in MERGE)
/// starting at the current token, which must be ON.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not ON.</exception>
public TSQLOnClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.ON))
    {
        throw new InvalidOperationException("ON expected.");
    }

    TSQLOnClause clause = new TSQLOnClause();
    clause.Tokens.Add(tokenizer.Current);

    // TODO: tighten logic to handle tables named OUTPUT, but still handle ON usage in MERGE
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT,
            TSQLFutureKeywords.USING
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.INNER,
            TSQLKeywords.OUTER,
            TSQLKeywords.JOIN,
            TSQLKeywords.WHEN
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses an ORDER BY clause starting at the current token, which must be ORDER.
/// An OFFSET clause that follows is folded into the ORDER BY tokens.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not ORDER.</exception>
public TSQLOrderByClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.ORDER))
    {
        throw new InvalidOperationException("ORDER expected.");
    }

    TSQLOrderByClause clause = new TSQLOrderByClause();
    clause.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OFFSET
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.FOR,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: true);

    // have to handle OFFSET parsing specially because it can contain FETCH,
    // which would otherwise signal the start of a new statement instead of
    // still being contained within OFFSET
    if (tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
    {
        TSQLOffsetClause offset = new TSQLOffsetClauseParser().Parse(tokenizer);
        clause.Tokens.AddRange(offset.Tokens);
    }

    return clause;
}
/// <summary>
/// Parses a VALUES list starting at the current token, which must be VALUES.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not VALUES.</exception>
public TSQLValues Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.VALUES))
    {
        throw new InvalidOperationException("VALUES expected.");
    }

    TSQLValues values = new TSQLValues();
    values.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        values,
        // stop words come from usage in MERGE
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.ON,
            TSQLKeywords.WHEN
        },
        // INSERT INTO ... VALUES ... SELECT
        lookForStatementStarts: true);

    return values;
}
/// <summary>
/// Parses the UPDATE clause of an UPDATE statement starting at the current
/// token, stopping at the SET keyword.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not UPDATE.</exception>
public TSQLUpdateClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.UPDATE))
    {
        throw new InvalidOperationException("UPDATE expected.");
    }

    TSQLUpdateClause clause = new TSQLUpdateClause();
    clause.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.SET
        },
        lookForStatementStarts: false);

    return clause;
}
/// <summary>
/// Parses a WHEN clause of a MERGE statement starting at the current token.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not WHEN.</exception>
public TSQLWhenClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.WHEN))
    {
        throw new InvalidOperationException("WHEN expected.");
    }

    TSQLWhenClause clause = new TSQLWhenClause();
    clause.Tokens.Add(tokenizer.Current);

    // we don't have to worry about accidentally running into the next statement:
    // https://docs.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql
    // "The MERGE statement requires a semicolon (;) as a statement terminator.
    //  Error 10713 is raised when a MERGE statement is run without the terminator."
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.WHEN,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: false);

    return clause;
}
/// <summary>
/// Parses the INSERT clause of an INSERT statement starting at the current
/// token, stopping at the source of the inserted rows (SELECT, EXECUTE,
/// VALUES, or DEFAULT).
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not INSERT.</exception>
public TSQLInsertClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.INSERT))
    {
        throw new InvalidOperationException("INSERT expected.");
    }

    TSQLInsertClause clause = new TSQLInsertClause();
    clause.Tokens.Add(tokenizer.Current);

    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>
        {
            TSQLFutureKeywords.OUTPUT
        },
        new List<TSQLKeywords>
        {
            TSQLKeywords.SELECT,
            TSQLKeywords.EXECUTE,
            TSQLKeywords.VALUES,
            TSQLKeywords.DEFAULT
        },
        lookForStatementStarts: false);

    return clause;
}
/// <summary>
/// Parses the USING clause of a MERGE statement starting at the current
/// token, which must be the USING future keyword. Consumes tokens until a
/// semicolon, an unbalanced close paren, or the ON keyword is reached.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not USING.</exception>
public TSQLUsingClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLUsingClause usingClause = new TSQLUsingClause();
    if (!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.USING))
    {
        throw new InvalidOperationException("USING expected.");
    }
    usingClause.Tokens.Add(tokenizer.Current);
    /* can contain:
     *
     * <table_source> ::=
     * {
     *     table_or_view_name [ [ AS ] table_alias ] [ <tablesample_clause> ]
     *         [ WITH ( table_hint [ [ , ]...n ] ) ]
     *     | rowset_function [ [ AS ] table_alias ]
     *         [ ( bulk_column_alias [ ,...n ] ) ]
     *     | user_defined_function [ [ AS ] table_alias ]
     *     | OPENXML <openxml_clause>
     *     | derived_table [ AS ] table_alias [ ( column_alias [ ,...n ] ) ]
     *     | <joined_table>
     *     | <pivoted_table>
     *     | <unpivoted_table>
     * }
     * https://docs.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql?view=sql-server-ver15#syntax
     */
    // tracks paren nesting so that keywords and close parens inside a
    // derived table or function call do not end the clause
    int nestedLevel = 0;
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
        !(
            // an unmatched close paren at root level belongs to an
            // enclosing scope, not to this clause
            nestedLevel == 0 &&
            tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
        ) &&
        (
            nestedLevel > 0 ||
            tokenizer.Current.Type != TSQLTokenType.Keyword ||
            (
                tokenizer.Current.Type == TSQLTokenType.Keyword &&
                !tokenizer.Current.AsKeyword.Keyword.In
                (
                    // ON is required in MERGE statement after USING
                    TSQLKeywords.ON
                )
            )
        ))
    {
        // adds the current token and adjusts nestedLevel for parens
        TSQLTokenParserHelper.RecurseParens(
            tokenizer,
            usingClause,
            ref nestedLevel);
    }
    return (usingClause);
}
/// <summary>
/// Parses the special argument syntax of CAST: a single value expression
/// followed by AS and a data type, returning it as a one-element argument
/// list. The closing paren is left for the caller to consume.
/// </summary>
/// <exception cref="InvalidOperationException">AS keyword not found after the value expression.</exception>
public TSQLArgumentList Parse(ITSQLTokenizer tokenizer)
{
    List<TSQLToken> tokens = new List<TSQLToken>();
    // need to do this before starting the argument loop
    // so we can handle an empty argument list of just whitespace
    // and comments
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        tokens);
    TSQLValueAsTypeExpression argument = new TSQLValueAsTypeExpression();
    // the value being cast
    TSQLExpression expression = new TSQLValueExpressionParser().Parse(tokenizer);
    argument.Expression = expression;
    tokens.AddRange(expression.Tokens);
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        tokens);
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.AS))
    {
        throw new InvalidOperationException("AS expected.");
    }
    tokens.Add(tokenizer.Current);
    TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
        tokenizer,
        tokens);
    // the target data type immediately follows AS
    argument.DataType = tokenizer.Current.AsIdentifier;
    tokens.Add(tokenizer.Current);
    // reading until closing paren
    TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
        tokenizer,
        tokens);
    TSQLArgumentList argList = new TSQLArgumentList(
        new List<TSQLExpression>
        {
            argument
        });
    argList.Tokens.AddRange(tokens);
    return (argList);
}
/// <summary>
/// Parses the SET clause of an UPDATE statement starting at the current
/// token, which must be SET. Consumes tokens until a semicolon, an unbalanced
/// close paren, OUTPUT, FROM/WHERE/OPTION, or the start of a new statement.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not SET.</exception>
public TSQLSetClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLSetClause set = new TSQLSetClause();
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.SET))
    {
        throw new InvalidOperationException("SET expected.");
    }
    set.Tokens.Add(tokenizer.Current);
    // TODO: parse this rare but valid horror scenario
    // update output
    // set output.output = 1
    // output deleted.*
    // maybe create assignment expression parser?
    // tracks paren nesting so stop words inside parens are ignored
    int nestedLevel = 0;
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
        !(
            nestedLevel == 0 &&
            tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
        ) &&
        (
            nestedLevel > 0 ||
            (
                // non-keyword token that is also not the OUTPUT future keyword
                tokenizer.Current.Type != TSQLTokenType.Keyword &&
                !tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OUTPUT)
            ) ||
            (
                // keyword that is neither a clause that follows SET
                // nor the start of a new statement
                tokenizer.Current.Type == TSQLTokenType.Keyword &&
                !tokenizer.Current.AsKeyword.Keyword.In
                (
                    TSQLKeywords.FROM,
                    TSQLKeywords.WHERE,
                    TSQLKeywords.OPTION
                ) &&
                !tokenizer.Current.AsKeyword.Keyword.IsStatementStart()
            )
        ))
    {
        // adds the current token and adjusts nestedLevel for parens
        TSQLTokenParserHelper.RecurseParens(
            tokenizer,
            set,
            ref nestedLevel);
    }
    return (set);
}
/// <summary>
/// Parses a comma-separated function argument list up to (but not including)
/// the closing paren, returning the parsed argument expressions.
/// </summary>
public TSQLArgumentList Parse(ITSQLTokenizer tokenizer)
{
    List<TSQLExpression> arguments = new List<TSQLExpression>();
    TSQLValueExpressionParser factory = new TSQLValueExpressionParser();
    List<TSQLToken> tokens = new List<TSQLToken>();
    // need to do this before starting the argument loop
    // so we can handle an empty argument list of just whitespace
    // and comments
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        tokens);
    // one iteration per argument; relies on factory.Parse advancing the
    // tokenizer past each argument expression
    while (
        tokenizer.Current != null &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
    {
        TSQLTokenParserHelper.ReadCommentsAndWhitespace(
            tokenizer,
            tokens);
        TSQLExpression argument = factory.Parse(tokenizer);
        tokens.AddRange(argument.Tokens);
        arguments.Add(argument);
        // consume the separator between arguments
        if (tokenizer.Current.IsCharacter(TSQLCharacters.Comma))
        {
            tokens.Add(tokenizer.Current);
            tokenizer.MoveNext();
            TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                tokenizer,
                tokens);
        }
    }
    TSQLArgumentList argList = new TSQLArgumentList(
        arguments);
    argList.Tokens.AddRange(tokens);
    return (argList);
}
/// <summary>
/// Parses an EXECUTE statement starting at the current token.
/// </summary>
public TSQLExecuteStatement Parse()
{
    Statement.Tokens.Add(Tokenizer.Current);

    // no clause-level stop words: consume everything up to the
    // start of the next statement
    TSQLTokenParserHelper.ReadUntilStop(
        Tokenizer,
        Statement,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>(),
        lookForStatementStarts: true);

    return Statement;
}
/// <summary>
/// Parses an OPTION clause starting at the current token, which must be
/// OPTION, through the parenthesized hint list and any trailing comments
/// or whitespace.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not OPTION.</exception>
public TSQLOptionClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLOptionClause option = new TSQLOptionClause();
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.OPTION))
    {
        throw new InvalidOperationException("OPTION expected.");
    }
    option.Tokens.Add(tokenizer.Current);
    // consume everything up to (but not including) the open paren
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
    {
        option.Tokens.Add(tokenizer.Current);
    }
    // do-while so the open paren itself (the token the loop above stopped on)
    // is added on the first iteration; the null check covers the case where
    // the token stream ended before an open paren was found
    do
    {
        if (tokenizer.Current != null)
        {
            option.Tokens.Add(tokenizer.Current);
        }
    }
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses));
    // consume the close paren if the stream did not end first
    if (tokenizer.Current != null)
    {
        option.Tokens.Add(tokenizer.Current);
        tokenizer.MoveNext();
    }
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        option);
    return (option);
}
/// <summary>
/// Parses a FETCH clause starting at the current token, which must be FETCH.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not FETCH.</exception>
public TSQLFetchClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.FETCH))
    {
        throw new InvalidOperationException("FETCH expected.");
    }

    TSQLFetchClause clause = new TSQLFetchClause();
    clause.Tokens.Add(tokenizer.Current);

    // no clause-specific stop words; stop only at the start of the next statement
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>(),
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses an INTO clause starting at the current token, which must be INTO.
/// Only consumes tokens that can form a table reference (identifiers,
/// periods, parens, whitespace, and comments); any other token ends the clause.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not INTO.</exception>
public TSQLIntoClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLIntoClause into = new TSQLIntoClause();
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.INTO))
    {
        throw new InvalidOperationException("INTO expected.");
    }
    into.Tokens.Add(tokenizer.Current);
    // tracks paren nesting so tokens inside parens are consumed freely
    int nestedLevel = 0;
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
        !(
            // an unmatched close paren at root level belongs to an
            // enclosing scope, not to this clause
            nestedLevel == 0 &&
            tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
        ) &&
        (
            nestedLevel > 0 ||
            // at root level only table-reference tokens are allowed
            tokenizer.Current.Type == TSQLTokenType.Identifier ||
            tokenizer.Current.IsCharacter(TSQLCharacters.Period) ||
            tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses) ||
            tokenizer.Current.Type == TSQLTokenType.Whitespace ||
            tokenizer.Current.Type == TSQLTokenType.SingleLineComment ||
            tokenizer.Current.Type == TSQLTokenType.MultilineComment
        ))
    {
        // adds the current token and adjusts nestedLevel for parens
        TSQLTokenParserHelper.RecurseParens(
            tokenizer,
            into,
            ref nestedLevel);
    }
    return (into);
}
/// <summary>
/// Parses a FROM clause starting at the current token, which must be FROM.
/// The source list may contain derived tables and table-valued functions.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not FROM.</exception>
public TSQLFromClause Parse(ITSQLTokenizer tokenizer)
{
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.FROM))
    {
        throw new InvalidOperationException("FROM expected.");
    }

    TSQLFromClause clause = new TSQLFromClause();
    clause.Tokens.Add(tokenizer.Current);

    // consume tokens until a clause keyword that can follow FROM is reached
    TSQLTokenParserHelper.ReadUntilStop(
        tokenizer,
        clause,
        new List<TSQLFutureKeywords>(),
        new List<TSQLKeywords>
        {
            TSQLKeywords.WHERE,
            TSQLKeywords.GROUP,
            TSQLKeywords.HAVING,
            TSQLKeywords.ORDER,
            TSQLKeywords.UNION,
            TSQLKeywords.EXCEPT,
            TSQLKeywords.INTERSECT,
            TSQLKeywords.FOR,
            TSQLKeywords.OPTION
        },
        lookForStatementStarts: true);

    return clause;
}
/// <summary>
/// Parses a set operator clause (UNION [ALL], EXCEPT, or INTERSECT) together
/// with the SELECT statement it introduces, including any parens wrapping
/// that SELECT.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not a set operator.</exception>
public TSQLSetOperatorClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLSetOperatorClause set = null;

    if (tokenizer.Current.IsKeyword(TSQLKeywords.UNION))
    {
        set = new TSQLUnionClause();
        set.Tokens.Add(tokenizer.Current);

        // UNION may be followed by ALL
        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            set.Tokens);

        if (tokenizer.Current.IsKeyword(TSQLKeywords.ALL))
        {
            set.Tokens.Add(tokenizer.Current);
            tokenizer.MoveNext();
        }
    }
    else if (tokenizer.Current.IsKeyword(TSQLKeywords.EXCEPT))
    {
        set = new TSQLExceptClause();
        set.Tokens.Add(tokenizer.Current);
        tokenizer.MoveNext();
    }
    else if (tokenizer.Current.IsKeyword(TSQLKeywords.INTERSECT))
    {
        set = new TSQLIntersectClause();
        set.Tokens.Add(tokenizer.Current);
        tokenizer.MoveNext();
    }
    else
    {
        // previously fell through with set == null and failed later with a
        // NullReferenceException; fail fast with a clear message instead,
        // consistent with the precondition checks in the other clause parsers
        throw new InvalidOperationException("UNION, EXCEPT, or INTERSECT expected.");
    }

    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        set);

    // count parens wrapping the following SELECT, e.g. UNION (SELECT 1)
    int level = 0;

    while (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
    {
        set.Tokens.Add(tokenizer.Current);
        level++;
        tokenizer.MoveNext();
    }

    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        set);

    TSQLSelectStatement select = new TSQLLimitedSelectStatementParser(tokenizer).Parse();
    set.Select = select;
    set.Tokens.AddRange(select.Tokens);

    // balance the parens counted above
    while (level > 0 &&
        tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
    {
        set.Tokens.Add(tokenizer.Current);
        level--;
        tokenizer.MoveNext();
    }

    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        set);

    return set;
}
/// <summary>
/// Parses the next value expression from the token stream, dispatching on the
/// current token: *, unary operator (returns null), parenthesized subquery or
/// grouped expression, variable, literal constant, CASE, system column, or a
/// (possibly multi-part) column/function reference. Returns null when no
/// expression can be parsed at the current position.
/// </summary>
public TSQLExpression ParseNext(
    ITSQLTokenizer tokenizer)
{
    if (tokenizer.Current == null)
    {
        return (null);
    }

    // look at the current/first token to determine what to do
    if (tokenizer.Current.Text == "*")
    {
        TSQLMulticolumnExpression simpleMulti = new TSQLMulticolumnExpression();

        simpleMulti.Tokens.Add(tokenizer.Current);

        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            simpleMulti.Tokens);

        return (simpleMulti);

        // still need to separately check for p.* below
    }
    // this checks for unary operators, e.g. +, -, and ~
    else if (tokenizer.Current.Type.In(
        TSQLTokenType.Operator))
    {
        return (null);
    }
    else if (tokenizer.Current.IsCharacter(
        TSQLCharacters.OpenParentheses))
    {
        List<TSQLToken> tokens = new List<TSQLToken>();

        tokens.Add(tokenizer.Current);

        // read through any whitespace so we can check specifically for a SELECT
        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            tokens);

        if (tokenizer.Current.IsKeyword(TSQLKeywords.SELECT))
        {
            #region parse subquery
            TSQLSubqueryExpression subquery = new TSQLSubqueryExpression();

            subquery.Tokens.AddRange(tokens);

            TSQLSelectStatement select = new TSQLSelectStatementParser(tokenizer).Parse();

            subquery.Select = select;

            subquery.Tokens.AddRange(select.Tokens);

            // consume the subquery's closing paren if present
            if (tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
            {
                subquery.Tokens.Add(tokenizer.Current);

                tokenizer.MoveNext();
            }

            return (subquery);
            #endregion
        }
        else
        {
            #region parse expression contained/grouped inside parenthesis
            TSQLGroupedExpression group = new TSQLGroupedExpression();

            group.Tokens.AddRange(tokens);

            group.InnerExpression = new TSQLValueExpressionParser().Parse(
                tokenizer);

            group.Tokens.AddRange(group.InnerExpression.Tokens);

            // consume the group's closing paren if present
            if (tokenizer.Current.IsCharacter(
                TSQLCharacters.CloseParentheses))
            {
                group.Tokens.Add(tokenizer.Current);

                tokenizer.MoveNext();
            }

            return (group);
            #endregion
        }
    }
    else if (tokenizer.Current.Type.In(
        TSQLTokenType.Variable,
        TSQLTokenType.SystemVariable))
    {
        TSQLVariableExpression variable = new TSQLVariableExpression();

        variable.Tokens.Add(tokenizer.Current);

        variable.Variable = tokenizer.Current.AsVariable;

        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            variable.Tokens);

        return (variable);
    }
    else if (tokenizer.Current.Type.In(
        TSQLTokenType.BinaryLiteral,
        TSQLTokenType.MoneyLiteral,
        TSQLTokenType.NumericLiteral,
        TSQLTokenType.StringLiteral,
        TSQLTokenType.IncompleteString))
    {
        TSQLConstantExpression constant = new TSQLConstantExpression();

        constant.Literal = tokenizer.Current.AsLiteral;

        constant.Tokens.Add(tokenizer.Current);

        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            constant.Tokens);

        return (constant);
    }
    else if (tokenizer.Current.IsKeyword(TSQLKeywords.CASE))
    {
        return (new TSQLCaseExpressionParser().Parse(tokenizer));
    }
    else if (tokenizer.Current.Type.In(
        TSQLTokenType.SystemColumnIdentifier,
        TSQLTokenType.IncompleteIdentifier))
    {
        TSQLColumnExpression column = new TSQLColumnExpression();

        column.Column = tokenizer.Current.AsSystemColumnIdentifier;

        column.Tokens.Add(tokenizer.Current);

        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
            tokenizer,
            column.Tokens);

        return (column);
    }
    else if (tokenizer.Current.Type.In(
        TSQLTokenType.Identifier,
        TSQLTokenType.SystemIdentifier))
    {
        // column, with or without alias, or with full explicit table name with up to 5 parts
        // or function, up to 4 part naming

        // find last token up to and including possible first paren

        // if *, then multi column
        // if paren, then function
        // else column

        // alias would be any tokens prior to last period, removing whitespace

        List<TSQLToken> tokens = new List<TSQLToken>();

        tokens.Add(tokenizer.Current);

        // accumulate multi-part name tokens until a token type decides
        // which kind of expression this is
        while (tokenizer.MoveNext())
        {
            if (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
            {
                #region parse function

                TSQLFunctionExpression function = new TSQLFunctionExpression();

                function.Tokens.AddRange(tokens);

                function.Tokens.Add(tokenizer.Current);

                // name tokens with comments/whitespace stripped
                var identityTokens = tokens
                    .Where(t =>
                        !t.IsComment() &&
                        !t.IsWhitespace())
                    .ToList();

                // the last name token is the function name itself
                function.Function = identityTokens[identityTokens.Count - 1]
                    .AsIdentifier;

                if (identityTokens.Count > 1)
                {
                    // everything before the final period and name
                    function.QualifiedPath = identityTokens
                        .GetRange(
                            0,
                            identityTokens.Count - 2);
                }

                tokenizer.MoveNext();

                TSQLArgumentList arguments = null;

                // CAST function has its own very unique argument syntax
                if (function.Function.IsIdentifier(TSQLIdentifiers.CAST))
                {
                    arguments = new TSQLValueAsTypeExpressionParser().Parse(
                        tokenizer);
                }
                else
                {
                    arguments = new TSQLArgumentListParser().Parse(
                        tokenizer);
                }

                function.Tokens.AddRange(arguments.Tokens);

                function.Arguments = arguments;

                // consume the argument list's closing paren if present
                if (tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
                {
                    function.Tokens.Add(tokenizer.Current);
                }

                tokenizer.MoveNext();

                TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                    tokenizer,
                    function);

                // look for windowed aggregate
                if (tokenizer.Current.IsKeyword(TSQLKeywords.OVER))
                {
                    function.Tokens.Add(tokenizer.Current);

                    tokenizer.MoveNext();

                    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                        tokenizer,
                        function);

                    if (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
                    {
                        function.Tokens.Add(tokenizer.Current);

                        // recursively look for final close parens
                        TSQLTokenParserHelper.ReadUntilStop(
                            tokenizer,
                            function,
                            new List<TSQLFutureKeywords> { },
                            new List<TSQLKeywords> { },
                            lookForStatementStarts: false);

                        if (tokenizer.Current != null &&
                            tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
                        {
                            function.Tokens.Add(tokenizer.Current);

                            tokenizer.MoveNext();
                        }
                    }
                }

                return (function);

                #endregion
            }
            else if (tokenizer.Current.Text == "*")
            {
                #region parse multi column reference

                // e.g. p.*

                TSQLMulticolumnExpression multi = new TSQLMulticolumnExpression();

                multi.Tokens.AddRange(tokens);

                multi.Tokens.Add(tokenizer.Current);

                List<TSQLToken> columnReference = tokens
                    .Where(t =>
                        !t.IsComment() &&
                        !t.IsWhitespace())
                    .ToList();

                if (columnReference.Count > 0)
                {
                    // p.* will have the single token p in the final list

                    // AdventureWorks..ErrorLog.* will have 4 tokens in the final list
                    // e.g. {AdventureWorks, ., ., ErrorLog}

                    multi.TableReference = columnReference
                        .GetRange(0, columnReference
                            .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                        .ToList();
                }

                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    multi.Tokens);

                return (multi);

                #endregion
            }
            else if (
                tokenizer.Current.IsCharacter(TSQLCharacters.Comma) ||
                tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses) ||
                tokenizer.Current.Type.In(
                    TSQLTokenType.Keyword,
                    TSQLTokenType.Operator) ||
                // this will be a nasty check, but I don't want to copy the internal logic elsewhere
                // two identifiers in a row means that the second one is an alias
                (
                    tokenizer.Current.Type.In(
                        TSQLTokenType.Identifier,
                        TSQLTokenType.IncompleteIdentifier) &&
                    tokens
                        .Where(t =>
                            !t.IsComment() &&
                            !t.IsWhitespace())
                        .LastOrDefault()
                        ?.Type.In(
                            TSQLTokenType.Identifier,
                            TSQLTokenType.BinaryLiteral,
                            TSQLTokenType.MoneyLiteral,
                            TSQLTokenType.NumericLiteral,
                            TSQLTokenType.StringLiteral,
                            TSQLTokenType.SystemColumnIdentifier,
                            TSQLTokenType.SystemIdentifier,
                            TSQLTokenType.SystemVariable,
                            TSQLTokenType.Variable
                        ) == true // Operator '&&' cannot be applied to operands of type 'bool' and 'bool?'
                ))
            {
                // the accumulated tokens form a complete column reference
                TSQLColumnExpression column = new TSQLColumnExpression();

                column.Tokens.AddRange(tokens);

                List<TSQLToken> columnReference = tokens
                    .Where(t =>
                        !t.IsComment() &&
                        !t.IsWhitespace())
                    .ToList();

                if (columnReference.Count > 1)
                {
                    // p.ProductID will have the single token p in the final list

                    // AdventureWorks..ErrorLog.ErrorLogID will have 4 tokens in the final list
                    // e.g. {AdventureWorks, ., ., ErrorLog}

                    column.TableReference = columnReference
                        .GetRange(0, columnReference
                            .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                        .ToList();
                }

                column.Column = columnReference
                    .Last()
                    .AsIdentifier;

                return (column);
            }
            else
            {
                tokens.Add(tokenizer.Current);
            }
        }

        // this is the fall through if none of the "returns" hit above
        // will also hit if we parse a simple single column expression, e.g. "SELECT blah"

        TSQLColumnExpression simpleColumn = new TSQLColumnExpression();

        simpleColumn.Tokens.AddRange(tokens);

        List<TSQLToken> simpleColumnReference = tokens
            .Where(t =>
                !t.IsComment() &&
                !t.IsWhitespace() &&
                !t.IsCharacter(TSQLCharacters.Semicolon))
            .ToList();

        if (simpleColumnReference.Count > 1)
        {
            // p.ProductID will have the single token p in the final list

            // AdventureWorks..ErrorLog.ErrorLogID will have 4 tokens in the final list
            // e.g. {AdventureWorks, ., ., ErrorLog}

            simpleColumn.TableReference = simpleColumnReference
                .GetRange(0, simpleColumnReference
                    .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                .ToList();
        }

        simpleColumn.Column = simpleColumnReference
            .Last()
            .AsIdentifier;

        return (simpleColumn);
    }
    else
    {
        return (null);
    }
}
/// <summary>
/// Parses a single column of a SELECT list, including either alias form:
/// "alias = expression" or "expression [AS] alias".
/// </summary>
public TSQLSelectColumn Parse(ITSQLTokenizer tokenizer)
{
    TSQLSelectColumn column = new TSQLSelectColumn();
    TSQLExpression columnExpression = new TSQLSelectExpressionParser().Parse(tokenizer);
    column.Expression = columnExpression;
    column.Tokens.AddRange(columnExpression.Tokens);
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        tokenizer,
        column);
    // check for operator =, when expression type is column, and return new column expression with alias
    // e.g. IsFinishedGoods = p.FinishedGoodsFlag
    if (
        tokenizer.Current != null &&
        tokenizer.Current.Type == TSQLTokenType.Operator &&
        tokenizer.Current.Text == "=" &&
        columnExpression.Type == TSQLExpressionType.Column
        )
    {
        column.Tokens.Add(tokenizer.Current);
        tokenizer.MoveNext();
        // the expression parsed above was actually the alias; the real
        // column expression follows the = operator
        TSQLExpression actualColumn = new TSQLValueExpressionParser().Parse(tokenizer);
        column.Expression = actualColumn;
        column.ColumnAlias = columnExpression.AsColumn.Column;
        column.Tokens.AddRange(actualColumn.Tokens);
    }
    else
    {
        // NOTE(review): tokenizer.Current may be null here; this relies on
        // IsKeyword tolerating a null token — confirm against the extension method
        if (tokenizer.Current.IsKeyword(TSQLKeywords.AS))
        {
            column.Tokens.Add(tokenizer.Current);
            tokenizer.MoveNext();
            TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                tokenizer,
                column);
        }
        // an identifier here (with or without AS) is the column alias
        if (tokenizer.Current != null &&
            tokenizer.Current.Type.In(
                TSQLTokenType.Identifier,
                TSQLTokenType.SystemIdentifier,
                TSQLTokenType.IncompleteIdentifier))
        {
            column.Tokens.Add(tokenizer.Current);
            // an incomplete identifier is still added to the tokens,
            // but not used as the alias
            if (tokenizer.Current.Type.In(
                TSQLTokenType.Identifier,
                TSQLTokenType.SystemIdentifier))
            {
                column.ColumnAlias = tokenizer.Current.AsIdentifier;
            }
            tokenizer.MoveNext();
        }
    }
    return (column);
}
/// <summary>
/// Parses a full SELECT statement, including optional wrapping parens,
/// INTO/FROM/WHERE/GROUP BY/HAVING clauses, set operators, ORDER BY,
/// and trailing FOR/OPTION clauses.
/// </summary>
public TSQLSelectStatement Parse()
{
    // (SELECT 1)
    // count parens wrapping the whole statement
    int level = 0;
    while (Tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
    {
        Statement.Tokens.Add(Tokenizer.Current);
        level++;
        Tokenizer.MoveNext();
        TSQLTokenParserHelper.ReadCommentsAndWhitespace(
            Tokenizer,
            Statement);
    }
    TSQLSelectClause selectClause = new TSQLSelectClauseParser().Parse(Tokenizer);
    Statement.Select = selectClause;
    Statement.Tokens.AddRange(selectClause.Tokens);
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.INTO))
    {
        TSQLIntoClause intoClause = new TSQLIntoClauseParser().Parse(Tokenizer);
        Statement.Into = intoClause;
        Statement.Tokens.AddRange(intoClause.Tokens);
    }
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.FROM))
    {
        TSQLFromClause fromClause = new TSQLFromClauseParser().Parse(Tokenizer);
        Statement.From = fromClause;
        Statement.Tokens.AddRange(fromClause.Tokens);
    }
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.WHERE))
    {
        TSQLWhereClause whereClause = new TSQLWhereClauseParser().Parse(Tokenizer);
        Statement.Where = whereClause;
        Statement.Tokens.AddRange(whereClause.Tokens);
    }
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.GROUP))
    {
        TSQLGroupByClause groupByClause = new TSQLGroupByClauseParser().Parse(Tokenizer);
        Statement.GroupBy = groupByClause;
        Statement.Tokens.AddRange(groupByClause.Tokens);
    }
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.HAVING))
    {
        TSQLHavingClause havingClause = new TSQLHavingClauseParser().Parse(Tokenizer);
        Statement.Having = havingClause;
        Statement.Tokens.AddRange(havingClause.Tokens);
    }
    // close parens may appear either before any set operators...
    while (level > 0 &&
        Tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
    {
        Statement.Tokens.Add(Tokenizer.Current);
        level--;
        Tokenizer.MoveNext();
    }
    while (Tokenizer.Current?.AsKeyword != null &&
        Tokenizer.Current.AsKeyword.Keyword.In(
            TSQLKeywords.UNION,
            TSQLKeywords.EXCEPT,
            TSQLKeywords.INTERSECT))
    {
        TSQLSetOperatorClause set = new TSQLSetOperatorClauseParser().Parse(Tokenizer);
        Statement.SetOperators.Add(set);
        Statement.Tokens.AddRange(set.Tokens);
    }
    // ...or after them, so balance any that remain
    while (level > 0 &&
        Tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
    {
        Statement.Tokens.Add(Tokenizer.Current);
        level--;
        Tokenizer.MoveNext();
    }
    if (Tokenizer.Current.IsKeyword(TSQLKeywords.ORDER))
    {
        TSQLOrderByClause orderByClause = new TSQLOrderByClauseParser().Parse(Tokenizer);
        Statement.OrderBy = orderByClause;
        Statement.Tokens.AddRange(orderByClause.Tokens);
    }
    // order for OPTION and FOR doesn't seem to matter
    while (
        Tokenizer.Current.IsKeyword(TSQLKeywords.FOR) ||
        Tokenizer.Current.IsKeyword(TSQLKeywords.OPTION))
    {
        if (Tokenizer.Current.IsKeyword(TSQLKeywords.FOR))
        {
            TSQLForClause forClause = new TSQLForClauseParser().Parse(Tokenizer);
            Statement.For = forClause;
            Statement.Tokens.AddRange(forClause.Tokens);
        }
        if (Tokenizer.Current.IsKeyword(TSQLKeywords.OPTION))
        {
            TSQLOptionClause optionClause = new TSQLOptionClauseParser().Parse(Tokenizer);
            Statement.Option = optionClause;
            Statement.Tokens.AddRange(optionClause.Tokens);
        }
    }
    return (Statement);
}
/// <summary>
/// Parses a limited SELECT statement: SELECT with optional INTO, FROM, WHERE,
/// GROUP BY, and HAVING clauses. ORDER BY, FOR, and OPTION are not allowed
/// in this context.
/// </summary>
public TSQLSelectStatement Parse()
{
    TSQLSelectClause parsedSelect = new TSQLSelectClauseParser().Parse(Tokenizer);
    Statement.Select = parsedSelect;
    Statement.Tokens.AddRange(parsedSelect.Tokens);

    if (Tokenizer.Current.IsKeyword(TSQLKeywords.INTO))
    {
        TSQLIntoClause parsedInto = new TSQLIntoClauseParser().Parse(Tokenizer);
        Statement.Into = parsedInto;
        Statement.Tokens.AddRange(parsedInto.Tokens);
    }

    if (Tokenizer.Current.IsKeyword(TSQLKeywords.FROM))
    {
        TSQLFromClause parsedFrom = new TSQLFromClauseParser().Parse(Tokenizer);
        Statement.From = parsedFrom;
        Statement.Tokens.AddRange(parsedFrom.Tokens);
    }

    if (Tokenizer.Current.IsKeyword(TSQLKeywords.WHERE))
    {
        TSQLWhereClause parsedWhere = new TSQLWhereClauseParser().Parse(Tokenizer);
        Statement.Where = parsedWhere;
        Statement.Tokens.AddRange(parsedWhere.Tokens);
    }

    if (Tokenizer.Current.IsKeyword(TSQLKeywords.GROUP))
    {
        TSQLGroupByClause parsedGroupBy = new TSQLGroupByClauseParser().Parse(Tokenizer);
        Statement.GroupBy = parsedGroupBy;
        Statement.Tokens.AddRange(parsedGroupBy.Tokens);
    }

    if (Tokenizer.Current.IsKeyword(TSQLKeywords.HAVING))
    {
        TSQLHavingClause parsedHaving = new TSQLHavingClauseParser().Parse(Tokenizer);
        Statement.Having = parsedHaving;
        Statement.Tokens.AddRange(parsedHaving.Tokens);
    }

    // ORDER not allowed
    // FOR not allowed
    // OPTION not allowed
    TSQLTokenParserHelper.ReadCommentsAndWhitespace(
        Tokenizer,
        Statement);

    return Statement;
}
/// <summary>
/// Parses a WITH (common table expression) clause starting at the current
/// token, which must be WITH. Tracks AS keywords, identifiers, and paren
/// counts at root level to distinguish a CTE body's parens from a close
/// paren that belongs to an enclosing scope, and stops when the statement
/// that consumes the CTE (SELECT/INSERT/UPDATE/DELETE/MERGE) begins.
/// </summary>
/// <exception cref="InvalidOperationException">Current token is not WITH.</exception>
public TSQLWithClause Parse(ITSQLTokenizer tokenizer)
{
    TSQLWithClause with = new TSQLWithClause();
    if (!tokenizer.Current.IsKeyword(TSQLKeywords.WITH))
    {
        throw new InvalidOperationException("WITH expected.");
    }
    with.Tokens.Add(tokenizer.Current);
    // subqueries
    // paren nesting depth inside a CTE body
    int nestedLevel = 0;
    // root-level parens opened so far (CTE bodies and column lists)
    int parenCount = 0;
    // root-level identifiers seen so far (CTE names)
    int identifierCount = 0;
    // true when the previous significant root-level token was AS
    bool afterAs = false;
    while (
        tokenizer.MoveNext() &&
        !tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
        !(
            nestedLevel == 0 &&
            tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses)
        ) &&
        !(
            // only allow a set of parens at root level
            // if it's following an AS
            // or if it's the column list between the CTE name and the AS
            nestedLevel == 0 &&
            tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses) &&
            afterAs &&
            parenCount >= identifierCount
        ) &&
        (
            nestedLevel > 0 ||
            tokenizer.Current.Type != TSQLTokenType.Keyword ||
            (
                tokenizer.Current.Type == TSQLTokenType.Keyword &&
                !tokenizer.Current.AsKeyword.Keyword.In
                (
                    TSQLKeywords.SELECT,
                    TSQLKeywords.INSERT,
                    TSQLKeywords.UPDATE,
                    TSQLKeywords.DELETE,
                    TSQLKeywords.MERGE
                )
            )
        ))
    {
        // update the root-level bookkeeping used by the loop condition above
        if (nestedLevel == 0)
        {
            if (afterAs &&
                tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
            {
                parenCount++;
            }
            else if (tokenizer.Current.Type == TSQLTokenType.Identifier)
            {
                identifierCount++;
                afterAs = false;
            }
            else if (tokenizer.Current.IsKeyword(TSQLKeywords.AS))
            {
                afterAs = true;
            }
        }
        // adds the current token and adjusts nestedLevel for parens
        TSQLTokenParserHelper.RecurseParens(
            tokenizer,
            with,
            ref nestedLevel);
    }
    return (with);
}
/// <summary>
/// Parses the SELECT clause: the SELECT keyword, optional ALL/DISTINCT,
/// optional TOP in its several forms (TOP n, TOP (expr), PERCENT, WITH TIES),
/// and then the column list. Leaves the tokenizer positioned on the token
/// that ends the clause (e.g. FROM, WHERE, a semicolon, or the close paren
/// of an enclosing subquery).
/// </summary>
/// <param name="tokenizer">Tokenizer positioned on the SELECT keyword.</param>
/// <returns>The parsed clause, with its Columns collection populated.</returns>
/// <exception cref="InvalidOperationException">Current token is not SELECT.</exception>
public TSQLSelectClause Parse(ITSQLTokenizer tokenizer)
{
	TSQLSelectClause select = new TSQLSelectClause();

	if (!tokenizer.Current.IsKeyword(TSQLKeywords.SELECT))
	{
		throw new InvalidOperationException("SELECT expected.");
	}

	select.Tokens.Add(tokenizer.Current);

	tokenizer.MoveNext();

	TSQLTokenParserHelper.ReadCommentsAndWhitespace(
		tokenizer,
		select);

	// optional ALL or DISTINCT immediately after SELECT
	if (tokenizer.Current.IsKeyword(TSQLKeywords.ALL) ||
		tokenizer.Current.IsKeyword(TSQLKeywords.DISTINCT))
	{
		select.Tokens.Add(tokenizer.Current);

		tokenizer.MoveNext();

		TSQLTokenParserHelper.ReadCommentsAndWhitespace(
			tokenizer,
			select);
	}

	// optional TOP expression, with or without parens
	if (tokenizer.Current.IsKeyword(TSQLKeywords.TOP))
	{
		select.Tokens.Add(tokenizer.Current);

		tokenizer.MoveNext();

		TSQLTokenParserHelper.ReadCommentsAndWhitespace(
			tokenizer,
			select);

		if (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
		{
			select.Tokens.Add(tokenizer.Current);

			tokenizer.MoveNext();

			TSQLTokenParserHelper.ReadCommentsAndWhitespace(
				tokenizer,
				select);

			// handling for TOP(@RowsToReturn)
			// can also be used in a CROSS APPLY with an outer reference,
			// e.g. TOP(p.RowCount)
			if (tokenizer.Current != null &&
				tokenizer.Current.Type.In(
					TSQLTokenType.NumericLiteral,
					TSQLTokenType.Identifier,
					TSQLTokenType.IncompleteIdentifier,
					TSQLTokenType.SystemVariable,
					TSQLTokenType.Variable))
			{
				select.Tokens.Add(tokenizer.Current);

				tokenizer.MoveNext();

				TSQLTokenParserHelper.ReadCommentsAndWhitespace(
					tokenizer,
					select);
			}

			if (tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
			{
				select.Tokens.Add(tokenizer.Current);

				tokenizer.MoveNext();

				TSQLTokenParserHelper.ReadCommentsAndWhitespace(
					tokenizer,
					select);
			}
		}
		else if (tokenizer.Current != null &&
			tokenizer.Current.Type == TSQLTokenType.NumericLiteral)
		{
			// legacy paren-less form: TOP 10
			select.Tokens.Add(tokenizer.Current);

			tokenizer.MoveNext();

			TSQLTokenParserHelper.ReadCommentsAndWhitespace(
				tokenizer,
				select);
		}

		if (tokenizer.Current.IsKeyword(TSQLKeywords.PERCENT))
		{
			select.Tokens.Add(tokenizer.Current);

			tokenizer.MoveNext();

			TSQLTokenParserHelper.ReadCommentsAndWhitespace(
				tokenizer,
				select);
		}

		// WITH TIES: TIES is not a reserved keyword, so it arrives as a
		// plain identifier and is matched by text
		if (tokenizer.Current.IsKeyword(TSQLKeywords.WITH))
		{
			select.Tokens.Add(tokenizer.Current);

			tokenizer.MoveNext();

			TSQLTokenParserHelper.ReadCommentsAndWhitespace(
				tokenizer,
				select);

			if (tokenizer.Current != null &&
				tokenizer.Current.Type == TSQLTokenType.Identifier &&
				tokenizer.Current.AsIdentifier.Text.Equals(
					"TIES",
					StringComparison.InvariantCultureIgnoreCase))
			{
				select.Tokens.Add(tokenizer.Current);

				tokenizer.MoveNext();

				TSQLTokenParserHelper.ReadCommentsAndWhitespace(
					tokenizer,
					select);
			}
		}
	}

	// column list: keep parsing columns until a token that ends the clause
	// (end of stream, semicolon, close paren of an enclosing subquery, a
	// keyword that starts the next clause, or the start of a new statement)
	while (
		tokenizer.Current != null &&
		!tokenizer.Current.IsCharacter(TSQLCharacters.Semicolon) &&
		!tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses) &&
		!(
			tokenizer.Current.Type == TSQLTokenType.Keyword &&
			(
				tokenizer.Current.AsKeyword.Keyword.In(
					TSQLKeywords.INTO,
					TSQLKeywords.FROM,
					TSQLKeywords.WHERE,
					TSQLKeywords.GROUP,
					TSQLKeywords.HAVING,
					TSQLKeywords.ORDER,
					TSQLKeywords.UNION,
					TSQLKeywords.EXCEPT,
					TSQLKeywords.INTERSECT,
					TSQLKeywords.FOR,
					TSQLKeywords.OPTION) ||
				tokenizer.Current.AsKeyword.Keyword.IsStatementStart()
			)
		))
	{
		TSQLSelectColumn column = new TSQLSelectColumnParser().Parse(tokenizer);

		select.Tokens.AddRange(column.Tokens);

		select.Columns.Add(column);

		TSQLTokenParserHelper.ReadCommentsAndWhitespace(
			tokenizer,
			select);

		// a comma means another column follows; consume it and continue
		if (tokenizer.Current.IsCharacter(TSQLCharacters.Comma))
		{
			select.Tokens.Add(tokenizer.Current);

			tokenizer.MoveNext();

			TSQLTokenParserHelper.ReadCommentsAndWhitespace(
				tokenizer,
				select);
		}
	}

	return (select);
}