public TSQLWhenClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLWhenClause when = new TSQLWhenClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.WHEN))
            {
                throw new InvalidOperationException("WHEN expected.");
            }

            when.Tokens.Add(tokenizer.Current);

            // we don't have to worry about accidentally running into the next statement.

            // https://docs.microsoft.com/en-us/sql/t-sql/statements/merge-transact-sql
            // The MERGE statement requires a semicolon (;) as a statement terminator.
            // Error 10713 is raised when a MERGE statement is run without the terminator.

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                when,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.WHEN,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: false);

            return(when);
        }
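
A minimal usage sketch for the WHEN clause parser above. TSQLTokenizer (as the concrete ITSQLTokenizer) and the TSQLWhenClauseParser wrapper class are assumed names, inferred from the TSQLFetchClauseParser / TSQLOffsetClauseParser pattern used elsewhere in this listing.

        // hedged sketch, not a documented entry point of the library
        public static void WhenClauseUsageSketch()
        {
            ITSQLTokenizer tokenizer = new TSQLTokenizer(
                "WHEN MATCHED THEN UPDATE SET t.Name = s.Name WHEN NOT MATCHED THEN INSERT (Name) VALUES (s.Name);");

            // the tokenizer starts before the first token, so advance onto WHEN
            // before calling Parse, otherwise the guard above throws
            tokenizer.MoveNext();

            TSQLWhenClause when = new TSQLWhenClauseParser().Parse(tokenizer);

            // Parse stops on the second WHEN (or on OPTION/OUTPUT), leaving
            // tokenizer.Current on the stop token for the caller to handle
            System.Console.WriteLine(when.Tokens.Count);
        }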
        public TSQLMergeClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLMergeClause merge = new TSQLMergeClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.MERGE))
            {
                throw new InvalidOperationException("MERGE expected.");
            }

            merge.Tokens.Add(tokenizer.Current);

            // can contain TOP()

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                merge,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT,
                TSQLFutureKeywords.USING
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.INTO,
                TSQLKeywords.AS,
                TSQLKeywords.ON,
                TSQLKeywords.WHEN
            },
                lookForStatementStarts: true);

            return(merge);
        }
        public TSQLValues Parse(ITSQLTokenizer tokenizer)
        {
            TSQLValues values = new TSQLValues();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.VALUES))
            {
                throw new InvalidOperationException("VALUES expected.");
            }

            values.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                values,
                // stop words come from usage in MERGE
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.ON,
                TSQLKeywords.WHEN
            },
                // INSERT INTO ... VALUES ... SELECT
                lookForStatementStarts: true);

            return(values);
        }
        public TSQLOnClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLOnClause on = new TSQLOnClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.ON))
            {
                throw new InvalidOperationException("ON expected.");
            }

            on.Tokens.Add(tokenizer.Current);

            // TODO: tighten logic to handle tables named OUTPUT, but still handle ON usage in MERGE

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                on,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT,
                TSQLFutureKeywords.USING
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.INNER,
                TSQLKeywords.OUTER,
                TSQLKeywords.JOIN,
                TSQLKeywords.WHEN
            },
                lookForStatementStarts: true);

            return(on);
        }
        public TSQLUpdateClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLUpdateClause update = new TSQLUpdateClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.UPDATE))
            {
                throw new InvalidOperationException("UPDATE expected.");
            }

            update.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                update,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.SET
            },
                lookForStatementStarts: false);

            return(update);
        }
        public TSQLGroupByClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLGroupByClause groupBy = new TSQLGroupByClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.GROUP))
            {
                throw new InvalidOperationException("GROUP expected.");
            }

            groupBy.Tokens.Add(tokenizer.Current);

            // can contain subqueries

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                groupBy,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.HAVING,
                TSQLKeywords.UNION,
                TSQLKeywords.EXCEPT,
                TSQLKeywords.INTERSECT,
                TSQLKeywords.ORDER,
                TSQLKeywords.FOR,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: true);

            return(groupBy);
        }
        public TSQLOffsetClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLOffsetClause offset = new TSQLOffsetClause();

            if (!tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
            {
                throw new InvalidOperationException("OFFSET expected.");
            }

            offset.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                offset,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.FETCH
            },
                lookForStatementStarts: true);

            if (tokenizer.Current.IsKeyword(TSQLKeywords.FETCH))
            {
                TSQLFetchClause fetchClause = new TSQLFetchClauseParser().Parse(tokenizer);
                offset.Tokens.AddRange(fetchClause.Tokens);
            }

            return(offset);
        }
        public TSQLInsertClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLInsertClause insert = new TSQLInsertClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.INSERT))
            {
                throw new InvalidOperationException("INSERT expected.");
            }

            insert.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                insert,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.SELECT,
                TSQLKeywords.EXECUTE,
                TSQLKeywords.VALUES,
                TSQLKeywords.DEFAULT
            },
                lookForStatementStarts: false);

            return(insert);
        }
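
A sketch of how the INSERT clause parser above could be chained with the VALUES clause parser shown earlier in this listing; TSQLInsertClauseParser and TSQLValuesParser are assumed class names following the same naming pattern.

        // hedged sketch: the caller has already positioned tokenizer.Current on INSERT
        public static void InsertValuesUsageSketch(ITSQLTokenizer tokenizer)
        {
            TSQLInsertClause insert = new TSQLInsertClauseParser().Parse(tokenizer);

            // the INSERT clause parser stops on SELECT, EXECUTE, VALUES, or DEFAULT,
            // so the tokenizer is already positioned for the follow-on clause
            if (tokenizer.Current != null &&
                tokenizer.Current.IsKeyword(TSQLKeywords.VALUES))
            {
                TSQLValues values = new TSQLValuesParser().Parse(tokenizer);

                System.Console.WriteLine(insert.Tokens.Count + values.Tokens.Count);
            }
        }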
        public TSQLOrderByClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLOrderByClause orderBy = new TSQLOrderByClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.ORDER))
            {
                throw new InvalidOperationException("ORDER expected.");
            }

            orderBy.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                orderBy,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OFFSET
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.FOR,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: true);

            // have to handle OFFSET parsing specially because it can contain FETCH, which would otherwise
            // signal the start of a new statement instead of still being contained within OFFSET
            if (tokenizer.Current.IsFutureKeyword(TSQLFutureKeywords.OFFSET))
            {
                TSQLOffsetClause offsetClause = new TSQLOffsetClauseParser().Parse(tokenizer);
                orderBy.Tokens.AddRange(offsetClause.Tokens);
            }

            return(orderBy);
        }
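
A usage sketch for the ORDER BY / OFFSET / FETCH chain above, again assuming the concrete TSQLTokenizer and a TSQLOrderByClauseParser wrapper class (names inferred, not confirmed by this listing).

        // hedged sketch showing that OFFSET and FETCH tokens are folded back
        // into the ORDER BY clause by the delegation above
        public static void OrderByOffsetUsageSketch()
        {
            ITSQLTokenizer tokenizer = new TSQLTokenizer(
                "ORDER BY ProductID OFFSET 10 ROWS FETCH NEXT 5 ROWS ONLY");

            tokenizer.MoveNext();

            TSQLOrderByClause orderBy = new TSQLOrderByClauseParser().Parse(tokenizer);

            // orderBy.Tokens now also contains the OFFSET clause, which itself pulled
            // in the FETCH tokens via TSQLOffsetClauseParser and TSQLFetchClauseParser
            System.Console.WriteLine(orderBy.Tokens.Count);
        }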
        public TSQLDeleteClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLDeleteClause delete = new TSQLDeleteClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.DELETE))
            {
                throw new InvalidOperationException("DELETE expected.");
            }

            delete.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                delete,
                new List <TSQLFutureKeywords>()
            {
                TSQLFutureKeywords.OUTPUT
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.FROM,
                TSQLKeywords.WHERE,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: true);

            return(delete);
        }
        public TSQLHavingClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLHavingClause having = new TSQLHavingClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.HAVING))
            {
                throw new InvalidOperationException("HAVING expected.");
            }

            having.Tokens.Add(tokenizer.Current);

            // can contain subqueries

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                having,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.ORDER,
                TSQLKeywords.UNION,
                TSQLKeywords.EXCEPT,
                TSQLKeywords.INTERSECT,
                TSQLKeywords.FOR,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: true);

            return(having);
        }
        public TSQLCaseExpression Parse(ITSQLTokenizer tokenizer)
        {
            TSQLCaseExpression caseExpression = new TSQLCaseExpression();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.CASE))
            {
                throw new InvalidOperationException("CASE expected.");
            }

            caseExpression.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                caseExpression,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.END
            },
                lookForStatementStarts: false);

            // this is different than the other clauses because the
            // stop word is still part of the expression instead of
            // being part of the next expression or clause like in
            // the other parsers
            caseExpression.Tokens.Add(tokenizer.Current);

            tokenizer.MoveNext();

            return(caseExpression);
        }
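
A usage sketch for the CASE expression parser above (TSQLCaseExpressionParser is the class name referenced later in this listing); TSQLTokenizer is again an assumed concrete ITSQLTokenizer.

        // hedged sketch: unlike the clause parsers, the END stop word stays
        // inside the expression and the tokenizer is advanced past it
        public static void CaseExpressionUsageSketch()
        {
            ITSQLTokenizer tokenizer = new TSQLTokenizer(
                "CASE WHEN Quantity > 10 THEN 'bulk' ELSE 'single' END");

            tokenizer.MoveNext();

            TSQLCaseExpression caseExpression = new TSQLCaseExpressionParser().Parse(tokenizer);

            // the final token of the expression is the END keyword itself
            System.Console.WriteLine(
                caseExpression.Tokens[caseExpression.Tokens.Count - 1].Text);
        }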
        public TSQLExecuteStatement Parse()
        {
            Statement.Tokens.Add(Tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                Tokenizer,
                Statement,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
            },
                lookForStatementStarts: true);

            return(Statement);
        }
        public TSQLFetchClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLFetchClause fetchClause = new TSQLFetchClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.FETCH))
            {
                throw new InvalidOperationException("FETCH expected.");
            }

            fetchClause.Tokens.Add(tokenizer.Current);

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                fetchClause,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
            },
                lookForStatementStarts: true);

            return(fetchClause);
        }
        public TSQLFromClause Parse(ITSQLTokenizer tokenizer)
        {
            TSQLFromClause from = new TSQLFromClause();

            if (!tokenizer.Current.IsKeyword(TSQLKeywords.FROM))
            {
                throw new InvalidOperationException("FROM expected.");
            }

            from.Tokens.Add(tokenizer.Current);

            // can contain derived tables
            // can contain TVFs (table-valued functions)

            TSQLTokenParserHelper.ReadUntilStop(
                tokenizer,
                from,
                new List <TSQLFutureKeywords>()
            {
            },
                new List <TSQLKeywords>()
            {
                TSQLKeywords.WHERE,
                TSQLKeywords.GROUP,
                TSQLKeywords.HAVING,
                TSQLKeywords.ORDER,
                TSQLKeywords.UNION,
                TSQLKeywords.EXCEPT,
                TSQLKeywords.INTERSECT,
                TSQLKeywords.FOR,
                TSQLKeywords.OPTION
            },
                lookForStatementStarts: true);

            return(from);
        }
        public TSQLExpression ParseNext(
            ITSQLTokenizer tokenizer)
        {
            if (tokenizer.Current == null)
            {
                return(null);
            }

            // look at the current/first token to determine what to do

            if (tokenizer.Current.Text == "*")
            {
                TSQLMulticolumnExpression simpleMulti = new TSQLMulticolumnExpression();

                simpleMulti.Tokens.Add(tokenizer.Current);

                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    simpleMulti.Tokens);

                return(simpleMulti);

                // still need to separately check for p.* below
            }
            // this checks for unary operators, e.g. +, -, and ~
            else if (tokenizer.Current.Type.In(
                         TSQLTokenType.Operator))
            {
                return(null);
            }
            else if (tokenizer.Current.IsCharacter(
                         TSQLCharacters.OpenParentheses))
            {
                List <TSQLToken> tokens = new List <TSQLToken>();

                tokens.Add(tokenizer.Current);

                // read through any whitespace so we can check specifically for a SELECT
                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    tokens);

                if (tokenizer.Current.IsKeyword(TSQLKeywords.SELECT))
                {
                    #region parse subquery

                    TSQLSubqueryExpression subquery = new TSQLSubqueryExpression();

                    subquery.Tokens.AddRange(tokens);

                    TSQLSelectStatement select = new TSQLSelectStatementParser(tokenizer).Parse();

                    subquery.Select = select;

                    subquery.Tokens.AddRange(select.Tokens);

                    if (tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
                    {
                        subquery.Tokens.Add(tokenizer.Current);

                        tokenizer.MoveNext();
                    }

                    return(subquery);

                    #endregion
                }
                else
                {
                    #region parse expression contained/grouped inside parenthesis

                    TSQLGroupedExpression group = new TSQLGroupedExpression();

                    group.Tokens.AddRange(tokens);

                    group.InnerExpression =
                        new TSQLValueExpressionParser().Parse(
                            tokenizer);
                    group.Tokens.AddRange(group.InnerExpression.Tokens);

                    if (tokenizer.Current.IsCharacter(
                            TSQLCharacters.CloseParentheses))
                    {
                        group.Tokens.Add(tokenizer.Current);
                        tokenizer.MoveNext();
                    }

                    return(group);

                    #endregion
                }
            }
            else if (tokenizer.Current.Type.In(
                         TSQLTokenType.Variable,
                         TSQLTokenType.SystemVariable))
            {
                TSQLVariableExpression variable = new TSQLVariableExpression();
                variable.Tokens.Add(tokenizer.Current);
                variable.Variable = tokenizer.Current.AsVariable;

                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    variable.Tokens);

                return(variable);
            }
            else if (tokenizer.Current.Type.In(
                         TSQLTokenType.BinaryLiteral,
                         TSQLTokenType.MoneyLiteral,
                         TSQLTokenType.NumericLiteral,
                         TSQLTokenType.StringLiteral,
                         TSQLTokenType.IncompleteString))
            {
                TSQLConstantExpression constant = new TSQLConstantExpression();

                constant.Literal = tokenizer.Current.AsLiteral;

                constant.Tokens.Add(tokenizer.Current);

                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    constant.Tokens);

                return(constant);
            }
            else if (tokenizer.Current.IsKeyword(TSQLKeywords.CASE))
            {
                return(new TSQLCaseExpressionParser().Parse(tokenizer));
            }
            else if (tokenizer.Current.Type.In(
                         TSQLTokenType.SystemColumnIdentifier,
                         TSQLTokenType.IncompleteIdentifier))
            {
                TSQLColumnExpression column = new TSQLColumnExpression();

                column.Column = tokenizer.Current.AsSystemColumnIdentifier;

                column.Tokens.Add(tokenizer.Current);

                TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                    tokenizer,
                    column.Tokens);

                return(column);
            }
            else if (tokenizer.Current.Type.In(
                         TSQLTokenType.Identifier,
                         TSQLTokenType.SystemIdentifier))
            {
                // column, with or without alias, or with full explicit table name with up to 5 parts

                // or function, up to 4 part naming

                // find last token up to and including possible first paren
                // if *, then multi column
                // if paren, then function
                // else column

                // alias would be any tokens prior to last period, removing whitespace

                List <TSQLToken> tokens = new List <TSQLToken>();

                tokens.Add(tokenizer.Current);

                while (tokenizer.MoveNext())
                {
                    if (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
                    {
                        #region parse function

                        TSQLFunctionExpression function = new TSQLFunctionExpression();

                        function.Tokens.AddRange(tokens);
                        function.Tokens.Add(tokenizer.Current);

                        var identityTokens = tokens
                                             .Where(t => !t.IsComment() && !t.IsWhitespace())
                                             .ToList();

                        function.Function =
                            identityTokens[identityTokens.Count - 1]
                            .AsIdentifier;

                        if (identityTokens.Count > 1)
                        {
                            function.QualifiedPath =
                                identityTokens
                                .GetRange(
                                    0,
                                    identityTokens.Count - 2);
                        }

                        tokenizer.MoveNext();

                        TSQLArgumentList arguments = null;

                        // the CAST function has its own unique argument syntax
                        if (function.Function.IsIdentifier(TSQLIdentifiers.CAST))
                        {
                            arguments = new TSQLValueAsTypeExpressionParser().Parse(
                                tokenizer);
                        }
                        else
                        {
                            arguments = new TSQLArgumentListParser().Parse(
                                tokenizer);
                        }

                        function.Tokens.AddRange(arguments.Tokens);

                        function.Arguments = arguments;

                        if (tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
                        {
                            function.Tokens.Add(tokenizer.Current);
                        }

                        tokenizer.MoveNext();

                        TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                            tokenizer,
                            function);

                        // look for windowed aggregate
                        if (tokenizer.Current.IsKeyword(TSQLKeywords.OVER))
                        {
                            function.Tokens.Add(tokenizer.Current);

                            tokenizer.MoveNext();

                            TSQLTokenParserHelper.ReadCommentsAndWhitespace(
                                tokenizer,
                                function);

                            if (tokenizer.Current.IsCharacter(TSQLCharacters.OpenParentheses))
                            {
                                function.Tokens.Add(tokenizer.Current);

                                // recursively look for final close parens
                                TSQLTokenParserHelper.ReadUntilStop(
                                    tokenizer,
                                    function,
                                    new List <TSQLFutureKeywords> {
                                },
                                    new List <TSQLKeywords> {
                                },
                                    lookForStatementStarts: false);

                                if (tokenizer.Current != null &&
                                    tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses))
                                {
                                    function.Tokens.Add(tokenizer.Current);

                                    tokenizer.MoveNext();
                                }
                            }
                        }

                        return(function);

                        #endregion
                    }
                    else if (tokenizer.Current.Text == "*")
                    {
                        #region parse multi column reference

                        // e.g. p.*

                        TSQLMulticolumnExpression multi = new TSQLMulticolumnExpression();

                        multi.Tokens.AddRange(tokens);

                        multi.Tokens.Add(tokenizer.Current);

                        List <TSQLToken> columnReference = tokens
                                                           .Where(t => !t.IsComment() && !t.IsWhitespace())
                                                           .ToList();

                        if (columnReference.Count > 0)
                        {
                            // p.* will have the single token p in the final list

                            // AdventureWorks..ErrorLog.* will have 4 tokens in the final list
                            // e.g. {AdventureWorks, ., ., ErrorLog}

                            multi.TableReference = columnReference
                                                   .GetRange(0, columnReference
                                                             .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                                                   .ToList();
                        }

                        TSQLTokenParserHelper.ReadThroughAnyCommentsOrWhitespace(
                            tokenizer,
                            multi.Tokens);

                        return(multi);

                        #endregion
                    }
                    else if (
                        tokenizer.Current.IsCharacter(TSQLCharacters.Comma) ||
                        tokenizer.Current.IsCharacter(TSQLCharacters.CloseParentheses) ||
                        tokenizer.Current.Type.In(
                            TSQLTokenType.Keyword,
                            TSQLTokenType.Operator) ||

                        // this will be a nasty check, but I don't want to copy the internal logic elsewhere

                        // two identifiers in a row means that the second one is an alias
                        (
                            tokenizer.Current.Type.In(
                                TSQLTokenType.Identifier,
                                TSQLTokenType.IncompleteIdentifier) &&
                            tokens
                            .Where(t => !t.IsComment() && !t.IsWhitespace())
                            .LastOrDefault()
                            ?.Type.In(
                                TSQLTokenType.Identifier,
                                TSQLTokenType.BinaryLiteral,
                                TSQLTokenType.MoneyLiteral,
                                TSQLTokenType.NumericLiteral,
                                TSQLTokenType.StringLiteral,
                                TSQLTokenType.SystemColumnIdentifier,
                                TSQLTokenType.SystemIdentifier,
                                TSQLTokenType.SystemVariable,
                                TSQLTokenType.Variable
                                ) == true // the '?.' chain returns bool?, and '&&' cannot combine bool with bool?
                        ))
                    {
                        TSQLColumnExpression column = new TSQLColumnExpression();

                        column.Tokens.AddRange(tokens);

                        List <TSQLToken> columnReference = tokens
                                                           .Where(t => !t.IsComment() && !t.IsWhitespace())
                                                           .ToList();

                        if (columnReference.Count > 1)
                        {
                            // p.ProductID will have the single token p in the final list

                            // AdventureWorks..ErrorLog.ErrorLogID will have 4 tokens in the final list
                            // e.g. {AdventureWorks, ., ., ErrorLog}

                            column.TableReference = columnReference
                                                    .GetRange(0, columnReference
                                                              .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                                                    .ToList();
                        }

                        column.Column = columnReference
                                        .Last()
                                        .AsIdentifier;

                        return(column);
                    }
                    else
                    {
                        tokens.Add(tokenizer.Current);
                    }
                }

                // this is the fall through if none of the "returns" hit above

                // will also hit if we parse a simple single column expression, e.g. "SELECT blah"

                TSQLColumnExpression simpleColumn = new TSQLColumnExpression();

                simpleColumn.Tokens.AddRange(tokens);

                List <TSQLToken> simpleColumnReference = tokens
                                                         .Where(t =>
                                                                !t.IsComment() &&
                                                                !t.IsWhitespace() &&
                                                                !t.IsCharacter(TSQLCharacters.Semicolon))
                                                         .ToList();

                if (simpleColumnReference.Count > 1)
                {
                    // p.ProductID will have the single token p in the final list

                    // AdventureWorks..ErrorLog.ErrorLogID will have 4 tokens in the final list
                    // e.g. {AdventureWorks, ., ., ErrorLog}

                    simpleColumn.TableReference = simpleColumnReference
                                                  .GetRange(0, simpleColumnReference
                                                            .FindLastIndex(t => t.IsCharacter(TSQLCharacters.Period)))
                                                  .ToList();
                }

                simpleColumn.Column = simpleColumnReference
                                      .Last()
                                      .AsIdentifier;

                return(simpleColumn);
            }
            else
            {
                return(null);
            }
        }
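
A usage sketch for ParseNext above. The containing class is not named in this listing, so TSQLExpressionParser is used here purely as a placeholder, alongside the assumed concrete TSQLTokenizer.

        // hedged sketch: an identifier followed by an open parenthesis takes the
        // function branch above and returns a TSQLFunctionExpression
        public static void ExpressionUsageSketch()
        {
            ITSQLTokenizer tokenizer = new TSQLTokenizer("ISNULL(p.Name, 'unknown')");

            tokenizer.MoveNext();

            TSQLExpression expression = new TSQLExpressionParser().ParseNext(tokenizer);

            if (expression is TSQLFunctionExpression function)
            {
                // Arguments was populated by TSQLArgumentListParser (or, for CAST,
                // by TSQLValueAsTypeExpressionParser) and merged into the token list
                System.Console.WriteLine(function.Arguments.Tokens.Count);
            }
        }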