static void Main(string[] args)
{
    var sqlText = @"
create procedure something
as
select 100;
select 200
exec sp_who;
exec sp_who2;
";
    var sql = new StringReader(sqlText);
    var parser = new TSql140Parser(false);
    var script = parser.Parse(sql, out IList<ParseError> errors);

    // Find the EXECUTE statements we want to comment out.
    var visitor = new ExecVisitor();
    script.Accept(visitor);

    // These tokens only carry the comment delimiter text that will be written back out.
    TSqlParserToken startComment = new TSqlParserToken(TSqlTokenType.SingleLineComment, "/*");
    TSqlParserToken endComment = new TSqlParserToken(TSqlTokenType.SingleLineComment, "*/");

    var newScriptTokenStream = new List<TSqlParserToken>(script.ScriptTokenStream);

    // Insert from the last statement backwards so the token indexes of earlier statements stay valid.
    for (var i = visitor.Statements.Count - 1; i >= 0; i--)
    {
        var stmt = visitor.Statements[i];
        newScriptTokenStream.Insert(stmt.LastTokenIndex, endComment);
        newScriptTokenStream.Insert(stmt.FirstTokenIndex, startComment);
    }

    // Re-parse the modified token stream and print the rewritten script.
    // GetScript (not shown) presumably concatenates the Text of each token.
    var newFragment = parser.Parse(newScriptTokenStream, out errors);
    Console.WriteLine(GetScript(newFragment.ScriptTokenStream));
}
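The ExecVisitor class is not shown above; a minimal sketch of what it might look like, assuming it simply collects every EXECUTE statement it visits (the class and property names follow the usage in Main):

// Hypothetical sketch of the visitor used above: collects every EXECUTE
// statement so the caller can wrap it in comment tokens.
public class ExecVisitor : TSqlFragmentVisitor
{
    public List<ExecuteStatement> Statements { get; } = new List<ExecuteStatement>();

    public override void Visit(ExecuteStatement node)
    {
        Statements.Add(node);
    }
}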
private static string ChangeCreateToAlter(CreateFunctionStatement function, string wholeScript)
{
    // Get the part of the script we are interested in...
    var subScript = wholeScript.Substring(function.StartOffset, function.FragmentLength);

    IList<ParseError> errors;
    var fragment = new TSql130Parser(false).Parse(new StringReader(subScript), out errors);

    bool haveCreate = false;
    var output = new StringBuilder();

    foreach (var token in fragment.ScriptTokenStream)
    {
        // Swap only the first CREATE keyword for ALTER; copy every other token verbatim.
        if (!haveCreate && token.TokenType == TSqlTokenType.Create)
        {
            var alterToken = new TSqlParserToken(TSqlTokenType.Alter, "alter");
            output.Append(alterToken.Text);
            haveCreate = true;
            continue;
        }

        output.Append(token.Text);
    }

    return output.ToString();
}
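A minimal usage sketch, assuming the script contains a single CREATE FUNCTION and that System.Linq and System.IO are imported; the file name is made up for illustration:

var wholeScript = File.ReadAllText("MyFunction.sql");
var parsed = new TSql130Parser(false).Parse(new StringReader(wholeScript), out IList<ParseError> parseErrors);

// Locate the CREATE FUNCTION statement in the parsed script.
var createFunction = ((TSqlScript)parsed).Batches
    .SelectMany(b => b.Statements)
    .OfType<CreateFunctionStatement>()
    .First();

Console.WriteLine(ChangeCreateToAlter(createFunction, wholeScript));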
private void VisitHelper(ProcedureStatementBody node)
{
    WasVisited = true;
    SqlFragment = node;

    if (node.StatementList.Statements.Any())
    {
        // If the first statement is SET NOCOUNT ON, remember where the body proper starts.
        var predicate = node.StatementList.Statements[0] as PredicateSetStatement;
        if (predicate != null)
        {
            HasNocountOn = predicate.Options == SetOptions.NoCount && predicate.IsOn;
            _approximateBodyStartTokenIndex = predicate.LastTokenIndex;
        }
    }

    // Look for a SET TRANSACTION ISOLATION LEVEL statement anywhere in the body.
    var statement = node.StatementList.Statements
        .FirstOrDefault(s => (s as SetTransactionIsolationLevelStatement) != null);
    if (statement != null)
    {
        var tran = statement as SetTransactionIsolationLevelStatement;
        HasTransactionIsolationLevel = tran.Level == IsolationLevel.ReadUncommitted;
    }

    // Scan the remaining tokens for comments.
    for (int i = _approximateBodyStartTokenIndex; i < node.LastTokenIndex; i++)
    {
        TSqlParserToken token = node.ScriptTokenStream[i];
        if (token.TokenType == TSqlTokenType.SingleLineComment)
        {
            HasComments = true;
            break;
        }
    }
}
/// <summary>
/// Adds a new parameter to the SqlCommand parameters collection.
/// </summary>
/// <param name="token">The TSqlParserToken to be processed</param>
/// <param name="parameterNumber">The next value for our parameter name</param>
protected void AddToParameterCollection(TSqlParserToken token, int parameterNumber)
{
    // We use a simple incrementing @p sequence for parameter names.
    string parm = "@p" + parameterNumber.ToString();

    // Default to string if none of the special types apply.
    DbType parmType = DbType.String;

    // Translate the token type to a DbType for the Parameters collection below.
    switch (token.TokenType)
    {
        case TSqlTokenType.AsciiStringLiteral:
        case TSqlTokenType.AsciiStringOrQuotedIdentifier:
        case TSqlTokenType.HexLiteral:
            parmType = DbType.AnsiString;
            break;

        case TSqlTokenType.UnicodeStringLiteral:
            parmType = DbType.String;
            break;

        case TSqlTokenType.Integer:
            parmType = DbType.Int64;
            break;

        case TSqlTokenType.Real:
            parmType = DbType.Double;
            break;

        case TSqlTokenType.Numeric:
            parmType = DbType.Decimal;
            break;

        case TSqlTokenType.Money:
            parmType = DbType.Currency;
            break;
    }

    // Add to the SqlCommand.Parameters collection using the metadata collected above.
    SqlParameter p = new SqlParameter();
    p.ParameterName = parm;
    p.DbType = parmType;

    switch (p.DbType)
    {
        case DbType.AnsiString:
            // Strip the surrounding single quotes and un-escape embedded quotes.
            p.Value = token.Text.Substring(1, token.Text.Length - 2).Replace("''", "'");
            break;

        case DbType.String:
            // Strip the leading N and the surrounding quotes, then un-escape embedded quotes.
            p.Value = token.Text.Substring(2, token.Text.Length - 3).Replace("''", "'");
            break;

        default:
            p.Value = token.Text;
            break;
    }

    p.Direction = ParameterDirection.Input;
    cmd.Parameters.Add(p);
}
public override void Visit(TSqlFragment node)
{
    base.Visit(node);

    if (DeclareVariableElements.Count > 0 && node.StartOffset >= index)
    {
        // Get the token stream for the fragment and try to match the variables from our list.
        IList<TSqlParserToken> stream = node.ScriptTokenStream;
        for (int i = node.FirstTokenIndex; i <= node.LastTokenIndex; i++)
        {
            TSqlParserToken token = stream[i];
            if (token.TokenType == TSqlTokenType.Variable)
            {
                if (DeclareVariableElements.ContainsKey(token.Text))
                {
                    // Declared variable matches a variable in other text. Remove it from the list.
                    DeclareVariableElements.Remove(token.Text);
                }
            }
        }
    }

    if (node is DeclareVariableElement &&
        (checkParameters && node is ProcedureParameter || !checkParameters && !(node is ProcedureParameter)))
    {
        DeclareVariableElement element = (DeclareVariableElement)node;
        DeclareVariableElements[element.VariableName.Value] = element;
        if (index < node.StartOffset + node.FragmentLength)
        {
            index = node.StartOffset + node.FragmentLength;
        }
    }
}
/// <summary>
/// This is the entry point into the Visitor pattern and where all the work occurs.
/// Note we explicitly trigger on the TSqlBatch node type
/// because we only need to parameterize the search conditions.
/// </summary>
/// <param name="node">The TSqlFragment sent to us from the Visitor.Accept method</param>
public override void ExplicitVisit(TSqlBatch node)
{
    // First determine which tokens belong to our where clause.
    int index = node.FirstTokenIndex;
    int end = node.LastTokenIndex;

    // Now process each token in an appropriate manner.
    while (index <= end)
    {
        // Use the TokenType to decide what processing needs to occur.
        TSqlParserToken token = node.ScriptTokenStream[index];

        // Emit the token, and if necessary add a parameter to the parameters collection.
        EmitToken(node, token, index);

        // Until we have processed all the tokens associated with the where clause.
        index++;
    }

    // Now we must emit the rest of the tokens to get our entire T-SQL script.
    if (reparse)
    {
        // Just a bonus - part of the ScriptDom namespace; as a side effect it revalidates our new code.
        FormatSQL();
    }

    // Let the base class finish up.
    base.ExplicitVisit(node);
}
private static void GetQuotedIdentifier(TSqlParserToken token, StringBuilder sb)
{
    switch (token.TokenType)
    {
        case TSqlTokenType.Identifier:
            sb.Append('[').Append(token.Text).Append(']');
            break;

        case TSqlTokenType.QuotedIdentifier:
        case TSqlTokenType.Dot:
            sb.Append(token.Text);
            break;

        default:
            throw new ArgumentException(
                "Expected the token's TokenType to be TSqlTokenType.Dot, TSqlTokenType.Identifier, or TSqlTokenType.QuotedIdentifier.");
    }
}
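A minimal usage sketch, assuming name is a SchemaObjectName taken from a parsed fragment; it rebuilds the multi-part name with plain identifiers bracketed:

// name is assumed to be a SchemaObjectName obtained from a parsed statement.
var sb = new StringBuilder();
for (int i = name.FirstTokenIndex; i <= name.LastTokenIndex; i++)
{
    var token = name.ScriptTokenStream[i];
    if (token.TokenType == TSqlTokenType.WhiteSpace)
    {
        continue; // skip any stray whitespace between name parts
    }
    GetQuotedIdentifier(token, sb);
}
Console.WriteLine(sb); // e.g. [dbo].[MyTable]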
public List<Replacements> GetReplacements(List<QuerySpecification> queries)
{
    var replacements = new List<Replacements>();
    if (queries.Count < 1)
    {
        return replacements;
    }

    // As we iterate through tokens we only need the outermost query...
    var spec = queries.First();

    TSqlParserToken lastToken = null;
    foreach (var token in spec.ScriptTokenStream)
    {
        if (token.TokenType == TSqlTokenType.EqualsSign &&
            lastToken != null &&
            lastToken.TokenType == TSqlTokenType.Bang)
        {
            var replacement = new Replacements();
            var length = (token.Offset + token.Text.Length) - lastToken.Offset;
            replacement.Original = _script.Substring(lastToken.Offset, length);
            replacement.OriginalFragment = spec;
            replacement.OriginalLength = length;
            replacement.OriginalOffset = lastToken.Offset;
            replacement.Replacement = "<>";
            replacements.Add(replacement);
            lastToken = token;
        }

        switch (token.TokenType)
        {
            case TSqlTokenType.WhiteSpace:
            case TSqlTokenType.MultilineComment:
            case TSqlTokenType.SingleLineComment:
                break;

            default:
                lastToken = token;
                break;
        }
    }

    return replacements;
}
public override void ExplicitVisit(TopRowFilter node)
{
    WasVisited = true;
    SqlFragment = node;

    for (int i = node.FirstTokenIndex; i <= node.LastTokenIndex; i++)
    {
        TSqlParserToken token = node.ScriptTokenStream[i];
        if (token.TokenType == TSqlTokenType.LeftParenthesis)
        {
            _leftParethesisCount++;
        }

        if (token.TokenType == TSqlTokenType.RightParenthesis)
        {
            _rightParethesisCount++;
        }
    }
}
public override void ExplicitVisit(TSqlScript node)
{
    WasVisited = true;
    SqlFragment = node;
    IsUpperCase = true;

    for (int i = node.FirstTokenIndex; i <= node.LastTokenIndex; i++)
    {
        TSqlParserToken token = node.ScriptTokenStream[i];
        if (_tokenTypes.Contains(token.TokenType))
        {
            // Note: this only requires the token to contain at least one upper-case letter;
            // a token with no upper-case letters marks the script as not upper case.
            if (!Regex.IsMatch(token.Text, "[A-Z]"))
            {
                IsUpperCase = false;
                break;
            }
        }
    }
}
/// <summary>
/// This is the logic for how to handle the tokens - i.e. copy to the output stream, add to the parameter collection, etc.
/// </summary>
/// <param name="node">This is the TSqlFragment sent to the ExplicitVisit method</param>
/// <param name="token">This is the current token that we are processing</param>
/// <param name="index">The index to process</param>
//protected void EmitToken(WhereClause node, TSqlParserToken token, int index)
protected void EmitToken(TSqlBatch node, TSqlParserToken token, int index)
{
    switch (token.TokenType)
    {
        // For the majority of token types we just pass the token text to our StringBuilder for inclusion.
        default:
            SqlStmt.Append(node.ScriptTokenStream[index].Text);
            break;

        // For token types that may need to be parameterized we capture the name, data type and value
        // for the parameters collection.
        case TSqlTokenType.AsciiStringLiteral:
        case TSqlTokenType.Real:
        case TSqlTokenType.Integer:
        case TSqlTokenType.Money:
        case TSqlTokenType.Numeric:
        case TSqlTokenType.UnicodeStringLiteral:
            // We use a simple naming scheme - i.e. @p1, @p2 and so on.
            string p = "@p" + (++parameterNumber).ToString();
            SqlStmt.Append(p);

            // Now create the entry in the SqlCommand Parameters collection.
            AddToParameterCollection(token, parameterNumber);
            break;
    }
}
public override QvxDataTable ExtractQuery(string query, List<QvxTable> qvxTables)
{
    QvxDataTable returnTable = null;
    IList<ParseError> errors = null;

    var parser = new TSql100Parser(true);
    TSqlScript script;
    using (TextReader reader = new StringReader(query))
    {
        script = parser.Parse(reader, out errors) as TSqlScript;
    }

    IList<TSqlParserToken> tokens = script.ScriptTokenStream;

    // Get the record folder.
    TSqlParserToken fromToken = tokens.First(t => t.TokenType == TSqlTokenType.From);
    int indexOfFromToken = tokens.IndexOf(fromToken);
    IEnumerable<TSqlParserToken> tableTokens = tokens.Skip(indexOfFromToken);

    TSqlParserToken identifier = tableTokens.First(t =>
        t.TokenType == TSqlTokenType.Identifier ||
        t.TokenType == TSqlTokenType.AsciiStringOrQuotedIdentifier);

    string folderName = identifier.Text;
    if (identifier.TokenType == TSqlTokenType.AsciiStringOrQuotedIdentifier)
    {
        folderName = folderName.Substring(1, folderName.Length - 2);
    }

    // Get the report name.
    tableTokens = tokens.Skip(tokens.IndexOf(identifier));
    TSqlParserToken reportSeparator = tableTokens.First(t => t.TokenType == TSqlTokenType.Dot);

    tableTokens = tokens.Skip(tokens.IndexOf(reportSeparator));
    TSqlParserToken reportNameToken = tableTokens.First(t =>
        t.TokenType == TSqlTokenType.Identifier ||
        t.TokenType == TSqlTokenType.AsciiStringOrQuotedIdentifier);

    string reportName = reportNameToken.Text;
    if (reportNameToken.TokenType == TSqlTokenType.AsciiStringOrQuotedIdentifier)
    {
        reportName = reportName.Substring(1, reportName.Length - 2);
    }

    if (this.MParameters.ContainsKey("folder_name"))
    {
        if (folderName == this.MParameters["folder_name"] && this.MTables == null)
        {
            this.Init();
        }
        else if (folderName != this.MParameters["folder_name"])
        {
            this.MParameters["folder_name"] = folderName;
            this.Init();
        }
    }
    else
    {
        this.MParameters.Add("folder_name", folderName);
        this.Init();
    }

    var newTable = this.FindTable(reportName, this.MTables);
    returnTable = new QvxDataTable(newTable);
    return returnTable;
}
public static int GetColumnNumberAfterToken(int tabsOnLine, TSqlParserToken token)
{
    return token.Column + token.Text.Length + ((tabsOnLine * Constants.TabWidth) - tabsOnLine);
}
public static int GetColumnNumberBeforeToken(int tabsOnLine, TSqlParserToken token)
{
    return token.Column + ((tabsOnLine * Constants.TabWidth) - tabsOnLine);
}
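A short worked example of the tab arithmetic in the two helpers above, assuming Constants.TabWidth is 4; the token values are made up for illustration. Each tab occupies one character in the raw text but TabWidth columns on screen, so every tab on the line adds (TabWidth - 1) extra columns:

// Hypothetical token: "SELECT" preceded by two tabs, so the lexer reports Column = 3.
int tabsOnLine = 2;
int rawColumn = 3;
int tokenLength = "SELECT".Length; // 6

// Column before the token once tabs are expanded: 3 + (2 * 4 - 2) = 9.
int before = rawColumn + ((tabsOnLine * 4) - tabsOnLine);

// Column after the token: 3 + 6 + (2 * 4 - 2) = 15.
int after = rawColumn + tokenLength + ((tabsOnLine * 4) - tabsOnLine);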
private int AdjustColumnForDymamicSQL(TSqlParserToken node)
{
    return node.Line == DynamicSqlStartLine ? DynamicSqlStartColumn : 0;
}
public void WriteToken(TSqlParserToken token)
{
    _writer.Write(token.Text);
}
private void SendFeedBack(int errorNum, TSqlParserToken errorToken)
{
    Console.WriteLine(errorNum.ToString(CultureInfo.InvariantCulture));
    // TODO: For future, may need offset from following token.
}
private static bool IsKeyword(TSqlParserToken sqlParserToken)
{
    return sqlParserToken.IsKeyword() ||
           _additionalKeywords.Any(p => p == sqlParserToken.Text.ToUpper());
}
public static void PutFragmentSpan(IQsiTreeNode node, TSqlParserToken first, TSqlParserToken last)
{
    node.UserData?.PutData(QsiNodeProperties.Span, new Range(first.Offset, last.Offset + last.Text.Length));
}
private static bool IsKeyword(TSqlParserToken token)
{
    return token.IsKeyword();
}