} // End Sub LexerTest

// Parses the given T-SQL text into a full parse tree and walks it with
// an EverythingListener; any failure is reported to the console.
static void WalkerTest(string text)
{
    try
    {
        var reader = new System.IO.StringReader(text);
        var input = new Antlr4.Runtime.AntlrInputStream(reader);
        var lexer = new TSqlLexer(input);
        var tokens = new Antlr4.Runtime.CommonTokenStream(lexer);
        var parser = new TSqlParser(tokens);

        // Entry point: the whole-file rule.
        TSqlParser.Tsql_fileContext fileContext = parser.tsql_file();
        System.Console.WriteLine("fileContext.ChildCount = " + fileContext.ChildCount.ToString());

        // Attach the listener and walk the complete tree.
        var walker = new Antlr4.Runtime.Tree.ParseTreeWalker();
        var listener = new EverythingListener();
        walker.Walk(listener, fileContext);
    }
    catch (System.Exception e)
    {
        System.Console.WriteLine(e.Message);
    }
} // End Sub WalkerTest
} // End Sub WalkerTest

// Parses the given T-SQL text and prints the string produced by
// visiting the tree with SqlVisitor.
static void VisitorTest(string text)
{
    try
    {
        var reader = new System.IO.StringReader(text);
        var input = new Antlr4.Runtime.AntlrInputStream(reader);
        var lexer = new TSqlLexer(input);
        var tokens = new Antlr4.Runtime.CommonTokenStream(lexer);
        var parser = new TSqlParser(tokens);

        // Entry point: the whole-file rule.
        TSqlParser.Tsql_fileContext fileContext = parser.tsql_file();
        System.Console.WriteLine("fileContext.ChildCount = " + fileContext.ChildCount.ToString());

        var vis = new SqlVisitor();
        string s = vis.Visit(fileContext);
        System.Console.WriteLine(s);
    }
    catch (System.Exception e)
    {
        System.Console.WriteLine(e.Message);
    }
} // End Sub VisitorTest
// Parses a single DDL clause, prints the resulting parse tree and any
// syntax errors collected during parsing, then waits for the user.
static void ParseDDLClause(string input)
{
    var inputStream = new AntlrInputStream(input);
    var lexer = new TSqlLexer(inputStream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var errorHandler = new SyntaxErrorListener();
    parser.AddErrorListener(errorHandler);

    var parseTree = parser.ddl_clause();

    // Walk the tree with the extended listener to populate a SelectStatement.
    var walker = new ParseTreeWalker();
    var loader = new TSqlParserListenerExtended(new SelectStatement())
    {
        TokenStream = tokens
    };
    walker.Walk(loader, parseTree);

    Console.WriteLine("Parse Tree:");
    Console.WriteLine(parseTree.ToStringTree(parser));
    Console.WriteLine("Errors:");
    foreach (var error in errorHandler.Errors)
    {
        Console.WriteLine(error.Message + " at position " + error.Line.ToString() + ":" + error.CharPositionInLine.ToString());
    }

    Console.Write("Press enter key to continue");
    Console.ReadLine();
}
// Entry point: reads a SQL workload file, parses it case-insensitively,
// walks the tree with AntlrTsqlListener and prints the analyzed
// table/column/condition information.
//
// Fix: removed the unused locals lQuery, lQuery1 and lQuery2 — they held
// hard-coded sample queries that were assigned but never read.
static void Main(string[] args)
{
    string text = ReadFile($@"C:\Users\Lukáš\Desktop\doc\Workload_bp.txt");
    StringReader reader = new StringReader(text);
    AntlrInputStream input = new AntlrInputStream(reader);

    // Wrap the stream so the grammar's keyword matching is case-insensitive.
    TSqlLexer lexer = new TSqlLexer(new CaseChangingCharStream(input, true));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    TSqlParser parser = new TSqlParser(tokens);

    TSqlParser.Tsql_fileContext Tsql_fileContext1 = parser.tsql_file();

    Antlr4.Runtime.Tree.ParseTreeWalker walker = new Antlr4.Runtime.Tree.ParseTreeWalker();
    AntlrTsqlListener listener = new AntlrTsqlListener();
    walker.Walk(listener, Tsql_fileContext1);

    // Dump the analysis result: one line per table, one per condition.
    foreach (var nTable in listener.AnalyzedWorkload)
    {
        Console.WriteLine("Tabulka " + nTable.Name);
        foreach (var nColumn in nTable.Columns)
        {
            foreach (var nCondition in nColumn.Conditions)
            {
                Console.WriteLine($" col { nCondition.ColumnName} operator { nCondition.Operator} val {nCondition.Value}");
            }
        }
    }

    Console.ReadKey();
}
// Parses the given string starting at the rule named by the
// start_rulename field (resolved via reflection on the parser) and
// stores the resulting tree in 'root'.
public void load_string(string str)
{
    var inputStream = new AntlrInputStream(str);
    var lexer = new TSqlLexer(inputStream);
    var commonTokenStream = new CommonTokenStream(lexer);
    var parser = new TSqlParser(commonTokenStream);

    if (!noerrorlistener)
    {
        // Replace the default console error listener with our own.
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ParserErrorListener());
    }

    // The entry rule is chosen at runtime by name.
    Type t = parser.GetType();
    MethodInfo mi = t.GetMethod(start_rulename);
    if (mi == null)
    {
        throw new ArgumentException("rulename " + start_rulename + " not found.");
    }

    // Invoke the rule method, e.g. parser.tsql_file().
    var graphContext = (ParserRuleContext)mi.Invoke(parser, new object[] { });

    var tree_parser = new TreeParser(parser.TokenNames, parser.RuleNames);
    root = tree_parser.parse(graphContext);
}
// Builds a TSqlParser over the given SQL text, upper-casing the input
// stream so the grammar's keyword matching is case-insensitive.
public static TSqlParser GetParser(string sql)
{
    var caseInsensitiveStream = new CaseChangingCharStream(new AntlrInputStream(sql), upper: true);
    var lexer = new TSqlLexer(caseInsensitiveStream);
    return new TSqlParser(new CommonTokenStream(lexer));
}
// Parses a search condition and notifies the listener.
//
// Fix: the original called EnterRule(listener) directly on the context,
// which fires only the single "enter" callback for that one rule — the
// listener never received child-rule or exit events. Walking the tree
// with ParseTreeWalker delivers the full enter/exit sequence.
private void Boo(string input)
{
    Debug.WriteLine("Boo");

    AntlrInputStream antlrInput = new AntlrInputStream(input);
    TSqlLexer lexer = new TSqlLexer(antlrInput);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    TSqlParser parser = new TSqlParser(tokens);

    TSqlParserListenerExtended listener = new TSqlParserListenerExtended();
    ParseTreeWalker.Default.Walk(listener, parser.search_condition());
}
// Parses a DML statement (optionally prefixed with a participant
// keyword) and returns the populated statement object, with database
// name / participant / process information attached where relevant.
private FrostIDMLStatement GetDMLStatement(string input, string databaseName)
{
    // Strip the participant keyword, if present, before parsing.
    var sqlStatement = HasParticipant(input) ? RemoveParticipantKeyword(input) : input;

    var inputStream = new AntlrInputStream(sqlStatement);
    var lexer = new TSqlLexer(inputStream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var parseTree = parser.dml_clause();

    var loader = new TSqlParserListenerExtended(GetDMLStatementType(sqlStatement), sqlStatement)
    {
        TokenStream = tokens
    };
    new ParseTreeWalker().Walk(loader, parseTree);

    FrostIDMLStatement result;
    switch (loader.DMLStatement)
    {
        case InsertStatement insert:
            // Participant data comes from the original (unstripped) input.
            insert.Participant = GetParticipant(GetParticipantString(input));
            insert.ParticipantString = GetParticipantString(input);
            insert.DatabaseName = databaseName;
            if (insert.Participant is null)
            {
                // Fall back to this process's own location.
                insert.Participant = new Participant(_process.GetLocation());
            }
            result = insert;
            break;

        case UpdateStatement update:
            update.DatabaseName = databaseName;
            update.SetProcess(_process);
            result = update as FrostIDMLStatement;
            break;

        default:
            // Any other DML statement is returned as-is.
            result = loader.DMLStatement;
            break;
    }

    return result;
}
/// <summary>
/// Parses the TSQL and walks the resulting tree with the supplied
/// listener. All lexer/parser diagnostics are discarded (TextWriter.Null).
/// </summary>
/// <param name="input">The TSQL text to parse.</param>
/// <param name="listener">The listener notified during the tree walk.</param>
private static void ParseTSQL(string input, IParseTreeListener listener)
{
    ICharStream charStream = CharStreams.fromstring(input);
    ITokenSource lexer = new TSqlLexer(charStream, TextWriter.Null, TextWriter.Null);
    ITokenStream tokenStream = new CommonTokenStream(lexer);

    var parser = new TSqlParser(tokenStream, TextWriter.Null, TextWriter.Null)
    {
        BuildParseTree = true
    };

    ParseTreeWalker.Default.Walk(listener, parser.tsql_file());
}
// Parses the given TSQL text and returns the parse tree for the whole
// file. Lexer/parser diagnostics are discarded (TextWriter.Null).
public static IParseTree ParseTree(string input)
{
    ICharStream charStream = CharStreams.fromstring(input);
    ITokenSource lexer = new TSqlLexer(charStream, TextWriter.Null, TextWriter.Null);
    ITokenStream tokenStream = new CommonTokenStream(lexer);

    var parser = new TSqlParser(tokenStream, TextWriter.Null, TextWriter.Null)
    {
        BuildParseTree = true
    };

    return parser.tsql_file();
}
// Exploratory helper: parses a full select statement and walks it with
// the extended listener.
private void Bar(string input)
{
    Debug.WriteLine("Bar");

    var inputStream = new AntlrInputStream(input);
    var lexer = new TSqlLexer(inputStream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var parseTree = parser.select_statement();

    var loader = new TSqlParserListenerExtended();
    new ParseTreeWalker().Walk(loader, parseTree);
}
// Parses a T-SQL script (case-insensitively) and returns the table
// information gathered by SqlListener.
private TableInfo GetTableInfo(string script)
{
    var caseChangingStream = new CaseChangingCharStream(new AntlrInputStream(script), true);
    var lexer = new TSqlLexer(caseChangingStream);
    var parser = new TSqlParser(new CommonTokenStream(lexer));

    var listener = new SqlListener();
    new ParseTreeWalker().Walk(listener, parser.tsql_file());

    return listener.TableInfo;
}
// Parses the input as a single expression (exploratory helper) and
// reports completion via a message box.
//
// Fix: removed the unused TSqlParserListenerExtended local — it was
// constructed but never attached to a walker, so it had no effect.
private void ParseInput(string input)
{
    AntlrInputStream inputStream = new AntlrInputStream(input);
    TSqlLexer sqlLexer = new TSqlLexer(inputStream);
    CommonTokenStream commonTokenStream = new CommonTokenStream(sqlLexer);
    TSqlParser sqlParser = new TSqlParser(commonTokenStream);

    // Parse for its side effects only; the resulting context is unused.
    // (A visitor pass was stubbed out here — see TSqlVisitor.)
    sqlParser.expression();

    MessageBox.Show("Done");
}
// Reads the workload script at aScriptPath, parses it
// case-insensitively and stores the analyzed tables in Tables;
// analysis errors are forwarded to aOnAnalyzingError.
public void ProcessWorkload(string aScriptPath, EventHandler<string> aOnAnalyzingError)
{
    StringReader lReader = new StringReader(ReadFile(aScriptPath));
    AntlrInputStream lInput = new AntlrInputStream(lReader);

    // Upper-case the stream so keyword matching is case-insensitive.
    TSqlLexer lLexer = new TSqlLexer(new CaseChangingCharStream(lInput, true));
    TSqlParser lParser = new TSqlParser(new CommonTokenStream(lLexer));

    TSqlParser.Tsql_fileContext lFileContext = lParser.tsql_file();

    AntlrTsqlListener lListener = new AntlrTsqlListener();
    lListener.OnAnalyzingError += aOnAnalyzingError;

    new Antlr4.Runtime.Tree.ParseTreeWalker().Walk(lListener, lFileContext);

    Tables = lListener.AnalyzedWorkload;
}
// Lazily yields one SearchResult per string-literal token found in
// this.Stream.
//
// Fix: the CancellationToken parameter was accepted but never observed;
// the enumeration now stops promptly (OperationCanceledException) when
// cancellation is requested.
public IEnumerable<SearchResult> Search(CancellationToken cancelToken = default(CancellationToken))
{
    var lexer = new TSqlLexer(this.Stream);
    lexer.RemoveErrorListeners();

    var tokenStream = new CommonTokenStream(lexer);
    tokenStream.Fill();

    foreach (var token in tokenStream.GetTokens())
    {
        cancelToken.ThrowIfCancellationRequested();

        if (TryGetStringLiteralResult(token, out var result))
        {
            yield return result;
        }
    }
}
// Lexes a sample query and collects the names of all local variables
// (tokens of type LOCAL_ID, e.g. "@username"), trimmed of surrounding
// whitespace.
//
// Fix: removed the unused local tokenTypeName, which was computed for
// every token and never read.
public static System.Collections.Generic.List<string> GetVariableNames()
{
    System.Collections.Generic.List<string> ls = new System.Collections.Generic.List<string>();

    string text = @" SELECT BE_Name FROM T_Benutzer WHERE Name =@username OR Name LIKE '%' + @foo + '%' ";

    System.IO.StringReader reader = new System.IO.StringReader(text);
    Antlr4.Runtime.ICharStream input1 = new Antlr4.Runtime.AntlrInputStream(reader);

    // Upper-case the stream so keyword matching is case-insensitive.
    Antlr4.Runtime.CaseChangingCharStream input = new Antlr4.Runtime.CaseChangingCharStream(input1, true);
    TSqlLexer lexer = new TSqlLexer(input);

    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
    tokenStream.Fill();

    int lastIndex = 0;
    foreach (Antlr4.Runtime.IToken token in tokenStream.GetTokens())
    {
        // Raw text from the end of the previous token through this token,
        // extracted directly from the input stream.
        Antlr4.Runtime.Misc.Interval ival = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StopIndex);
        string extracted = token.InputStream.GetText(ival);

        // table_name, cte_name: ID, SQUARE_BRACKET_ID
        // Local variables: LOCAL_ID
        if (token.Type == TSqlLexer.LOCAL_ID)
        {
            extracted = extracted.Trim(new char[] { ' ', '\t', '\v', '\r', '\n' });
            ls.Add(extracted);
        } // End if (token.Type == TSqlLexer.LOCAL_ID)

        lastIndex = token.StopIndex + 1;
    } // Next token

    return ls;
} // End Function GetVariableNames
// Parses a DML clause, prints the tree and any syntax errors; for
// SELECT statements the parsed statement is serialized to JSON and a
// query plan is generated and executed.
static void ParseInput(string input)
{
    var lexer = new TSqlLexer(new AntlrInputStream(input));
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var errorHandler = new SyntaxErrorListener();
    parser.AddErrorListener(errorHandler);

    var parseTree = parser.dml_clause();

    var loader = new TSqlParserListenerExtended(new SelectStatement())
    {
        TokenStream = tokens
    };
    new ParseTreeWalker().Walk(loader, parseTree);

    Console.WriteLine("Parse Tree:");
    Console.WriteLine(parseTree.ToStringTree(parser));
    Console.WriteLine("Errors:");
    foreach (var error in errorHandler.Errors)
    {
        Console.WriteLine(error.Message + " at position " + error.Line.ToString() + ":" + error.CharPositionInLine.ToString());
    }

    // NOTE(review): case-sensitive check — a lower-case "select" would be
    // skipped here; confirm that is intended.
    if (input.Contains("SELECT"))
    {
        var selectStatement = loader.GetStatementAsSelect();
        var text = JsonConvert.SerializeObject(selectStatement);

        Console.WriteLine("Review Parse. Press any key to continue.");
        Console.ReadLine();
        Console.WriteLine(text);
        Console.WriteLine("Executing Generated Plan");
        GeneratePlan(selectStatement);
    }

    Console.Write("Press enter key to continue");
    Console.ReadLine();
}
// Exploratory helper: constructs several *empty* rule contexts (nothing
// is actually parsed from the input text) and walks each one with the
// extended listener, so only the enter/exit callbacks for each bare
// context fire.
private void ParseListener(string input)
{
    var lexer = new TSqlLexer(new AntlrInputStream(input));
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var selectStatement = new TSqlParser.Select_statementContext(parser.Context, 0);
    var selectElements = new TSqlParser.Select_list_elemContext(parser.Context, 0);
    var selectListContext = new TSqlParser.Select_listContext(parser.Context, 0);
    var sqlClauseContext = new TSqlParser.Sql_clauseContext(parser.Context, 0);
    var clasesContext = new TSqlParser.Sql_clausesContext(parser.Context, 0);

    var walker = new ParseTreeWalker();
    var listener = new TSqlParserListenerExtended();

    // Walk order preserved from the original experiment.
    walker.Walk(listener, selectStatement);
    walker.Walk(listener, selectListContext);
    walker.Walk(listener, selectElements);
    walker.Walk(listener, sqlClauseContext);
    walker.Walk(listener, clasesContext);
}
// Exploratory helper: parses a DML clause, walks it with the extended
// listener, and dumps the stringified tree to the debug output.
private void Boom(string input)
{
    Debug.WriteLine("Boom");

    var inputStream = new AntlrInputStream(input);
    var lexer = new TSqlLexer(inputStream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    // These entry rules all appear to work in some way:
    //   dml_clause / query_specification / query_expression
    var parseTree = parser.dml_clause();

    var loader = new TSqlParserListenerExtended();
    new ParseTreeWalker().Walk(loader, parseTree);

    Debug.WriteLine("---");
    Debug.WriteLine(parseTree.ToStringTree(parser));

    // Kept from the original — handy for debugger inspection of the
    // lexer vocabulary.
    var item = lexer.Vocabulary;
    Debug.WriteLine("");
}
// Parses a DDL statement (optionally prefixed with a participant
// keyword) and returns the corresponding CREATE TABLE / CREATE DATABASE
// statement object, or null for any other DDL.
// NOTE(review): databaseName is currently unused here — confirm whether
// it should be attached to the result as in GetDMLStatement.
private FrostIDDLStatement GetDDLStatement(string input, string databaseName)
{
    // Strip the participant keyword, if present, before parsing.
    var sqlStatement = HasParticipant(input) ? RemoveParticipantKeyword(input) : input;

    var lexer = new TSqlLexer(new AntlrInputStream(sqlStatement));
    var tokens = new CommonTokenStream(lexer);
    var parser = new TSqlParser(tokens);

    var parseTree = parser.ddl_clause();

    var loader = new TSqlParserListenerExtended(GetDDLStatementType(sqlStatement), sqlStatement)
    {
        TokenStream = tokens
    };
    new ParseTreeWalker().Walk(loader, parseTree);

    FrostIDDLStatement result = null;
    if (loader.IsStatementCreateTable())
    {
        result = loader.GetStatementAsCreateTable();
    }
    if (loader.IsStatementCreateDatabase())
    {
        result = loader.GetStatementAsCreateDatabase();
    }

    return result;
}
// Splits a script containing INSERT statements for multiple tables into
// one .sql file per target table (written UTF-8 into a subdirectory
// named after the input file). Statements are grouped by scanning the
// lexer token stream: each INSERT opens a new statement, the target
// table name is collected until the column list's "(" is seen, and
// GO/USE/; terminate the current statement.
static void SplitMultiTableInsertScript()
{
    // Each assignment overrides the previous one; only the LAST path is
    // actually processed (quick manual switching between inputs).
    string fileName = @"D:\SQL\TESS\Anlage_Refdaten.txt";
    fileName = @"D:\SQL\TESS\Adressdaten.txt";
    fileName = @"D:\SQL\TESS\Anlagedaten.txt";
    fileName = @"D:\SQL\TESS\Anlagerechte.txt";
    fileName = @"D:\SQL\TESS\Kontaktdaten.txt";
    fileName = @"D:\SQL\TESS\Navigation.txt";
    fileName = @"D:\username\Desktop\Raumdaten\Raumdaten.sql";
    fileName = @"D:\username\Desktop\Raumdaten\Vertragsdaten.sql";

    System.Text.StringBuilder sb = new System.Text.StringBuilder();

    // https://github.com/antlr/grammars-v4/tree/master/tsql
    // https://github.com/antlr/grammars-v4/tree/master/plsql/CSharp
    System.Text.Encoding enc = GetSystemEncoding();
    string text = System.IO.File.ReadAllText(fileName, enc);
    System.IO.StringReader reader = new System.IO.StringReader(text);

    // Antlr4.Runtime.AntlrInputStream input = new Antlr4.Runtime.AntlrInputStream(reader);
    // Upper-case the stream so keyword matching is case-insensitive.
    Antlr4.Runtime.ICharStream input1 = new Antlr4.Runtime.AntlrInputStream(reader);
    Antlr4.Runtime.CaseChangingCharStream input = new Antlr4.Runtime.CaseChangingCharStream(input1, true);
    TSqlLexer lexer = new TSqlLexer(input);

    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
    tokenStream.Fill();

    int lastIndex = 0;

    // table name -> list of INSERT statements targeting that table
    System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>> dict = new System.Collections.Generic.Dictionary<string, System.Collections.Generic.List<string>>(System.StringComparer.InvariantCultureIgnoreCase);
    System.Collections.Generic.List<string> lsTableName = new System.Collections.Generic.List<string>();

    bool ignoreThis = true;        // true while tokens must NOT be appended (outside an INSERT)
    bool partOfTableName = false;  // true while tokens belong to the INSERT target name
    int lastTokenType = -1;
    int secondLastTokenType = -1;

    foreach (Antlr4.Runtime.IToken token in tokenStream.GetTokens())
    {
        // System.Console.WriteLine(token.Text);
        string tokenTypeName = lexer.Vocabulary.GetSymbolicName(token.Type);

        // Raw text from the end of the previous token through this one
        // (preserves inter-token whitespace/comments, minus line breaks).
        Antlr4.Runtime.Misc.Interval ival = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StopIndex);
        string extracted = token.InputStream.GetText(ival);
        extracted = extracted.Trim(new char[] { '\t', '\v', '\r', '\n' });

        if (token.Type == TSqlLexer.INSERT)
        {
            // A new INSERT starts: flush the previous statement (if any)
            // under the table name collected so far.
            if (sb.Length > 0)
            {
                string tn = string.Join(".", lsTableName.ToArray()).Replace("[", "").Replace("]", "").Trim();
                lsTableName.Clear();
                System.Console.WriteLine(tn);
                if (!dict.ContainsKey(tn))
                {
                    dict[tn] = new System.Collections.Generic.List<string>();
                }

                sb.Append(";");
                dict[tn].Add(sb.ToString());
            }

            sb.Clear();
            ignoreThis = false;
            partOfTableName = true;
        }
        else if (token.Type == TSqlLexer.GO)
        {
            ignoreThis = true;
            partOfTableName = false;
        }
        else if (token.Type == TSqlLexer.USE)
        {
            ignoreThis = true;
            partOfTableName = false;
        }
        else if (token.Type == TSqlLexer.SEMI)
        {
            ignoreThis = true;
            partOfTableName = false;
        }
        else if (token.Type == TSqlLexer.Eof)
        {
        }
        else if (token.Type == TSqlLexer.LR_BRACKET)
        {
            // Opening bracket of the column list ends the table name.
            partOfTableName = false;
        }
        else if (token.Type == TSqlLexer.RR_BRACKET)
        {
        }
        else if (token.Type == TSqlLexer.COMMA)
        {
        }
        else if (token.Type == TSqlLexer.INTO)
        {
        }
        else if (token.Type == TSqlLexer.VALUES || token.Type == TSqlLexer.SELECT)
        {
        }
        else if (token.Type == TSqlLexer.ID || token.Type == TSqlLexer.SQUARE_BRACKET_ID)
        {
            // Identifiers between INSERT [INTO] and "(" form the
            // (possibly dotted) target table name.
            if (partOfTableName)
            {
                lsTableName.Add(extracted);
            }
        }
        else if (token.Type == TSqlLexer.DOT)
        {
        }
        else if (token.Type == TSqlLexer.STRING)
        {
        }
        else if (token.Type == TSqlLexer.DECIMAL)
        {
        }
        else if (token.Type == TSqlLexer.FLOAT)
        {
        }
        else if (token.Type == TSqlLexer.NULL)
        {
        }
        else if (token.Type == TSqlLexer.CAST)
        {
        }
        else if (token.Type == TSqlLexer.AS)
        {
            // CAST(xxx AS datetime)
        }
        else if (token.Type == TSqlLexer.MINUS)
        {
            // Negative Number
        }
        else
        {
            // Unhandled token type — log its symbolic name for review.
            System.Console.WriteLine(tokenTypeName);
        }

        // System.Console.WriteLine((extracted));
        // Append everything belonging to the current INSERT; the trailing
        // ";" is added explicitly at flush time, so SEMI is skipped.
        if (!ignoreThis && token.Type != TSqlLexer.SEMI)
        {
            sb.Append(extracted);
        }

        // System.Console.WriteLine(token.Text);
        // System.Console.WriteLine(token.Type);
        // System.Console.WriteLine(tokenTypeName);
        lastIndex = token.StopIndex + 1;
        secondLastTokenType = lastTokenType;
        lastTokenType = token.Type;
    } // Next token

    // Flush the final statement after the loop (same logic as above).
    if (sb.Length > 0)
    {
        string tn = string.Join(".", lsTableName.ToArray()).Replace("[", "").Replace("]", "").Trim();
        lsTableName.Clear();
        System.Console.WriteLine(tn);
        if (!dict.ContainsKey(tn))
        {
            dict[tn] = new System.Collections.Generic.List<string>();
        }

        sb.Append(";");
        dict[tn].Add(sb.ToString());
    } // End if (sb.Length > 0)

    sb.Clear();
    sb = null;

    // Output directory: <directory of input>\<input file name without extension>
    string baseDir = System.IO.Path.GetFileNameWithoutExtension(fileName);
    string outputDirectory = System.IO.Path.GetDirectoryName(fileName);
    baseDir = System.IO.Path.Combine(outputDirectory, baseDir);
    if (!System.IO.Directory.Exists(baseDir))
    {
        System.IO.Directory.CreateDirectory(baseDir);
    }

    // One UTF-8 .sql file per table, statements separated by blank lines.
    foreach (System.Collections.Generic.KeyValuePair<string, System.Collections.Generic.List<string>> kvp in dict)
    {
        string dir = kvp.Key;
        string content = string.Join("\r\n\r\n", kvp.Value.ToArray());
        System.Console.WriteLine(content);
        string fn = System.IO.Path.Combine(baseDir, kvp.Key + ".sql");
        System.IO.File.WriteAllText(fn, content, System.Text.Encoding.UTF8);
    } // Next kvp

    System.Console.WriteLine(dict);
} // End Sub SplitMultiTableInsertScript
} // End Sub SplitMultiTableInsertScript

// https://github.com/antlr/grammars-v4/tree/master/tsql
// https://github.com/antlr/grammars-v4/tree/master/plsql/CSharp
//
// Lexes a sample T-SQL script, reconstructs it token by token and
// prints every local variable (LOCAL_ID token, e.g. @legalEntity)
// encountered. Despite the name, no substitution is performed yet: the
// reconstructed SQL is returned unchanged.
static string SubstituteVariablesTest()
{
    string text = @" DECLARE @legalEntity int -- SET @legalEntity = 1 ;WITH CTE AS ( SELECT 1 AS id, 123 AS abc UNION SELECT 2 AS id, 456 AS abc UNION SELECT 3 AS id, 789 AS abc UNION SELECT 4 AS id, 012 AS abc UNION SELECT 5 AS id, 345 AS abc UNION SELECT 6 AS id, 678 AS abc ) SELECT * ,'@legalEntity' AS abcdef -- strings do not get substituted ,987 AS [@legalEntity] -- identifiers do not get substituted FROM CTE WHERE (1=1) AND ( '0' IN (@legalEntity, @legalEntity ) OR CTE.id IN (@legalEntity) -- CTE.id IN (@legalEntity /* @legalEntity */) ) /* ==> AND ( '0' IN (1,2,6) OR CTE.id IN (1,2,6) -- OR CTE.id IN (1,2,3,4,5,6 /* 1,2,3,4,5,6 */) ) */ ";

    System.IO.StringReader reader = new System.IO.StringReader(text);

    // Antlr4.Runtime.AntlrInputStream input = new Antlr4.Runtime.AntlrInputStream(reader);
    // Upper-case the stream so keyword matching is case-insensitive.
    Antlr4.Runtime.ICharStream input1 = new Antlr4.Runtime.AntlrInputStream(reader);
    Antlr4.Runtime.CaseChangingCharStream input = new Antlr4.Runtime.CaseChangingCharStream(input1, true);
    TSqlLexer lexer = new TSqlLexer(input);

    Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
    tokenStream.Fill();

    System.Text.StringBuilder sb = new System.Text.StringBuilder();
    int lastIndex = 0;

    foreach (Antlr4.Runtime.IToken token in tokenStream.GetTokens())
    {
        // System.Console.WriteLine(token.Text);
        string tokenTypeName = lexer.Vocabulary.GetSymbolicName(token.Type);

#if NO_COMMENTS
        // When compiled with NO_COMMENTS, comments are dropped from the
        // rebuilt SQL: only the whitespace that preceded the comment is
        // kept (or a single space if there was none).
        if (token.Type == TSqlLexer.LINE_COMMENT || token.Type == TSqlLexer.COMMENT || token.Type == TSqlLexer.Eof)
        {
            Antlr4.Runtime.Misc.Interval blankInterval = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StartIndex - 1);
            string extractedBlank = token.InputStream.GetText(blankInterval);
            if (string.IsNullOrEmpty(extractedBlank))
            {
                sb.Append(" ");
            }
            else
            {
                sb.Append(extractedBlank);
            }

            lastIndex = token.StopIndex + 1;
            continue;
        } // End if comment
#endif

        // sql += token.Text + " ";
        // Raw text from the end of the previous token through this token,
        // so whitespace between tokens is preserved in the rebuilt SQL.
        Antlr4.Runtime.Misc.Interval ival = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StopIndex);
        string extracted = token.InputStream.GetText(ival);

        // table_name, cte_name: ID, SQUARE_BRACKET_ID
        // Local variables: LOCAL_ID
        if (token.Type == TSqlLexer.LOCAL_ID)
        {
            // NOTE(review): LOCAL_ID tokens are appended trimmed, which
            // drops the whitespace that preceded them — confirm intended.
            extracted = extracted.Trim(new char[] { ' ', '\t', '\v', '\r', '\n' });
            System.Console.WriteLine(extracted);
        } // End if (token.Type == TSqlLexer.LOCAL_ID)

        // System.Console.WriteLine((extracted));
        sb.Append(extracted);

        // System.Console.WriteLine(token.Text);
        // System.Console.WriteLine(token.Type);
        // System.Console.WriteLine(tokenTypeName);
        lastIndex = token.StopIndex + 1;
    } // Next token

    string sql = sb.ToString();
    sb.Clear();
    sb = null;

    System.Console.WriteLine(sql);
    return (sql);
} // End Sub SubstituteVariablesTest
} // End Sub CommentRemoverLexerTest

// https://github.com/dotjpg3141/Strings
//
// Comment-stripping round trip: lexes the given T-SQL text and rebuilds
// it token by token, replacing every line/block comment with the
// whitespace that preceded it (or a single space), then prints the
// resulting SQL. Errors are reported to the console.
static void LexerTest(string text)
{
    try
    {
        System.IO.StringReader reader = new System.IO.StringReader(text);

        // Antlr4.Runtime.AntlrInputStream input = new Antlr4.Runtime.AntlrInputStream(reader);
        // Upper-case the stream so keyword matching is case-insensitive.
        Antlr4.Runtime.ICharStream input1 = new Antlr4.Runtime.AntlrInputStream(reader);
        Antlr4.Runtime.CaseChangingCharStream input = new Antlr4.Runtime.CaseChangingCharStream(input1, true);
        TSqlLexer lexer = new TSqlLexer(input);

        Antlr4.Runtime.CommonTokenStream tokenStream = new Antlr4.Runtime.CommonTokenStream(lexer);
        tokenStream.Fill();

        System.Text.StringBuilder sb = new System.Text.StringBuilder();
        int lastIndex = 0;

        foreach (Antlr4.Runtime.IToken token in tokenStream.GetTokens())
        {
            // System.Console.WriteLine(token.Text);
            string tokenTypeName = lexer.Vocabulary.GetSymbolicName(token.Type);

            // Comments (and EOF) are not copied: keep only the whitespace
            // that preceded them, or a single space if there was none.
            if (token.Type == TSqlLexer.LINE_COMMENT || token.Type == TSqlLexer.COMMENT || token.Type == TSqlLexer.Eof)
            {
                Antlr4.Runtime.Misc.Interval blankInterval = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StartIndex - 1);
                string extractedBlank = token.InputStream.GetText(blankInterval);
                if (string.IsNullOrEmpty(extractedBlank))
                {
                    sb.Append(" ");
                }
                else
                {
                    sb.Append(extractedBlank);
                }

                lastIndex = token.StopIndex + 1;
                continue;
            } // End if comment

            // sql += token.Text + " ";
            // Raw text from the end of the previous token through this
            // token, so inter-token whitespace is preserved.
            Antlr4.Runtime.Misc.Interval ival = new Antlr4.Runtime.Misc.Interval(lastIndex, token.StopIndex);
            string extracted = token.InputStream.GetText(ival);

            // System.Console.WriteLine((extracted));
            sb.Append(extracted);

            // System.Console.WriteLine(token.Text);
            // System.Console.WriteLine(token.Type);
            // System.Console.WriteLine(tokenTypeName);
            lastIndex = token.StopIndex + 1;
        } // Next token

        string sql = sb.ToString();
        sb.Clear();
        sb = null;

        System.Console.WriteLine(sql);
    }
    catch (System.Exception e)
    {
        System.Console.WriteLine(e.Message);
    }
} // End Sub LexerTest