/// <summary>
/// Builds a paged version of <paramref name="querySqlString"/> using the
/// SQL Server 2012 OFFSET ... FETCH syntax. Returns null when no SELECT
/// column list can be located in the query.
/// </summary>
public override SqlString GetLimitString(SqlString querySqlString, SqlString offset, SqlString limit)
{
    var tokens = new SqlTokenizer(querySqlString).GetEnumerator();

    // Without a recognizable SELECT column list we cannot safely page the query.
    if (!tokens.TryParseUntilFirstMsSqlSelectColumn())
        return null;

    var builder = new SqlStringBuilder(querySqlString);

    // OFFSET/FETCH requires an ORDER BY clause; supply a harmless one if absent.
    if (!tokens.TryParseUntil("order"))
        builder.Add(" ORDER BY CURRENT_TIMESTAMP");

    builder.Add(" OFFSET ");
    if (offset == null)
    {
        builder.Add("0 ROWS");
    }
    else
    {
        builder.Add(offset).Add(" ROWS");
    }

    if (limit != null)
        builder.Add(" FETCH FIRST ").Add(limit).Add(" ROWS ONLY");

    return builder.ToSqlString();
}
/// <summary>
/// A GROUP BY keyword following a table name must not be mistaken for a
/// table alias: the parsed name's Alias should remain empty.
/// </summary>
public void AliasParseTest()
{
    const string sql = @"select count(*) from innovator.[transaction_request] group by id";

    var tree = new SqlTokenizer(sql).Parse();
    var firstName = tree.OfType<SqlName>().First();

    Assert.IsTrue(string.IsNullOrEmpty(firstName.Alias));
}
/// <summary>
/// Pages a query by row count alone by injecting a TOP (n) clause
/// immediately before the first SELECT column. Returns null when the
/// SELECT column list cannot be found.
/// </summary>
private SqlString PageByLimitOnly(SqlString limit)
{
    var enumerator = new SqlTokenizer(_sourceQuery).GetEnumerator();
    if (!enumerator.TryParseUntilFirstMsSqlSelectColumn())
        return null;

    // Splice "TOP (limit) " in front of the first column expression.
    var insertAt = enumerator.Current.SqlIndex;
    return _sourceQuery.Insert(insertAt, new SqlString("TOP (", limit, ") "));
}
/// <summary>
/// Returns the distinct names (without the leading '@') of the parameters
/// referenced by <paramref name="query"/>, excluding system variables
/// (@@...) and variables DECLAREd inside the query itself.
/// </summary>
/// <param name="query">SQL text to scan.</param>
/// <returns>Distinct, undeclared parameter names in order of first use.</returns>
public IEnumerable<string> GetParameterNames(string query)
{
    var tokens = new SqlTokenizer(query).ToArray();
    var parsed = SqlTokenizer.Parse(tokens);
    var declares = new SqlDeclares(parsed);
    var declared = new HashSet<string>(declares.Names ?? Enumerable.Empty<string>());

    // Reuse the token stream produced above instead of tokenizing the query
    // a second time (the original re-ran the tokenizer here for no benefit).
    return tokens
        .OfType<SqlLiteral>()
        .Where(t => t.Text[0] == '@' && !t.Text.StartsWith("@@") && !declared.Contains(t.Text))
        .Select(t => t.Text.Substring(1))
        .Distinct();
}
// GetColumnNames should keep asterisk selections ("*", "tr.*", "tr2.*") as
// column names, use the trailing identifier as the name of a computed
// column ("1 + 3 count"), and drop the bare literal column ("3").
public void SelectColumnNamesAsteriskTest()
{
    var sql = @"select 1 + 3 count , * , tr.* , tr2.* /* not-named */ , 3 , created_by_id from innovator.[transaction_request] tr group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var correct = new string[] { "count", "*", "tr.*", "tr2.*", "created_by_id" };
    var names = parsed.GetColumnNames().ToArray();
    CollectionAssert.AreEqual(correct, names);
}
// A structurally broken first statement (the dangling "and ISNULL(al.")
// must not derail parsing: both semicolon-terminated statements should
// still come back as two separate parse results.
public void SqlTestUnfinishedExpr()
{
    var sql = @"select max(a.CAN_GET) CAN_GET from innovator.[ALIAS] al and ISNULL(al. where a.SOURCE_ID = @permission ; select parent, child from innovator.[IDENTITY] i ;";
    var parsed = new SqlTokenizer(sql).Parse();
    Assert.AreEqual(2, parsed.Count);
}
/// <summary>
/// A quoted literal tokenizes into start quote, body, and end quote, with
/// 1-based line/column positions tracking the source text.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task SimpleStringWithBody_ReturnsTheBody()
{
    var actualTask = SqlTokenizer.TokenizeAsync("'test'");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringStart, 1, 1));
    expected[0].Text = "'";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringBody, 1, 2));
    expected[1].Text = "test";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringEnd, 1, 6));
    expected[2].Text = "'";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
// NOTE(review): this test currently asserts nothing — the expected-value
// comparison is commented out below, so it only verifies that parsing and
// column extraction do not throw. Consider restoring a real assertion.
public void SelectColumnNamesSubSelectNoNameTest()
{
    var sql = @"select *, 1 + 2 count from ( select * from ( select *, 3 + 4 thing from another ) b ) d group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var ctx = new SqlContext(parsed);
    var names = ctx.Tables.Single().Columns.ToArray();
    //CollectionAssert.AreEqual(correct, names);
}
/// <summary>
/// A doubled quote inside a string is an escaped quote: "' '''" yields a
/// body of " '" followed by the closing quote at column 5.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task StringEndingWithEscapedQuote_ReturnsCorrectly()
{
    var actualTask = SqlTokenizer.TokenizeAsync("' '''");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringStart, 1, 1));
    expected[0].Text = "'";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringBody, 1, 2));
    expected[1].Text = " '";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringEnd, 1, 5));
    expected[2].Text = "'";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
/// <summary>
/// This code is not used yet but will likely be used in the future.
/// Splits a SQL string into chunks at statement separators and parameter
/// references, honoring the server's sql_mode quoting/escaping settings.
/// </summary>
/// <param name="sql">The raw SQL text to split.</param>
/// <returns>An <see cref="ArrayList"/> of string chunks.</returns>
private ArrayList TokenizeSql2(string sql)
{
    ArrayList sqlChunks = new ArrayList();
    StringBuilder currentChunk = new StringBuilder();
    // Batching requires both the connection settings and the driver to
    // allow it; ';' only splits chunks when batching is unavailable.
    bool batch = Connection.Settings.AllowBatch & Driver.SupportsBatch;
    int lastPos = 0;
    SqlTokenizer tokenizer = new SqlTokenizer(sql);
    // Mirror the server's quoting rules so quoted identifiers/strings are
    // not mistaken for separators.
    string sql_mode = Connection.driver.Property("sql_mode");
    if (sql_mode != null)
    {
        sql_mode = sql_mode.ToString().ToLower();
        tokenizer.AnsiQuotes = sql_mode.IndexOf("ansi_quotes") != -1;
        tokenizer.BackslashEscapes = sql_mode.IndexOf("no_backslash_escapes") == -1;
    }
    string token = tokenizer.NextToken();
    while (token != null)
    {
        if (token == ";" && !batch)
        {
            // Statement separator ends the current chunk.
            sqlChunks.Add(currentChunk.ToString());
            currentChunk.Remove(0, currentChunk.Length);
        }
        else if (token.Length >= 2 && ((token[0] == '@' && token[1] != '@') || token[0] == '?'))
        {
            // A user variable (@x but not @@system) or a '?' placeholder
            // also terminates the current chunk.
            sqlChunks.Add(currentChunk.ToString());
            currentChunk.Remove(0, currentChunk.Length);
        }
        else
        {
            currentChunk.Append(sql.Substring(lastPos, tokenizer.Index - lastPos + 1));
            lastPos = tokenizer.Index;
        }
        token = tokenizer.NextToken();
    }
    if (currentChunk.Length > 0)
    {
        sqlChunks.Add(currentChunk.ToString());
    }
    return (sqlChunks);
}
/// <summary>
/// Asserts that tokenizing <paramref name="input"/> yields exactly the
/// given <paramref name="tokens"/>, in order, with nothing left over.
/// </summary>
private static void Verify(string input, string[] tokens, bool skipComments = true)
{
    var tokenizer = new SqlTokenizer(input) { SkipComments = skipComments };

    foreach (var expected in tokens)
    {
        Assert.IsTrue(tokenizer.HasMoreTokens);
        tokenizer.ReadNextToken();
        Assert.AreEqual(expected, tokenizer.Current.Value);
    }

    // One more read past the final expected token must exhaust the input.
    tokenizer.ReadNextToken();
    Assert.IsFalse(tokenizer.HasMoreTokens);
}
/// <summary>
/// An unterminated line comment runs to end of input: unknown word,
/// whitespace, "--", then everything remaining as the comment body.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task JunkFollowedByWhitespaceAndLineComment_ReturnsCorrectResult()
{
    var actualTask = SqlTokenizer.TokenizeAsync("lijdfisuyndfk --this is junk");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.Unknown, 1, 1));
    expected[0].Text = "lijdfisuyndfk";
    expected.Add(new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 14));
    expected[1].Text = " ";
    expected.Add(new SqlToken(SqlToken.TokenTypes.LineCommentStart, 1, 15));
    expected[2].Text = "--";
    expected.Add(new SqlToken(SqlToken.TokenTypes.LineCommentBody, 1, 17));
    expected[3].Text = "this is junk";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
/// <summary>
/// A "/*" inside an open block comment is plain comment body — block
/// comments do not nest — so "/*\r\n/**/" ends at the first "*/".
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task BlockCommentWithInternalBlockCommentStart_ReturnsCorrectResult()
{
    var actualTask = SqlTokenizer.TokenizeAsync("/*\r\n/**/");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.BlockCommentStart, 1, 1));
    expected[0].Text = "/*";
    expected.Add(new SqlToken(SqlToken.TokenTypes.Newline, 1, 3));
    expected[1].Text = "\r\n";
    expected.Add(new SqlToken(SqlToken.TokenTypes.BlockCommentBody, 2, 1));
    expected[2].Text = "/*";
    expected.Add(new SqlToken(SqlToken.TokenTypes.BlockCommentEnd, 2, 3));
    expected[3].Text = "*/";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
/// <summary>
/// Parses a single CONSTRAINT clause from SHOW CREATE TABLE output and, if
/// it is a FOREIGN KEY, records it (and optionally its columns) in
/// <paramref name="fkTable"/>.
/// </summary>
/// <param name="fkTable">Table that receives one row per foreign key.</param>
/// <param name="table">Schema row describing the table being parsed.</param>
/// <param name="tokenizer">Tokenizer positioned just after the CONSTRAINT keyword.</param>
/// <param name="includeColumns">Whether to also parse and record the key's columns.</param>
private void ParseConstraint(DataTable fkTable, DataRow table, SqlTokenizer tokenizer, bool includeColumns)
{
    // First token after CONSTRAINT is the constraint name.
    string str = tokenizer.NextToken();
    DataRow row = fkTable.NewRow();
    string str2 = tokenizer.NextToken();
    // Only an unquoted "foreign" marks a FOREIGN KEY clause; a quoted token
    // is just an identifier that looks like the keyword.
    if ((str2 == "foreign") && !tokenizer.Quoted)
    {
        // Consume the next two tokens (presumably "key" and what follows —
        // confirm against the tokenizer's output for this dialect).
        tokenizer.NextToken();
        tokenizer.NextToken();
        row["CONSTRAINT_CATALOG"] = table["TABLE_CATALOG"];
        row["CONSTRAINT_SCHEMA"] = table["TABLE_SCHEMA"];
        row["TABLE_CATALOG"] = table["TABLE_CATALOG"];
        row["TABLE_SCHEMA"] = table["TABLE_SCHEMA"];
        row["TABLE_NAME"] = table["TABLE_NAME"];
        row["REFERENCED_TABLE_CATALOG"] = null;
        row["CONSTRAINT_NAME"] = str;
        ArrayList srcColumns = includeColumns ? this.ParseColumns(tokenizer) : null;
        // Scan forward to the unquoted REFERENCES keyword.
        while ((str2 != "references") || tokenizer.Quoted)
        {
            str2 = tokenizer.NextToken();
        }
        string str3 = tokenizer.NextToken();
        string str4 = tokenizer.NextToken();
        // A following ".xxx" token means str3 was a schema qualifier.
        if (str4.StartsWith("."))
        {
            row["REFERENCED_TABLE_SCHEMA"] = str3;
            row["REFERENCED_TABLE_NAME"] = str4.Substring(1);
            tokenizer.NextToken();
        }
        else
        {
            // Unqualified reference: assume the table's own schema.
            row["REFERENCED_TABLE_SCHEMA"] = table["TABLE_SCHEMA"];
            row["REFERENCED_TABLE_NAME"] = str3;
        }
        ArrayList targetColumns = includeColumns ? this.ParseColumns(tokenizer) : null;
        if (includeColumns)
        {
            this.ProcessColumns(fkTable, row, srcColumns, targetColumns);
        }
        else
        {
            fkTable.Rows.Add(row);
        }
    }
}
/// <summary>
/// Returns the text of the SQL statement containing <paramref name="offset"/>,
/// falling back to the full source text when no statement can be identified.
/// </summary>
public string GetCurrentQuery(ITextSource source, int offset)
{
    var fullText = source.Text;
    var tree = new SqlTokenizer(fullText).Parse();
    if (!tree.Any())
        return fullText;

    var node = tree.NodeByOffset(offset);
    var statement = Parents(node).Reverse().Skip(1).FirstOrDefault() as SqlGroup;
    if (statement == null)
        return fullText;

    // Slice from the statement's start through the end of its last literal.
    var last = LastLiteral(statement);
    var length = last.StartOffset + last.Text.Length - statement.StartOffset;
    return fullText.Substring(statement.StartOffset, length);
}
/// <summary>
/// Splits the script text into individual statements at each delimiter,
/// recording each statement's line number and in-line position. Delimiters
/// inside quoted tokens or size specifications are ignored.
/// </summary>
/// <param name="ansiQuotes">Whether the server treats double quotes as identifier quotes.</param>
/// <param name="noBackslashEscapes">Whether backslash escaping is disabled on the server.</param>
/// <returns>The list of parsed statements, trimmed, with positions.</returns>
private List<ScriptStatement> BreakIntoStatements(bool ansiQuotes, bool noBackslashEscapes)
{
    int startIndex = 0;
    List<ScriptStatement> list = new List<ScriptStatement>();
    List<int> lineNumbers = this.BreakScriptIntoLines();
    SqlTokenizer tokenizer = new SqlTokenizer(this.query);
    tokenizer.AnsiQuotes = ansiQuotes;
    tokenizer.BackslashEscapes = !noBackslashEscapes;
    for (string str = tokenizer.NextToken(); str != null; str = tokenizer.NextToken())
    {
        // Quoted tokens and size specs can legally contain the delimiter
        // text and must not split the script.
        if (!tokenizer.Quoted && !tokenizer.IsSize)
        {
            int index = str.IndexOf(this.Delimiter);
            if (index != -1)
            {
                // Absolute position of the delimiter within the script.
                int num3 = (tokenizer.Index - str.Length) + index;
                // Include the final character when the token ends the script.
                if (tokenizer.Index == (this.query.Length - 1))
                {
                    num3++;
                }
                string str2 = this.query.Substring(startIndex, num3 - startIndex);
                ScriptStatement item = new ScriptStatement();
                item.text = str2.Trim();
                item.line = this.FindLineNumber(startIndex, lineNumbers);
                // lineNumbers holds each line's starting offset, so this is
                // the statement's column within its line.
                item.position = startIndex - lineNumbers[item.line];
                list.Add(item);
                startIndex = num3 + this.delimiter.Length;
            }
        }
    }
    // Whatever trails the last delimiter becomes the final statement.
    if (tokenizer.Index > startIndex)
    {
        string str3 = this.query.Substring(startIndex).Trim();
        if (!string.IsNullOrEmpty(str3))
        {
            ScriptStatement statement2 = new ScriptStatement();
            statement2.text = str3;
            statement2.line = this.FindLineNumber(startIndex, lineNumbers);
            statement2.position = startIndex - lineNumbers[statement2.line];
            list.Add(statement2);
        }
    }
    return (list);
}
// The tokenizer does not support %for block comments yet: after consuming
// the leading WHERE word it must throw UnsupportedSqlCommentException. The
// commented-out assertions document the behavior a future implementation
// would need to satisfy.
public void testForBlockComment()
{
    var tokenizer = new SqlTokenizer("where /*%for element : list*/bbb");
    tokenizer.Next().Is(SqlTokenType.WHERE_WORD);
    tokenizer.Token.Is("where");
    var ex = Assert.Throws<UnsupportedSqlCommentException>(() => tokenizer.Next());
    ex.IsNotNull();
    //tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    //tokenizer.Token.Is(" ");
    //tokenizer.Next().Is(SqlTokenType.FOR_BLOCK_COMMENT);
    //tokenizer.Token.Is("/*%for element : list*/");
    //tokenizer.Next().Is(SqlTokenType.WORD);
    //tokenizer.Token.Is("bbb");
    //tokenizer.Next().Is(SqlTokenType.EOF);
    //tokenizer.Token.IsNull();
}
// Two joined physical tables plus one joined sub-select should surface as
// three tables in the SqlContext.
public void SelectContextTest()
{
    var sql = @"select 1 + 3 count , tr.minor_rev from innovator.[transaction_request] tr inner join innovator.[Part_bom] pb on pb.source_id = tr.part inner join ( select id, stuff, another from innovar.part ) d on d.id = pb.relate_id group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var context = new SqlContext(parsed);
    Assert.AreEqual(3, context.Tables.Count());
}
/// <summary>
/// Builds the node list for an auto-generated SELECT statement, resolving
/// the table name from the explicit table, the entity type, or the method's
/// return type, and an ORDER BY clause from an order parameter, the Order
/// property, or the return type's key columns (in that priority).
/// </summary>
/// <exception cref="BuilderException">No table name could be resolved.</exception>
public override IReadOnlyList<INode> GetNodes(ISqlLoader loader, IGeneratorOption option, MethodInfo mi)
{
    var parameters = BuildHelper.GetParameters(option, mi);
    var order = BuildHelper.PickParameter<OrderAttribute>(parameters);

    // Table name priority: explicit table, entity type, then return type.
    var tableName = table
        ?? (type != null ? BuildHelper.GetTableNameOfType(option, type) : null)
        ?? BuildHelper.GetReturnTableName(option, mi);
    if (String.IsNullOrEmpty(tableName))
    {
        throw new BuilderException($"Table name resolve failed. type=[{mi.DeclaringType.FullName}], method=[{mi.Name}]");
    }

    var sql = new StringBuilder();
    sql.Append("SELECT * FROM ");
    sql.Append(tableName);
    BuildHelper.AddCondition(sql, parameters);

    if (order != null)
    {
        // A dynamic order parameter is injected via a pragma comment.
        sql.Append(" ORDER BY ");
        sql.Append($"/*# {order.Name} */dummy");
    }
    else if (!String.IsNullOrEmpty(Order))
    {
        sql.Append(" ORDER BY ");
        sql.Append(Order);
    }
    else
    {
        var columns = BuildHelper.MakeKeyColumns(option, mi.ReturnType);
        if (!String.IsNullOrEmpty(columns))
        {
            sql.Append(" ORDER BY ");
            sql.Append(columns);
        }
    }

    var tokenizer = new SqlTokenizer(sql.ToString());
    return new NodeBuilder(tokenizer.Tokenize()).Build();
}
// A bind-variable block comment (/*aaa*/) binds to the entire word that
// follows it, including an attached quoted literal — so
// timestamp'2001-01-01 12:34:56' must come back as a single WORD token.
public void testBindBlockComment_followingWordAndQuote()
{
    var tokenizer = new SqlTokenizer("where /*aaa*/timestamp'2001-01-01 12:34:56' and");
    tokenizer.Next().Is(SqlTokenType.WHERE_WORD);
    tokenizer.Token.Is("where");
    tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    tokenizer.Token.Is(" ");
    tokenizer.Next().Is(SqlTokenType.BIND_VARIABLE_BLOCK_COMMENT);
    tokenizer.Token.Is("/*aaa*/");
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("timestamp'2001-01-01 12:34:56'");
    tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    tokenizer.Token.Is(" ");
    tokenizer.Next().Is(SqlTokenType.AND_WORD);
    tokenizer.Token.Is("and");
    tokenizer.Next().Is(SqlTokenType.EOF);
    tokenizer.Token.IsNull();
}
/// <summary>
/// GetForeignKeysOnTable retrieves the foreign keys on the given table.
/// Since MySQL supports foreign keys on versions prior to 5.0, we can't use
/// information schema. MySQL also does not include any type of SHOW command
/// for foreign keys so we have to resort to use SHOW CREATE TABLE and parsing
/// the output.
/// </summary>
/// <param name="fkTable">The table to store the key info in.</param>
/// <param name="tableToParse">The table to get the foreign key info for.</param>
/// <param name="filterName">Only get foreign keys that match this name.</param>
/// <param name="includeColumns">Should column information be included in the table.</param>
private void GetForeignKeysOnTable(DataTable fkTable, DataRow tableToParse, string filterName, bool includeColumns)
{
    string sqlMode = GetSqlMode();
    // NOTE(review): filterName is lowered here but not used in the visible
    // code path — confirm ParseConstraint (or a caller) applies the filter.
    if (filterName != null)
    {
        filterName = filterName.ToLower(CultureInfo.InvariantCulture);
    }

    string sql = string.Format("SHOW CREATE TABLE `{0}`.`{1}`", tableToParse["TABLE_SCHEMA"], tableToParse["TABLE_NAME"]);
    string lowerBody = null, body = null;
    MySqlCommand cmd = new MySqlCommand(sql, connection);
    using (MySqlDataReader reader = cmd.ExecuteReader())
    {
        reader.Read();
        body = reader.GetString(1);
        lowerBody = body.ToLower(CultureInfo.InvariantCulture);
    }

    SqlTokenizer tokenizer = new SqlTokenizer(lowerBody);
    tokenizer.AnsiQuotes = sqlMode.IndexOf("ANSI_QUOTES") != -1;
    // BUG FIX: NO_BACKSLASH_ESCAPES *disables* backslash escaping, so the
    // flag must be true only when the mode is absent (== -1). The previous
    // "!= -1" inverted this, contradicting every other tokenizer setup in
    // this source (see TokenizeSql2, GetCommandTextForBatching,
    // GetProcedureParameterLine).
    tokenizer.BackslashEscapes = sqlMode.IndexOf("NO_BACKSLASH_ESCAPES") == -1;

    while (true)
    {
        string token = tokenizer.NextToken();
        // look for a starting constraint
        while (token != null && (token != "constraint" || tokenizer.Quoted))
        {
            token = tokenizer.NextToken();
        }
        if (token == null)
        {
            break;
        }
        ParseConstraint(fkTable, tableToParse, tokenizer, includeColumns);
    }
}
/// <summary>
/// Leading whitespace, a line comment, the newline, and trailing whitespace
/// each become separate tokens with 1-based line/column positions.
/// NOTE(review): the expected Newline text below is "\r\n" while the input
/// literal contains only "\n" — this can only pass if the tokenizer
/// normalizes newlines; confirm against the tokenizer implementation.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task WhenPassedWhitespaceThenLineComment_ReturnsCorrectResult()
{
    var actualTask = SqlTokenizer.TokenizeAsync("   --This is a comment\n   ");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 1));
    expected[0].Text = "   ";
    expected.Add(new SqlToken(SqlToken.TokenTypes.LineCommentStart, 1, 4));
    expected[1].Text = "--";
    expected.Add(new SqlToken(SqlToken.TokenTypes.LineCommentBody, 1, 6));
    expected[2].Text = "This is a comment";
    expected.Add(new SqlToken(SqlToken.TokenTypes.Newline, 1, 23));
    expected[3].Text = "\r\n";
    expected.Add(new SqlToken(SqlToken.TokenTypes.Whitespace, 2, 1));
    expected[4].Text = "   ";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
// Column aliases may also be assigned with "name = expression" syntax; such
// names, trailing-identifier aliases, and dotted names should all be
// returned, while unnamed expressions and bare literals are skipped.
public void SelectColumnNamesEqualsTest()
{
    var sql = @"select count = 1 + 3 , isnull(id, config_id) not_null_id , isnull(id, config_id) /* not-named */ , isnull(id, config_id) , 3 , created_by_id = thing , owned_by_id stuff , major_rev = tr.major_rev2 , tr.minor_rev from innovator.[transaction_request] tr group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var correct = new string[] { "count", "not_null_id", "created_by_id", "stuff", "major_rev", "minor_rev" };
    var names = parsed.GetColumnNames().ToArray();
    CollectionAssert.AreEqual(correct, names);
}
/// <summary>
/// Re-tokenizes the currently selected SQL text, transforms each token via
/// ProcessToken, and inserts the rebuilt text into the editor.
/// </summary>
public override void Execute()
{
    // Removed the original's unused "all = _addIn.AllText" local; its value
    // was never read. (Assumes the AllText getter has no side effects —
    // confirm against the add-in model if in doubt.)
    var sql = _addIn.TextDocument.Selection.Text;

    var tokenizer = new SqlTokenizer( sql );
    // Keep whitespace tokens so the rebuilt text preserves formatting.
    tokenizer.SkipWhiteSpace = false;

    var output = new System.Text.StringBuilder();
    tokenizer.ReadNextToken();
    while ( tokenizer.HasMoreTokens )
    {
        // The (Token)null cast deliberately routes through Token's
        // overloaded inequality operator.
        if ( tokenizer.Current != (Token)null )
            output.Append( ProcessToken( tokenizer.Current.Value ) );
        tokenizer.ReadNextToken();
    }
    _addIn.InsertText( output.ToString() );
}
/// <inheritdoc />
/// <summary>
/// Extracts the added entity (table path and column values) from an
/// "INSERT INTO ..." command; returns null for any statement that does not
/// match the expected INSERT shape.
/// </summary>
public EntityInfo Extract(DbCommand command, SqlString sqlString)
{
    if (!sqlString.StartsWithCaseInsensitive("INSERT INTO"))
        return null;

    var tokenizer = new SqlTokenizer(new SqlString(command.CommandText));
    using (var tokens = tokenizer.GetEnumerator())
    {
        ReadToTablePath(tokens);
        var tablePath = ReadTablePath(tokens);
        if (tablePath == null)
            return null;

        var columns = ReadColumns(tokens);
        if (columns == null)
            return null;

        ReadToValues(tokens);
        var parameters = ReadParameters(tokens);
        if (parameters == null)
            return null;

        // Column list and VALUES list must pair up one-to-one.
        if (columns.Length != parameters.Length)
            return null;

        return new EntityInfo()
        {
            State = EntityState.Added,
            TablePath = tablePath,
            Values = ExtractorHelper.CreateValues(columns, parameters, command)
        };
    }
}
/// <summary>
/// A string literal may span lines: the embedded newline becomes its own
/// token and line/column tracking restarts on the next line.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures (async void exceptions are unobservable).
public async System.Threading.Tasks.Task MultilineString_ParsesCorrectly()
{
    var actualTask = SqlTokenizer.TokenizeAsync("'testline1\r\ntestline2'");

    var expected = new List<SqlToken>();
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringStart, 1, 1));
    expected[0].Text = "'";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringBody, 1, 2));
    expected[1].Text = "testline1";
    expected.Add(new SqlToken(SqlToken.TokenTypes.Newline, 1, 11));
    expected[2].Text = "\r\n";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringBody, 2, 1));
    expected[3].Text = "testline2";
    expected.Add(new SqlToken(SqlToken.TokenTypes.StringEnd, 2, 10));
    expected[4].Text = "'";

    var actual = await actualTask;
    AssertArePropertiesEqual(expected, actual);
}
// Columns of a derived table are resolved transitively: "b.*" inside the
// derived table expands to the columns of the nested derived table b, and
// plain names pass through, giving the outer "select *" its full list.
public void SelectColumnNamesSubSelectTest()
{
    var sql = @"select * from ( select count, non_null_id, b.* from a inner join ( select first, second, third from another ) b on a.thing = b.stuff ) d group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var correct = new string[] { "count", "non_null_id", "first", "second", "third" };
    var ctx = new SqlContext(parsed);
    var names = ctx.Tables.Single().Columns.ToArray();
    CollectionAssert.AreEqual(correct, names);
}
// Baseline column-name extraction: trailing-identifier aliases are used as
// names (whether or not comments follow them), plain and dotted column
// references keep their last identifier, and unnamed expressions or bare
// literals are skipped.
public void SelectColumnNamesTest()
{
    var sql = @"select 1 + 3 count , isnull(id, config_id) not_null_id , isnull(id, config_id) /* not-named */ , isnull(id, config_id) named /* named */ , isnull(id, config_id) named2 -- named asdfsdf , isnull(id, config_id) , 3 , created_by_id , owned_by_id stuff , tr.major_rev , tr.minor_rev from innovator.[transaction_request] tr group by id, config_id, major_rev";
    var parsed = new SqlTokenizer(sql).Parse();
    var correct = new string[] { "count", "not_null_id", "named", "named2", "created_by_id", "stuff", "major_rev", "minor_rev" };
    var names = parsed.GetColumnNames().ToArray();
    CollectionAssert.AreEqual(correct, names);
}
/// <summary>
/// Round-trip check over every .sql fixture file: concatenating
/// ToFullString() of all tokens must reproduce the file text exactly
/// (modulo '\r', which the comparison strips). Unknown characters are
/// logged but not failed on. PROJECTCONTROLS.sql is skipped (known
/// problematic input).
/// </summary>
public void ScanAll()
{
    DirectoryInfo dir = new DirectoryInfo(Path.Combine(Path.GetDirectoryName(typeof(TokenizerTests).Assembly.Location), "sql-files"));
    foreach (FileInfo fi in dir.GetFiles("*.sql", SearchOption.AllDirectories))
    {
        string file = fi.FullName;
        if (fi.Name.Equals("PROJECTCONTROLS.sql"))
        {
            continue;
        }
        using (StreamReader sr = File.OpenText(file))
        using (SqlReader rdr = new SqlReader(sr, new SqlSource(file)))
        using (SqlTokenizer tk = new SqlTokenizer(rdr))
        {
            tk.Dialect = SqlDialect.Oracle;
            Console.WriteLine($"== {file} ==");
            int n = 0;
            List<SqlToken> tokens = new List<SqlToken>();
            foreach (var v in tk)
            {
                switch (v.Kind)
                {
                    case SqlKind.UnknownCharToken:
                        // Log (but tolerate) characters the tokenizer could
                        // not classify.
                        Console.WriteLine($"{n++:000}: {v.Kind}: {v} ({v.Position})");
                        break;
                }
                tokens.Add(v);
            }
            string expected = File.ReadAllText(file).Replace("\r", "");
            string actual = string.Join("", tokens.Select(x => x.ToFullString()));
            Assert.AreEqual(expected, actual);
        }
    }
}
/// <summary>
/// Returns a version of the command text usable for batching. For INSERT
/// statements this is the text of the first VALUES(...) group; batching is
/// abandoned when another value group (',') or an ON clause follows. All
/// other statements fall back to the raw command text. The result is
/// cached in batchableCommandText.
/// </summary>
internal string GetCommandTextForBatching()
{
    if (this.batchableCommandText == null)
    {
        // BUG FIX: the original used CommandText.Substring(0, 6), which
        // throws ArgumentOutOfRangeException for commands shorter than six
        // characters; StartsWith with an ordinal comparison also avoids
        // culture surprises (e.g. the Turkish dotless 'I').
        if (this.CommandText.StartsWith("INSERT", StringComparison.OrdinalIgnoreCase))
        {
            MySqlCommand command = new MySqlCommand("SELECT @@sql_mode", this.Connection);
            string str = command.ExecuteScalar().ToString().ToLower(CultureInfo.InvariantCulture);
            SqlTokenizer tokenizer = new SqlTokenizer(this.CommandText);
            tokenizer.AnsiQuotes = str.IndexOf("ansi_quotes") != -1;
            tokenizer.BackslashEscapes = str.IndexOf("no_backslash_escapes") == -1;
            // The original lowercased only the very first token here (and
            // could NRE on a null first token); the loop body lowercases
            // every token before comparing anyway, so the initializer can
            // safely be the plain NextToken().
            for (string str2 = tokenizer.NextToken(); str2 != null; str2 = tokenizer.NextToken())
            {
                if ((str2.ToLower(CultureInfo.InvariantCulture) == "values") && !tokenizer.Quoted)
                {
                    // Copy tokens up to and including the closing ')' of the
                    // VALUES group.
                    str2 = tokenizer.NextToken();
                    while ((str2 != null) && (str2 != ")"))
                    {
                        this.batchableCommandText = this.batchableCommandText + str2;
                        str2 = tokenizer.NextToken();
                    }
                    if (str2 != null)
                    {
                        this.batchableCommandText = this.batchableCommandText + str2;
                    }
                    // A following ',' (another value group) or 'on'
                    // (ON DUPLICATE KEY ...) makes the statement unbatchable.
                    str2 = tokenizer.NextToken();
                    if ((str2 != null) && ((str2 == ",") || (str2.ToLower(CultureInfo.InvariantCulture) == "on")))
                    {
                        this.batchableCommandText = null;
                        break;
                    }
                }
            }
        }
        if (this.batchableCommandText == null)
        {
            this.batchableCommandText = this.CommandText;
        }
    }
    return (this.batchableCommandText);
}
/// <inheritdoc />
/// <summary>
/// Extracts the modified entity (table path, SET values, WHERE key) from an
/// "UPDATE ..." command; returns null for any statement that does not match
/// the expected UPDATE shape.
/// </summary>
public EntityInfo Extract(DbCommand command, SqlString sqlString)
{
    if (!sqlString.StartsWithCaseInsensitive("UPDATE"))
        return null;

    var tokenizer = new SqlTokenizer(new SqlString(command.CommandText));
    using (var tokens = tokenizer.GetEnumerator())
    {
        ReadToTablePath(tokens);
        var tablePath = ReadTablePath(tokens);
        if (tablePath == null)
            return null;

        var set = ReadSet(tokens);
        if (set == null)
            return null;

        var where = ReadWhere(tokens);
        if (where == null)
            return null;

        // Item1/Item2 are presumably the column names and parameter tokens
        // of each clause — confirm against ReadSet/ReadWhere.
        var setValues = ExtractorHelper.CreateValues(set.Item1, set.Item2, command);
        var whereValues = ExtractorHelper.CreateValues(where.Item1, where.Item2, command);

        return new EntityInfo()
        {
            State = EntityState.Modified,
            TablePath = tablePath,
            Values = ExtractorHelper.Union(whereValues, setValues),
            UpdatedKey = ExtractorHelper.Intersect(setValues, whereValues)
        };
    }
}
/// <summary>
/// Splits <paramref name="sql"/> into chunks at statement separators (';',
/// only when batching is unavailable) and at parameter references (@var or
/// ?), honoring the server's sql_mode quoting and escaping settings.
/// (Decompiled twin of the other TokenizeSql2 in this source.)
/// </summary>
/// <param name="sql">The raw SQL text to split.</param>
/// <returns>An <see cref="ArrayList"/> of string chunks.</returns>
private ArrayList TokenizeSql2(string sql)
{
    ArrayList list = new ArrayList();
    StringBuilder builder = new StringBuilder();
    // flag: whether batching is possible at all.
    bool flag = this.Connection.Settings.AllowBatch & this.Driver.SupportsBatch;
    int startIndex = 0;
    SqlTokenizer tokenizer = new SqlTokenizer(sql);
    string str = this.Connection.driver.Property("sql_mode");
    if (str != null)
    {
        str = str.ToString().ToLower();
        tokenizer.AnsiQuotes = str.IndexOf("ansi_quotes") != -1;
        tokenizer.BackslashEscapes = str.IndexOf("no_backslash_escapes") == -1;
    }
    for (string str2 = tokenizer.NextToken(); str2 != null; str2 = tokenizer.NextToken())
    {
        if ((str2 == ";") && !flag)
        {
            // Statement separator ends the current chunk.
            list.Add(builder.ToString());
            builder.Remove(0, builder.Length);
        }
        else if ((str2.Length >= 2) && (((str2[0] == '@') && (str2[1] != '@')) || (str2[0] == '?')))
        {
            // A user variable (@x but not @@system) or a '?' placeholder
            // also terminates the current chunk.
            list.Add(builder.ToString());
            builder.Remove(0, builder.Length);
        }
        else
        {
            builder.Append(sql.Substring(startIndex, (tokenizer.Index - startIndex) + 1));
            startIndex = tokenizer.Index;
        }
    }
    if (builder.Length > 0)
    {
        list.Add(builder.ToString());
    }
    return (list);
}
/// <summary>
/// An UPDATE with four /*@ name */ bind comments should build as eight
/// nodes that strictly alternate SQL fragments with parameter nodes, in
/// source order.
/// </summary>
public void TestUpdate()
{
    var tokenizer = new SqlTokenizer(
        "UPDATE Data " +
        "SET Value1 = /*@ value1 */100, Value2 = /*@ value2 */'x' " +
        "WHERE Key1 = /*@ key1 */1 AND Key2 = /*@ key2 */'a'");
    var nodes = new NodeBuilder(tokenizer.Tokenize()).Build();

    Assert.Equal(8, nodes.Count);

    // Nodes alternate: SQL fragment, then the parameter that follows it.
    var parameterNames = new[] { "value1", "value2", "key1", "key2" };
    for (var i = 0; i < parameterNames.Length; i++)
    {
        Assert.NotNull(nodes[2 * i] as SqlNode);
        var parameter = nodes[2 * i + 1] as ParameterNode;
        Assert.NotNull(parameter);
        Assert.Equal(parameterNames[i], parameter.Name);
    }
}
/// <summary>
/// Parses a SELECT statement, recording the SQL indexes of its SELECT,
/// FROM and ORDER BY clauses (left at -1 when absent) plus the column and
/// order-by definitions.
/// </summary>
/// <param name="sql">The SQL to parse; must not be null.</param>
public MsSqlSelectParser(SqlString sql)
{
    if (sql == null) throw new ArgumentNullException("sql");
    this.Sql = sql;
    this.SelectIndex = this.FromIndex = this.OrderByIndex = -1;
    var tokenEnum = new SqlTokenizer(sql).GetEnumerator();
    tokenEnum.MoveNext();

    // Custom SQL may contain multiple SELECT statements, for example to assign parameters.
    // Therefore we loop over SELECT statements until a SELECT is found that returns data.
    SqlToken selectToken;
    bool isDistinct;
    if (tokenEnum.TryParseUntilFirstMsSqlSelectColumn(out selectToken, out isDistinct))
    {
        this.SelectIndex = selectToken.SqlIndex;
        this.IsDistinct = isDistinct;
        _columns.AddRange(ParseColumnDefinitions(tokenEnum));
        if (tokenEnum.TryParseUntil("from"))
        {
            this.FromIndex = tokenEnum.Current.SqlIndex;
            SqlToken orderToken;
            if (tokenEnum.TryParseUntilFirstOrderColumn(out orderToken))
            {
                this.OrderByIndex = orderToken.SqlIndex;
                foreach (var order in ParseOrderDefinitions(tokenEnum))
                {
                    _orders.Add(order);
                    // An ORDER BY column that is not selected is still
                    // tracked in _columns so later rewriting can use it.
                    if (!order.Column.InSelectClause)
                    {
                        _columns.Add(order.Column);
                    }
                }
            }
        }
        return;
    }
}
// %expand block comments are not supported yet: after consuming the SELECT
// word the tokenizer must throw UnsupportedSqlCommentException. The
// commented-out assertions document the behavior a future implementation
// would need to satisfy.
public void testExpandBlockComment_alias()
{
    var tokenizer = new SqlTokenizer("select /*%expand e*/* from");
    tokenizer.Next().Is(SqlTokenType.SELECT_WORD);
    tokenizer.Token.Is("select");
    var ex = Assert.Throws<UnsupportedSqlCommentException>(() => tokenizer.Next());
    ex.IsNotNull();
    //tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    //tokenizer.Token.Is(" ");
    //tokenizer.Next().Is(SqlTokenType.EXPAND_BLOCK_COMMENT);
    //tokenizer.Token.Is("/*%expand e*/");
    //tokenizer.Next().Is(SqlTokenType.OTHER);
    //tokenizer.Token.Is("*");
    //tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    //tokenizer.Token.Is(" ");
    //tokenizer.Next().Is(SqlTokenType.FROM_WORD);
    //tokenizer.Token.Is("from");
    //tokenizer.Next().Is(SqlTokenType.EOF);
    //tokenizer.Token.IsNull();
}
/// <summary>
/// Bracketized identifiers ([z], [yy]) should produce OpenBracket /
/// BracketBody / CloseBracket token triples with correct 1-based positions.
/// </summary>
// Changed from async void to async Task so the test runner can await the
// test and observe failures; the error-prone manual tokenIndex bookkeeping
// is replaced with a single (type, line, column, text) initializer list.
public async System.Threading.Tasks.Task RegularBracketizedTokens_ReturnCorrectlyResult()
{
    var actualTask = SqlTokenizer.TokenizeAsync("SELECT 1 [z], 2 [yy]");

    var expected = new List<SqlToken>
    {
        new SqlToken(SqlToken.TokenTypes.Keyword, 1, 1) { Text = "SELECT" },
        new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 7) { Text = " " },
        new SqlToken(SqlToken.TokenTypes.Unknown, 1, 8) { Text = "1" },
        new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 9) { Text = " " },
        new SqlToken(SqlToken.TokenTypes.OpenBracket, 1, 10) { Text = "[" },
        new SqlToken(SqlToken.TokenTypes.BracketBody, 1, 11) { Text = "z" },
        new SqlToken(SqlToken.TokenTypes.CloseBracket, 1, 12) { Text = "]" },
        new SqlToken(SqlToken.TokenTypes.Comma, 1, 13) { Text = "," },
        new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 14) { Text = " " },
        new SqlToken(SqlToken.TokenTypes.Unknown, 1, 15) { Text = "2" },
        new SqlToken(SqlToken.TokenTypes.Whitespace, 1, 16) { Text = " " },
        new SqlToken(SqlToken.TokenTypes.OpenBracket, 1, 17) { Text = "[" },
        new SqlToken(SqlToken.TokenTypes.BracketBody, 1, 18) { Text = "yy" },
        new SqlToken(SqlToken.TokenTypes.CloseBracket, 1, 20) { Text = "]" },
    };

    var actual = await actualTask;
    TokenizerGeneralSyntaxTests.AssertArePropertiesEqual(expected, actual);
}
/// <summary>
/// Returns the parameter-list text (the part between the outermost
/// parentheses) of a stored routine's CREATE statement, or null when the
/// definition is unavailable.
/// </summary>
/// <param name="isRow">Information-schema row identifying the routine.</param>
private string GetProcedureParameterLine(DataRow isRow)
{
    string format = "SHOW CREATE {0} `{1}`.`{2}`";
    MySqlCommand command = new MySqlCommand(string.Format(format, isRow["ROUTINE_TYPE"], isRow["ROUTINE_SCHEMA"], isRow["ROUTINE_NAME"]), base.connection);
    using (MySqlDataReader reader = command.ExecuteReader())
    {
        string str4;
        reader.Read();
        // Column 2 holds the routine body; DBNull means we cannot see it.
        if (reader.IsDBNull(2))
        {
            return (null);
        }
        // Column 1 holds the sql_mode in effect for the routine.
        string str2 = reader.GetString(1);
        string input = reader.GetString(2);
        SqlTokenizer tokenizer = new SqlTokenizer(input);
        tokenizer.AnsiQuotes = str2.IndexOf("ANSI_QUOTES") != -1;
        tokenizer.BackslashEscapes = str2.IndexOf("NO_BACKSLASH_ESCAPES") == -1;
        // Skip to the opening parenthesis of the parameter list.
        for (str4 = tokenizer.NextToken(); str4 != "("; str4 = tokenizer.NextToken())
        {
        }
        int startIndex = tokenizer.Index + 1;
        str4 = tokenizer.NextToken();
        // Walk to the matching unquoted close paren, skipping one nested
        // level of parentheses (e.g. type lengths like varchar(20)).
        while ((str4 != ")") || tokenizer.Quoted)
        {
            str4 = tokenizer.NextToken();
            if ((str4 == "(") && !tokenizer.Quoted)
            {
                while ((str4 != ")") || tokenizer.Quoted)
                {
                    str4 = tokenizer.NextToken();
                }
                str4 = tokenizer.NextToken();
            }
        }
        return (input.Substring(startIndex, tokenizer.Index - startIndex));
    }
}
// Position reports the 0-based column immediately after the current token
// and resets to 0 at each end-of-line token.
public void testColumnNumber()
{
    var tokenizer = new SqlTokenizer("aaa bbb\nc\nd eee\n");
    tokenizer.Position.Is(0);
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("aaa");
    tokenizer.Position.Is(3);
    tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    tokenizer.Token.Is(" ");
    tokenizer.Position.Is(4);
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("bbb");
    tokenizer.Position.Is(7);
    tokenizer.Next().Is(SqlTokenType.EOL);
    tokenizer.Token.Is("\n");
    // Each EOL resets the column counter.
    tokenizer.Position.Is(0);
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("c");
    tokenizer.Position.Is(1);
    tokenizer.Next().Is(SqlTokenType.EOL);
    tokenizer.Token.Is("\n");
    tokenizer.Position.Is(0);
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("d");
    tokenizer.Position.Is(1);
    tokenizer.Next().Is(SqlTokenType.WHITESPACE);
    tokenizer.Token.Is(" ");
    tokenizer.Position.Is(2);
    tokenizer.Next().Is(SqlTokenType.WORD);
    tokenizer.Token.Is("eee");
    tokenizer.Position.Is(5);
    tokenizer.Next().Is(SqlTokenType.EOL);
    tokenizer.Token.Is("\n");
    tokenizer.Position.Is(0);
    tokenizer.Next().Is(SqlTokenType.EOF);
    tokenizer.Token.IsNull();
}
/// <summary>
/// Computes completion items for the SQL text around the caret. The text
/// considered is <paramref name="prefix"/> plus the document text from
/// <paramref name="caret"/> up to <paramref name="termCharacter"/> (or the
/// end of the document); the completion context is chosen from the
/// parse-tree node found at the end of the prefix.
/// </summary>
/// <param name="prefix">Text logically preceding the caret.</param>
/// <param name="all">The full document text source.</param>
/// <param name="caret">Caret offset within <paramref name="all"/>.</param>
/// <param name="termCharacter">Terminator ending the considered text; may be null or empty.</param>
/// <param name="tableNameColumnPrefix">Whether column completions carry a table-name prefix.</param>
/// <returns>A promise resolving to the completions (empty when none apply) or rejected on error.</returns>
public IPromise<CompletionContext> Completions(string prefix, ITextSource all, int caret, string termCharacter
  , bool tableNameColumnPrefix = false)
{
  try
  {
    _tableNameColumnPrefix = tableNameColumnPrefix;
    var lastIndex = string.IsNullOrEmpty(termCharacter) ? -1 : all.IndexOf(termCharacter, caret, all.TextLength - caret, StringComparison.Ordinal);
    var sql = prefix + (lastIndex < 0 ? all.GetText(caret, all.TextLength - caret) : all.GetText(caret, lastIndex - caret));
    // Strip one pair of wrapping parentheses so a parenthesized sub-query
    // parses as a statement.
    if (sql.StartsWith("(") && sql.EndsWith(")"))
      sql = sql.Substring(1, sql.Length - 2);
    var parseTree = new SqlTokenizer(sql).Parse();
    if (!parseTree.Any())
      return Promises.Resolved(new CompletionContext());

    var currNode = parseTree.NodeByOffset(prefix.Length);
    var literal = currNode as SqlLiteral;
    if (literal != null)
    {
      var parGroup = literal.Parent as SqlGroup;
      // Close a stale overload tooltip when the caret has left the call it
      // was opened for.
      if (_overloadWin != null && (parGroup == null || !parGroup.First().TextEquals(_overloadName)))
        _overloadWin.Close();
      if (SqlTokenizer.KeywordPrecedesTable(literal))
      {
        // After a keyword that precedes a table name: offer tables,
        // schemas, and locally defined tables.
        var context = new SqlContext(parGroup);
        return ContextFromData(Tables(null).Concat(Schemas())
          .Concat(context.Definitions.Select(d => new SqlGeneralCompletionData() {
            Text = d,
            Description = "Locally defined table",
            Image = WpfImages.Class16
          }))
          .OrderBy(i => i.Text));
      }
      else if (literal.Text == "(")
      {
        // Opening paren after a function name: show the overload tooltip
        // and, for date functions, offer date-part completions.
        var prev = literal.PreviousLiteral();
        if (prev != null)
        {
          if (CurrentTextArea != null)
          {
            var overloads = from f in _coreFunctions
                            where string.Equals(f.Name, prev.Text, StringComparison.OrdinalIgnoreCase)
                            select new Overload(f.Usage, f.Description);
            if (overloads.Any())
            {
              _overloadWin = new OverloadInsightWindow(CurrentTextArea);
              _overloadWin.StartOffset = caret;
              _overloadWin.EndOffset = caret + 1;
              _overloadWin.Provider = new OverloadList().AddRange(overloads);
              _overloadWin.Show();
              _overloadWin.Closed += (s, e) =>
              {
                _overloadWin = null;
                _overloadName = null;
              };
              _overloadName = prev.Text;
            }
          }
          switch (prev.Text.ToUpperInvariant())
          {
            case "DATEADD":
            case "DATEDIFF":
            case "DATEDIFF_BIG":
            case "DATEFROMPARTS":
            case "DATENAME":
            case "DATEPART":
              // Date functions take a date-part name as first argument.
              return ContextFromData(_datePartNames.Select(n => new SqlGeneralCompletionData() {
                Text = n[0] + (n[1] == n[0] ? "" : " (" + n[1] + ")"),
                Description = n[1],
                Image = WpfImages.EnumValue16,
                Action = () => n[0]
              })
              .OrderBy(i => i.Text));
          }
        }
      }
      else if (literal.Text == ".")
      {
        // Dot inside a name: complete either schema members or table columns.
        var name = literal.Parent as SqlName;
        if (name != null)
        {
          if (name.IsTable)
          {
            var idx = name.IndexOf(literal);
            var schema = name[idx - 1].Text;
            if (_provider.GetSchemaNames().Contains(schema, StringComparer.OrdinalIgnoreCase))
            {
              return ContextFromData(Tables(schema).Concat(Functions(true, schema))
                .OrderBy(i => i.Text));
            }
          }
          else
          {
            var group = name.Parent as SqlGroup;
            if (group != null)
            {
              var idx = name.IndexOf(literal);
              var context = new SqlContext(group);
              SqlTableInfo info;
              if (idx > 0 && name[idx - 1] is SqlLiteral && context.TryByName(((SqlLiteral)name[idx - 1]).Text.ToLowerInvariant(), out info))
              {
                return Columns(info, false).ToPromise().Convert(c => new CompletionContext() { Items = c });
              }
            }
          }
        }
      }
      else if (literal.Type == SqlType.Keyword || literal.Type == SqlType.Operator)
      {
        // Keyword-specific continuations first.
        switch (literal.Text.ToLowerInvariant())
        {
          case "union":
            return ContextFromData(MatchCase(literal.Text, "all", "select")
              .GetCompletions<SqlGeneralCompletionData>());
          case "group":
          case "order":
          case "partition":
            return ContextFromData(MatchCase(literal.Text, "by")
              .GetCompletions<SqlGeneralCompletionData>());
          case "insert":
            return ContextFromData(MatchCase(literal.Text, "into")
              .GetCompletions<SqlGeneralCompletionData>());
          case "delete":
            return ContextFromData(MatchCase(literal.Text, "from")
              .GetCompletions<SqlGeneralCompletionData>());
        }
        var group = literal.Parent as SqlGroup;
        if (group != null)
        {
          // List of sql specific constructs for the context
          var sqlOthers = new List<string>();
          switch (literal.Text.ToLowerInvariant())
          {
            case "select":
              sqlOthers.Add("*");
              sqlOthers.Add("distinct");
              sqlOthers.Add("top");
              break;
          }
          // Table aliases and functions
          var context = new SqlContext(group);
          var others = context.Tables
            .Where(t => !string.IsNullOrEmpty(t.Alias))
            .Select(t => t.Alias)
            .Distinct()
            .Select(t => new SqlGeneralCompletionData() {
              Text = t,
              Image = WpfImages.Class16
            })
            .Concat(Functions(false, null));
          // Table columns
          return Promises.All(context.Tables.Select(t => Columns(t).ToPromise()).ToArray())
            .Convert(l => new CompletionContext() {
              Items = l.OfType<IEnumerable<ICompletionData>>()
                .SelectMany(p => p)
                .Concat(MatchCase(literal.Text, sqlOthers).Select(o => new SqlGeneralCompletionData() {
                  Text = o,
                  Image = WpfImages.Operator16
                }))
                .Concat(others)
                .Concat(Schemas())
                .OrderBy(i => i.Text, StringComparer.OrdinalIgnoreCase)
            });
        }
      }
    }
    return Promises.Resolved(new CompletionContext());
  }
  catch (Exception ex)
  {
    return Promises.Rejected<CompletionContext>(ex);
  }
}
/// <summary>
/// Pages the query by injecting a "top {limit}" clause before the first
/// SELECT column. Returns null when no SELECT column list can be found.
/// NOTE(review): the <paramref name="offset"/> argument is never used —
/// presumably this dialect cannot express an offset; confirm callers never
/// pass one expecting it to be honored.
/// </summary>
public override SqlString GetLimitString(SqlString querySqlString, SqlString offset, SqlString limit)
{
    var tokenEnum = new SqlTokenizer(querySqlString).GetEnumerator();
    if (!tokenEnum.TryParseUntilFirstMsSqlSelectColumn()) return null;
    int insertPoint = tokenEnum.Current.SqlIndex;
    return querySqlString.Insert(insertPoint, new SqlString("top ", limit, " "));
}
/// <summary>
/// Parsing must survive truncated input: the final (unclosed) group should
/// still end with the table name read before the text ran out.
/// </summary>
public void SqlPartialTest()
{
    var sql = @"select * from ( select from innovator.[PART]";

    var parsed = new SqlTokenizer(sql).Parse();
    var lastGroup = (SqlGroup)parsed.Last();

    Assert.AreEqual(typeof(SqlName), lastGroup.Last().GetType());
}
/// <summary>
/// Yields every name in <paramref name="sql"/> that either lives in the
/// "innovator" schema or is a bare, dot-free identifier that is not a
/// parameter reference.
/// </summary>
public static IEnumerable<SqlName> GetInnovatorNames(string sql)
{
    var tokens = new SqlTokenizer(sql).ToArray();
    return tokens.OfType<SqlName>().Where(IsInnovatorOrBareName);
}

// True for schema-qualified "innovator.x" names and for bare identifiers
// (no dots) that are not @parameters.
private static bool IsInnovatorOrBareName(SqlName n)
{
    if (string.Equals(n[0].Text, "innovator", StringComparison.OrdinalIgnoreCase))
        return true;
    return !n.Any(l => l.Text == ".") && !n[0].Text.StartsWith("@");
}