/// <summary>
/// Drains the tokenizer and returns the collected tokens, optionally
/// filtering out comment tokens and blank tokens.
/// </summary>
/// <param name="bHaveComment">True to keep comment tokens in the result.</param>
/// <param name="bHasBlanks">True to keep BLANK tokens in the result.</param>
/// <returns>The collected tokens; never null (may be empty).</returns>
public LexTokenCollection getTokens(bool bHaveComment, bool bHasBlanks)
{
    LexTokenCollection tokens = new LexTokenCollection();
    // Stop when the tokenizer is exhausted (null) or emits the NULL sentinel type.
    for (LexToken token = this.nextToken();
         token != null && token.getType() != LexTokenTypes.NULL;
         token = this.nextToken())
    {
        bool dropComment = token.isComment() && !bHaveComment;
        bool dropBlank = token.getType() == LexTokenTypes.BLANK && !bHasBlanks;
        if (!dropComment && !dropBlank)
        {
            tokens.Add(token);
        }
    }
    return tokens;
}
/// <summary>
/// Splits a SQL script into statement batches, breaking on ";" separator
/// tokens and on "GO" batch markers (case-insensitive). The separator tokens
/// themselves are not included in the output batches.
/// </summary>
/// <param name="strSql">The SQL text to split; may be null or blank.</param>
/// <param name="isContainsComments">True to keep comment tokens inside the batches.</param>
/// <returns>
/// The non-empty batches in order, or null when the input is null/blank or
/// yields no batches (callers rely on null rather than an empty list).
/// </returns>
public static List<LexTokenCollection> SplitSQLToken(string strSql, bool isContainsComments)
{
    // Guard clause replaces the hand-rolled strSql.Trim().Length < 1 check.
    if (string.IsNullOrWhiteSpace(strSql))
    {
        return null;
    }

    SqlTokenizer tokenizer = new SqlTokenizer(strSql);
    List<LexTokenCollection> batches = new List<LexTokenCollection>();
    LexTokenCollection batch = new LexTokenCollection();
    foreach (LexToken token in tokenizer.getTokens(isContainsComments, false))
    {
        // BUG FIX: the original wrote current.Equals(";"), comparing the
        // LexToken OBJECT to a string — which never matches unless LexToken
        // overrides Equals(object) for strings (TODO confirm against LexToken).
        // Compare the token text instead, mirroring the "GO" check below.
        bool isStatementEnd = token.isSpecial() && token.getToken().Equals(";");
        // "GO" is a non-linguistic batch separator; OrdinalIgnoreCase avoids
        // culture-sensitive comparison surprises (CA1309).
        bool isBatchMarker = token.isName()
            && token.getToken().Equals("GO", StringComparison.OrdinalIgnoreCase);

        if (isStatementEnd || isBatchMarker)
        {
            // Close out the current batch; empty batches (e.g. ";;") are dropped.
            if (batch.Count > 0)
            {
                batches.Add(batch);
            }
            batch = new LexTokenCollection();
        }
        else
        {
            batch.Add(token);
        }
    }

    // Flush the trailing batch (input need not end with a separator).
    if (batch.Count > 0)
    {
        batches.Add(batch);
    }
    return batches.Count > 0 ? batches : null;
}