/// <summary>
/// Emits the tokenizer's pending character as a new single-character token
/// of the given type, marking the character as consumed.
/// </summary>
/// <param name="state">Current tokenization state holding the pending character.</param>
/// <param name="tokenType">Type to assign to the emitted token.</param>
private static void SaveCurrentCharToNewToken(TokenizationState state, SqlTokenType tokenType)
{
    var consumedChar = state.CurrentChar;
    // The pending character is consumed by this token, so clear the flag
    // before handing off to SaveToken (which reads it for position math).
    state.HasUnprocessedCurrentCharacter = false;
    SaveToken(state, tokenType, consumedChar.ToString());
}
/// <summary>
/// Initializes a token with its type, raw text, and character span.
/// </summary>
/// <param name="tokenType">Classification of the token.</param>
/// <param name="text">Raw token text.</param>
/// <param name="startPosition">Start character index of the token.</param>
/// <param name="endPosition">End character index of the token.</param>
public SqlToken(SqlTokenType tokenType, string text, int startPosition, int endPosition)
{
    StartPosition = startPosition;
    EndPosition = endPosition;
    TokenType = tokenType;
    Text = text;
}
/// <summary>
/// Extracts the payload of a block-comment directive token by stripping its
/// type-specific prefix and the trailing "*/" (2 characters), then trimming
/// surrounding whitespace. Non-directive tokens are returned unchanged.
/// </summary>
/// <param name="tokenType">Type of the token being unwrapped.</param>
/// <param name="token">Full token text including comment delimiters.</param>
/// <returns>The trimmed inner text, or the original token for other types.</returns>
internal static string Extract(SqlTokenType tokenType, string token)
{
    // Length of the opening delimiter for each directive form; the closing
    // delimiter is always 2 characters long.
    int prefixLength;
    switch (tokenType)
    {
        case SqlTokenType.BIND_VARIABLE_BLOCK_COMMENT:
            prefixLength = 2;
            break;
        case SqlTokenType.LITERAL_VARIABLE_BLOCK_COMMENT:
        case SqlTokenType.EMBEDDED_VARIABLE_BLOCK_COMMENT:
            prefixLength = 3;
            break;
        case SqlTokenType.IF_BLOCK_COMMENT:
            prefixLength = 5;
            break;
        case SqlTokenType.FOR_BLOCK_COMMENT:
            prefixLength = 6;
            break;
        case SqlTokenType.ELSEIF_BLOCK_COMMENT:
        case SqlTokenType.EXPAND_BLOCK_COMMENT:
            prefixLength = 9;
            break;
        case SqlTokenType.POPULATE_BLOCK_COMMENT:
            prefixLength = 11;
            break;
        default:
            return token;
    }
    return TrimWhitespace(token.Substring(prefixLength, token.Length - 2 - prefixLength));
}
/// <summary>
/// Initializes a token with its type, value, and character range.
/// </summary>
/// <param name="type">Classification of the token.</param>
/// <param name="value">Token text.</param>
/// <param name="startCharIndex">Index of the first character of the token.</param>
/// <param name="endCharIndex">Index of the last character of the token.</param>
public SqlToken(SqlTokenType type, string value, int startCharIndex, int endCharIndex)
{
    this.startCharIndex = startCharIndex;
    this.endCharIndex = endCharIndex;
    this.type = type;
    this.value = value;
}
/// <summary>
/// Initializes a token that references a slice of the underlying SQL source.
/// </summary>
/// <param name="tokenType">Classification of the token.</param>
/// <param name="sql">Backing SQL object the token points into.</param>
/// <param name="sqlIndex">Start offset of the token within the SQL.</param>
/// <param name="length">Number of characters covered by the token.</param>
public SqlToken(SqlTokenType tokenType, SqlString sql, int sqlIndex, int length)
{
    _sql = sql;
    _sqlIndex = sqlIndex;
    _length = length;
    _tokenType = tokenType;
}
/// <summary>
/// Maps a token type to its syntax-highlighting color.
/// Unlisted token types render in black.
/// </summary>
/// <param name="sqlTokenType">Token type to colorize.</param>
/// <returns>The display color for the token type.</returns>
private static Color GetColor(SqlTokenType sqlTokenType)
{
    switch (sqlTokenType)
    {
        case SqlTokenType.String:
            return Color.Red;
        case SqlTokenType.Keyword:
        case SqlTokenType.DataType:
            return Color.Blue;
        case SqlTokenType.Operator:
        case SqlTokenType.Sign:
            return Color.Gray;
        case SqlTokenType.Function:
            return Color.Magenta;
        case SqlTokenType.Comment:
            return Color.Green;
        default:
            return Color.Black;
    }
}
/// <summary>
/// Initializes a token with a type and value but no positional information
/// (both character indexes default to zero).
/// </summary>
/// <param name="type">Classification of the token.</param>
/// <param name="value">Token text.</param>
public SqlToken(SqlTokenType type, string value)
{
    this.type = type;
    this.value = value;
    this.startCharIndex = 0;
    this.endCharIndex = 0;
}
/// <summary>
/// Peeks at the next token without consuming it and verifies its type,
/// throwing a parsing exception on a mismatch.
/// </summary>
/// <param name="ITokenizer">Tokenizer to peek into.</param>
/// <param name="type">Expected token type.</param>
/// <returns>The peeked token when its type matches.</returns>
public static SqlToken ExpectPeek(this ITokenizer ITokenizer, SqlTokenType type)
{
    var next = ITokenizer.Peek();
    if (next.Type == type)
    {
        return next;
    }
    throw ParsingException.UnexpectedToken(type, next);
}
/// <summary>
/// Initializes the parameter bundle describing one literal-value test case.
/// </summary>
/// <param name="chars">Source character sequence under test.</param>
/// <param name="tokenType">Expected token type.</param>
/// <param name="providerType">Expected provider type.</param>
/// <param name="value">Expected parsed value.</param>
/// <param name="errorCode">Expected error code, if an error is expected.</param>
/// <param name="errorMessage">Expected error message, if an error is expected.</param>
public LiteralValueTestParameters(
    string chars,
    SqlTokenType tokenType,
    HsqlProviderType providerType,
    object value,
    int? errorCode,
    string errorMessage)
{
    Chars = chars;
    TokenType = tokenType;
    ProviderType = providerType;
    Value = value;
    ErrorCode = errorCode;
    ErrorMessage = errorMessage;
}
/// <summary>
/// Initializes a new instance of the <see cref="Token"/> class.
/// </summary>
/// <param name="value">
/// The token value, which must be in its normalized character sequence form;
/// must not be null or empty.
/// </param>
/// <param name="type">
/// Type of the token, which allows the given token value to be correctly
/// converted to a <see cref="LiteralValue"/>, etc.; must not be
/// <c>SqlTokenType.None</c>.
/// </param>
/// <exception cref="ArgumentNullException">When value is null or empty.</exception>
/// <exception cref="ArgumentOutOfRangeException">When type is None.</exception>
public Token(string value, SqlTokenType type)
{
    // Guard clauses: reject unusable values up front.
    if (string.IsNullOrEmpty(value))
    {
        throw new ArgumentNullException("value");
    }
    if (type == SqlTokenType.None)
    {
        throw new ArgumentOutOfRangeException(
            "type", type, "Not a valid token type");
    }
    m_value = value;
    m_type = type;
}
/// <summary>
/// Four-character lookahead: recognizes the FROM keyword case-insensitively.
/// Falls back to the three-character lookahead when no match completes.
/// </summary>
private void PeekFourChars(char c, char c2, char c3, char c4)
{
    bool isFrom = (c == 'F' || c == 'f')
               && (c2 == 'R' || c2 == 'r')
               && (c3 == 'O' || c3 == 'o')
               && (c4 == 'M' || c4 == 'm');
    if (isFrom)
    {
        _tokenType = SqlTokenType.FROM_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete four-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekThreeChars(c, c2, c3);
}
/// <summary>
/// Consumes consecutive tokens of the given type. Stops (without put-back)
/// at end of input, or when asked to skip EndOfInput itself; puts back the
/// first token that does not match.
/// </summary>
/// <param name="ITokenizer">Tokenizer to read from.</param>
/// <param name="type">Token type to skip over.</param>
public static void Skip(this ITokenizer ITokenizer, SqlTokenType type)
{
    for (; ; )
    {
        var token = ITokenizer.GetNext();
        bool atEnd = token.Type == SqlTokenType.EndOfInput
                  || type == SqlTokenType.EndOfInput;
        if (atEnd)
        {
            return;
        }
        if (token.Type != type)
        {
            ITokenizer.PutBack(token);
            return;
        }
    }
}
/// <summary>
/// Tests whether the next token has the given type and value. The token is
/// consumed only when it matches and <paramref name="consume"/> is true;
/// otherwise it is put back.
/// </summary>
/// <param name="ITokenizer">Tokenizer to read from.</param>
/// <param name="type">Expected token type.</param>
/// <param name="value">Expected token value.</param>
/// <param name="consume">When true, a matching token stays consumed.</param>
/// <returns>True when the next token matches both type and value.</returns>
public static bool NextIs(this ITokenizer ITokenizer, SqlTokenType type, string value, bool consume = false)
{
    var token = ITokenizer.GetNext();
    if (token.Type != type || token.Value != value)
    {
        ITokenizer.PutBack(token);
        return false;
    }
    if (!consume)
    {
        ITokenizer.PutBack(token);
    }
    return true;
}
/// <summary>
/// Consumes the next token and verifies it has the given type and one of the
/// given values, throwing a parsing exception otherwise.
/// </summary>
/// <param name="ITokenizer">Tokenizer to read from.</param>
/// <param name="type">Expected token type.</param>
/// <param name="values">Acceptable token values.</param>
/// <returns>The consumed token when it matches.</returns>
public static SqlToken Expect(this ITokenizer ITokenizer, SqlTokenType type, params string[] values)
{
    var next = ITokenizer.GetNext();
    if (next.Type == type && Array.IndexOf(values, next.Value) >= 0)
    {
        return next;
    }
    throw ParsingException.UnexpectedToken(type, values, next);
}
/// <summary>
/// Six-character lookahead: recognizes SELECT, HAVING, EXCEPT, and UPDATE
/// case-insensitively. Falls back to the five-character lookahead when no
/// match completes.
/// </summary>
private void PeekSixChars(char c, char c2, char c3, char c4, char c5, char c6)
{
    bool isSelect = (c == 'S' || c == 's') && (c2 == 'E' || c2 == 'e')
                 && (c3 == 'L' || c3 == 'l') && (c4 == 'E' || c4 == 'e')
                 && (c5 == 'C' || c5 == 'c') && (c6 == 'T' || c6 == 't');
    bool isHaving = (c == 'H' || c == 'h') && (c2 == 'A' || c2 == 'a')
                 && (c3 == 'V' || c3 == 'v') && (c4 == 'I' || c4 == 'i')
                 && (c5 == 'N' || c5 == 'n') && (c6 == 'G' || c6 == 'g');
    bool isExcept = (c == 'E' || c == 'e') && (c2 == 'X' || c2 == 'x')
                 && (c3 == 'C' || c3 == 'c') && (c4 == 'E' || c4 == 'e')
                 && (c5 == 'P' || c5 == 'p') && (c6 == 'T' || c6 == 't');
    bool isUpdate = (c == 'U' || c == 'u') && (c2 == 'P' || c2 == 'p')
                 && (c3 == 'D' || c3 == 'd') && (c4 == 'A' || c4 == 'a')
                 && (c5 == 'T' || c5 == 't') && (c6 == 'E' || c6 == 'e');
    if (isSelect)
    {
        _tokenType = SqlTokenType.SELECT_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isHaving)
    {
        _tokenType = SqlTokenType.HAVING_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isExcept)
    {
        _tokenType = SqlTokenType.EXCEPT_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isUpdate)
    {
        _tokenType = SqlTokenType.UPDATE_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete six-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekFiveChars(c, c2, c3, c4, c5);
}
/// <summary>
/// Ten-character lookahead: recognizes "FOR UPDATE" (with whitespace between
/// the words) case-insensitively. Falls back to the nine-character lookahead
/// when no match completes.
/// </summary>
private void PeekTenChars(char c, char c2, char c3, char c4, char c5, char c6, char c7, char c8, char c9, char c10)
{
    bool isForUpdate = (c == 'F' || c == 'f') && (c2 == 'O' || c2 == 'o')
                    && (c3 == 'R' || c3 == 'r')
                    && SqlTokenHelper.IsWhitespace(c4)
                    && (c5 == 'U' || c5 == 'u') && (c6 == 'P' || c6 == 'p')
                    && (c7 == 'D' || c7 == 'd') && (c8 == 'A' || c8 == 'a')
                    && (c9 == 'T' || c9 == 't') && (c10 == 'E' || c10 == 'e');
    if (isForUpdate)
    {
        _tokenType = SqlTokenType.FOR_UPDATE_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete ten-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekNineChars(c, c2, c3, c4, c5, c6, c7, c8, c9);
}
/// <summary>
/// Nine-character lookahead: recognizes INTERSECT case-insensitively.
/// Falls back to the eight-character lookahead when no match completes.
/// </summary>
private void PeekNineChars(char c, char c2, char c3, char c4, char c5, char c6, char c7, char c8, char c9)
{
    bool isIntersect = (c == 'I' || c == 'i') && (c2 == 'N' || c2 == 'n')
                    && (c3 == 'T' || c3 == 't') && (c4 == 'E' || c4 == 'e')
                    && (c5 == 'R' || c5 == 'r') && (c6 == 'S' || c6 == 's')
                    && (c7 == 'E' || c7 == 'e') && (c8 == 'C' || c8 == 'c')
                    && (c9 == 'T' || c9 == 't');
    if (isIntersect)
    {
        _tokenType = SqlTokenType.INTERSECT_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete nine-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekEightChars(c, c2, c3, c4, c5, c6, c7, c8);
}
/// <summary>
/// Appends a new token to the token container and, when a marker position was
/// requested and falls within this token, records the token and the marker's
/// offset inside it.
/// </summary>
/// <param name="state">Current tokenization state (reader position, marker request).</param>
/// <param name="tokenType">Type of the token being saved.</param>
/// <param name="tokenValue">Text of the token being saved.</param>
private static void SaveToken(TokenizationState state, SqlTokenType tokenType, string tokenValue)
{
    var foundToken = new Token(tokenType, tokenValue);
    state.TokenContainer.Add(foundToken);
    // The reader has read one character past the token when a character is
    // still pending, so subtract 1 in that case.
    long positionOfLastCharacterInToken = state.InputReader.LastCharacterPosition - (state.HasUnprocessedCurrentCharacter ? 1 : 0);
    // Only the first token that reaches the requested position becomes the
    // marker token (MarkerToken == null guards against overwriting).
    if (state.RequestedMarkerPosition != null
        && state.TokenContainer.MarkerToken == null
        && state.RequestedMarkerPosition <= positionOfLastCharacterInToken
        )
    {
        state.TokenContainer.MarkerToken = foundToken;
        //TODO: this is wrong for container types, as commented elsewhere. the marker position will be too high.
        var rawPositionInToken = foundToken.Value.Length - (positionOfLastCharacterInToken - state.RequestedMarkerPosition);
        // temporarily bypass overflow issues without fixing underlying problem
        // (clamps the computed in-token offset to the token length)
        state.TokenContainer.MarkerPosition = rawPositionInToken > foundToken.Value.Length ? foundToken.Value.Length : rawPositionInToken;
    }
}
/// <summary>
/// Eight-character lookahead: recognizes GROUP BY, ORDER BY, and "OPTION ("
/// case-insensitively. Falls back to the seven-character lookahead when no
/// match completes.
/// </summary>
private void PeekEightChars(char c, char c2, char c3, char c4, char c5, char c6, char c7, char c8)
{
    if ((c == 'g' || c == 'G') && (c2 == 'r' || c2 == 'R') && (c3 == 'o' || c3 == 'O')
        && (c4 == 'u' || c4 == 'U') && (c5 == 'p' || c5 == 'P')
        && (SqlTokenHelper.IsWhitespace(c6))
        && (c7 == 'b' || c7 == 'B') && (c8 == 'y' || c8 == 'Y'))
    {
        _tokenType = SqlTokenType.GROUP_BY_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if ((c == 'o' || c == 'O') && (c2 == 'r' || c2 == 'R') && (c3 == 'd' || c3 == 'D')
        && (c4 == 'e' || c4 == 'E') && (c5 == 'r' || c5 == 'R')
        // Consistency fix: previously used char.IsWhiteSpace here while the
        // GROUP BY branch above and PeekTenChars use SqlTokenHelper.IsWhitespace;
        // all word-separator checks now go through the same helper.
        && (SqlTokenHelper.IsWhitespace(c6))
        && (c7 == 'b' || c7 == 'B') && (c8 == 'y' || c8 == 'Y'))
    {
        _tokenType = SqlTokenType.ORDER_BY_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if ((c == 'o' || c == 'O') && (c2 == 'p' || c2 == 'P') && (c3 == 't' || c3 == 'T')
        && (c4 == 'i' || c4 == 'I') && (c5 == 'o' || c5 == 'O') && (c6 == 'n' || c6 == 'N')
        && (SqlTokenHelper.IsWhitespace(c7)) && (c8 == '('))
    {
        // OPTION is matched by "option" + whitespace + "(", so back up two
        // characters (the whitespace and the parenthesis) and stop.
        _tokenType = SqlTokenType.OPTION_WORD;
        _charPosition = _charPosition - 2;
        return;
    }
    // No complete eight-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekSevenChars(c, c2, c3, c4, c5, c6, c7);
}
/// <summary>
/// Five-character lookahead: recognizes WHERE, UNION, and MINUS
/// case-insensitively. Falls back to the four-character lookahead when no
/// match completes.
/// </summary>
private void PeekFiveChars(char c, char c2, char c3, char c4, char c5)
{
    bool isWhere = (c == 'W' || c == 'w') && (c2 == 'H' || c2 == 'h')
                && (c3 == 'E' || c3 == 'e') && (c4 == 'R' || c4 == 'r')
                && (c5 == 'E' || c5 == 'e');
    bool isUnion = (c == 'U' || c == 'u') && (c2 == 'N' || c2 == 'n')
                && (c3 == 'I' || c3 == 'i') && (c4 == 'O' || c4 == 'o')
                && (c5 == 'N' || c5 == 'n');
    bool isMinus = (c == 'M' || c == 'm') && (c2 == 'I' || c2 == 'i')
                && (c3 == 'N' || c3 == 'n') && (c4 == 'U' || c4 == 'u')
                && (c5 == 'S' || c5 == 's');
    if (isWhere)
    {
        _tokenType = SqlTokenType.WHERE_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isUnion)
    {
        _tokenType = SqlTokenType.UNION_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isMinus)
    {
        _tokenType = SqlTokenType.MINUS_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete five-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekFourChars(c, c2, c3, c4);
}
/// <summary>
/// Three-character lookahead: recognizes AND and SET case-insensitively.
/// Falls back to the two-character lookahead when no match completes.
/// </summary>
private void PeekThreeChars(char c, char c2, char c3)
{
    bool isAnd = (c == 'A' || c == 'a')
              && (c2 == 'N' || c2 == 'n')
              && (c3 == 'D' || c3 == 'd');
    bool isSet = (c == 'S' || c == 's')
              && (c2 == 'E' || c2 == 'e')
              && (c3 == 'T' || c3 == 't');
    if (isAnd)
    {
        _tokenType = SqlTokenType.AND_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    else if (isSet)
    {
        _tokenType = SqlTokenType.SET_WORD;
        if (IsWordEnded())
        {
            return;
        }
    }
    // No complete three-character keyword: back up one character and
    // retry the shorter match.
    _charPosition = _charPosition - 1;
    PeekTwoChars(c, c2);
}
/// <summary>
/// Maps a token type to its equivalent SQL XML element name.
/// </summary>
/// <param name="tokenType">Token type to translate.</param>
/// <returns>The corresponding element-name constant.</returns>
/// <exception cref="ArgumentOutOfRangeException">
/// When the token type has no XML element mapping.
/// </exception>
private string GetEquivalentSqlNodeName(SqlTokenType tokenType)
{
    switch (tokenType)
    {
        case SqlTokenType.WhiteSpace:
            return SqlXmlConstants.ENAME_WHITESPACE;
        case SqlTokenType.SingleLineComment:
            return SqlXmlConstants.ENAME_COMMENT_SINGLELINE;
        case SqlTokenType.MultiLineComment:
            return SqlXmlConstants.ENAME_COMMENT_MULTILINE;
        case SqlTokenType.BracketQuotedName:
            return SqlXmlConstants.ENAME_BRACKET_QUOTED_NAME;
        case SqlTokenType.Asterisk:
            return SqlXmlConstants.ENAME_ASTERISK;
        case SqlTokenType.Comma:
            return SqlXmlConstants.ENAME_COMMA;
        case SqlTokenType.Period:
            return SqlXmlConstants.ENAME_PERIOD;
        case SqlTokenType.NationalString:
            return SqlXmlConstants.ENAME_NSTRING;
        case SqlTokenType.String:
            return SqlXmlConstants.ENAME_STRING;
        case SqlTokenType.QuotedString:
            return SqlXmlConstants.ENAME_QUOTED_STRING;
        case SqlTokenType.OtherOperator:
            return SqlXmlConstants.ENAME_OTHEROPERATOR;
        case SqlTokenType.Number:
            return SqlXmlConstants.ENAME_NUMBER_VALUE;
        case SqlTokenType.MonetaryValue:
            return SqlXmlConstants.ENAME_MONETARY_VALUE;
        case SqlTokenType.BinaryValue:
            return SqlXmlConstants.ENAME_BINARY_VALUE;
        case SqlTokenType.PseudoName:
            return SqlXmlConstants.ENAME_PSEUDONAME;
        default:
            // Throw a specific exception type instead of the former bare
            // System.Exception: callers catching Exception still work, and the
            // offending argument value is now reported.
            throw new ArgumentOutOfRangeException(
                "tokenType", tokenType, "Mapping not found for provided Token Type");
    }
}
/// <summary>
/// Initializes a token from its type and value.
/// </summary>
/// <param name="type">Classification of the token.</param>
/// <param name="value">Token text.</param>
public Token(SqlTokenType type, string value)
{
    Value = value;
    Type = type;
}
/// <summary>
/// Updates the include mask for a token type: clears the type's bits when
/// <paramref name="canYield"/> is true, sets them otherwise.
/// </summary>
/// <param name="tokenType">Token type flag(s) to toggle in the mask.</param>
/// <param name="canYield">Selects whether the bits are cleared or set.</param>
private void Ignore(SqlTokenType tokenType, bool canYield)
{
    if (canYield)
    {
        _includeTokens = _includeTokens & ~tokenType;
    }
    else
    {
        _includeTokens = _includeTokens | tokenType;
    }
}
/// <summary>
/// Enumerates the SQL parts as tokens. Parameter parts become single
/// Parameter tokens; string parts are scanned character-by-character into
/// bracket, delimited-text, comma, comment, whitespace, and plain-text
/// tokens. Token types filtered out by CanYield are skipped but their
/// positions still advance.
/// </summary>
/// <returns>An enumerator over the recognized tokens.</returns>
public IEnumerator <SqlToken> GetEnumerator()
{
    // Running character index into the overall SQL across all parts.
    int sqlIndex = 0;
    foreach (var part in _sql)
    {
        var parameter = part as Parameter;
        if (parameter != null)
        {
            // A parameter occupies exactly one character position.
            if (CanYield(SqlTokenType.Parameter))
            {
                yield return(new SqlToken(SqlTokenType.Parameter, _sql, sqlIndex, 1));
            }
            sqlIndex++;
            continue;
        }
        var text = part as string;
        if (text != null)
        {
            int offset = 0;
            int maxOffset = text.Length;
            // Start of the pending plain-text run not yet emitted.
            int tokenOffset = 0;
            // 0 means "no special token recognized at the current offset".
            SqlTokenType nextTokenType = 0;
            int nextTokenLength = 0;
            while (offset < maxOffset)
            {
                var ch = text[offset];
                switch (ch)
                {
                    case '(':
                        nextTokenType = SqlTokenType.BracketOpen;
                        nextTokenLength = 1;
                        break;
                    case ')':
                        nextTokenType = SqlTokenType.BracketClose;
                        nextTokenLength = 1;
                        break;
                    case '\'': // String literals
                    case '\"': // ANSI quoted identifiers
                    case '[':  // Sql Server quoted identifiers
                        nextTokenType = SqlTokenType.DelimitedText;
                        nextTokenLength = SqlParserUtils.ReadDelimitedText(text, maxOffset, offset);
                        break;
                    case ',':
                        nextTokenType = SqlTokenType.Comma;
                        nextTokenLength = 1;
                        break;
                    case '/':
                        // Only "/*" starts a comment; a lone '/' is plain text.
                        if (offset + 1 < maxOffset && text[offset + 1] == '*')
                        {
                            nextTokenType = SqlTokenType.Comment;
                            nextTokenLength = SqlParserUtils.ReadMultilineComment(text, maxOffset, offset);
                        }
                        break;
                    case '-':
                        // Only "--" starts a line comment; a lone '-' is plain text.
                        if (offset + 1 < maxOffset && text[offset + 1] == '-')
                        {
                            nextTokenType = SqlTokenType.Comment;
                            nextTokenLength = SqlParserUtils.ReadLineComment(text, maxOffset, offset);
                        }
                        break;
                    default:
                        if (char.IsWhiteSpace(ch))
                        {
                            nextTokenType = SqlTokenType.Whitespace;
                            nextTokenLength = SqlParserUtils.ReadWhitespace(text, maxOffset, offset);
                        }
                        break;
                }
                if (nextTokenType != 0)
                {
                    // Flush any plain text accumulated before this token.
                    if (offset > tokenOffset)
                    {
                        if (CanYield(SqlTokenType.Text))
                        {
                            yield return(new SqlToken(SqlTokenType.Text, _sql, sqlIndex + tokenOffset, offset - tokenOffset));
                        }
                    }
                    if (CanYield(nextTokenType))
                    {
                        yield return(new SqlToken(nextTokenType, _sql, sqlIndex + offset, nextTokenLength));
                    }
                    offset += nextTokenLength;
                    tokenOffset = offset;
                    nextTokenType = 0;
                    nextTokenLength = 0;
                }
                else
                {
                    offset++;
                }
            }
            // Flush the trailing plain-text run, if any.
            if (maxOffset > tokenOffset
                && CanYield(SqlTokenType.Text))
            {
                yield return(new SqlToken(SqlTokenType.Text, _sql, sqlIndex + tokenOffset, maxOffset - tokenOffset));
            }
            sqlIndex += maxOffset;
        }
    }
}
/// <summary>
/// Initializes the parameter bundle describing one literal-value test case.
/// </summary>
/// <param name="chars">Source character sequence under test.</param>
/// <param name="tokenType">Expected token type.</param>
/// <param name="providerType">Expected provider type.</param>
/// <param name="value">Expected parsed value.</param>
/// <param name="errorCode">Expected error code, if an error is expected.</param>
/// <param name="errorMessage">Expected error message, if an error is expected.</param>
public LiteralValueTestParameters(
    string chars,
    SqlTokenType tokenType,
    HsqlProviderType providerType,
    object value,
    int? errorCode,
    string errorMessage)
{
    Chars = chars;
    TokenType = tokenType;
    ProviderType = providerType;
    Value = value;
    ErrorCode = errorCode;
    ErrorMessage = errorMessage;
}
/// <summary>
/// Returns true when every flag bit of <paramref name="tokenType"/> is set
/// in the include mask.
/// </summary>
/// <param name="tokenType">Token type flag(s) to test.</param>
private bool CanYield(SqlTokenType tokenType)
{
    var masked = _includeTokens & tokenType;
    return masked == tokenType;
}
/// <summary>
/// Initializes an expected-token descriptor from its type and value.
/// </summary>
/// <param name="tokenType">Expected token type.</param>
/// <param name="value">Expected token value.</param>
public ExpectedToken(SqlTokenType tokenType, string value)
{
    TokenType = tokenType;
    Value = value;
}
/// <summary>
/// Tests whether this token has the given type and a case-insensitively
/// equal value.
/// </summary>
/// <param name="type">Token type to compare against.</param>
/// <param name="value">Token value to compare against (case-insensitive).</param>
/// <returns>True when both type and value match.</returns>
public bool Equals(SqlTokenType type, string value)
{
    // Use an explicit ordinal, case-insensitive comparison. The previous
    // string.Compare(a, b, true) overload compared using the current culture,
    // which can mis-match under certain locales (e.g. the Turkish 'I').
    return this.type == type
        && string.Equals(this.Value, value, StringComparison.OrdinalIgnoreCase);
}
/// <summary>
/// Appends the statically-bound representation of this
/// list to the given <c>StringBuilder</c>.
/// </summary>
/// <param name="sb">
/// The <c>StringBuilder</c> to which to append; created (or grown) to the
/// precomputed normalized capacity when needed.
/// </param>
/// <param name="parameters">The parameters to bind.</param>
/// <param name="strict">
/// When <c>true</c>, an exception is raised if there exist
/// any unbound named parameter or parameter marker tokens.
/// </param>
/// <returns>
/// The <c>StringBuilder</c> to which has been appended
/// the statically-bound representation of this list.
/// </returns>
/// <remarks>
/// The statically-bound representation of this list is the character
/// sequence representation in which each <c>NamedParameter</c> <c>Token</c>
/// is replaced by an SQL literal character sequence representing the
/// <c>Value</c> property of the matching <c>HsqlParameter</c>
/// object from the given <c>parameters</c> collection.
/// </remarks>
public StringBuilder AppendStaticallyBoundForm(StringBuilder sb, HsqlParameterCollection parameters, bool strict)
{
    // Fast path: nothing to bind, emit the normalized form directly.
    if (ParameterCount == 0)
    {
        return(AppendNormalizedForm(sb));
    }
    else if (strict && ParameterMarkerCount > 0)
    {
        throw new HsqlDataSourceException(
            "Cannot statically bind parameter markers" +
            " ('?' tokens) by name."); //NOI18N
    }
    if (sb == null)
    {
        // Not perfectly accurate preallocation, but better than nothing.
        sb = new StringBuilder(m_normalizedCapacity);
    }
    else
    {
        sb.EnsureCapacity(sb.Length + m_normalizedCapacity);
    }
    int count = m_list.Count;
    for (int i = 0; i < count; i++)
    {
        Token token = m_list[i];
        string value = token.Value;
        SqlTokenType type = token.Type;
        // Non-comma tokens are space-separated in the normalized form.
        if (i > 0 && (Token.ValueFor.COMMA != value))
        {
            sb.Append(' ');
        }
        if (type == SqlTokenType.NamedParameter)
        {
            int index = parameters.IndexOf(value);
            if (index >= 0)
            {
                HsqlParameter parameter = (HsqlParameter)parameters[index];
                sb.Append(parameter.ToSqlLiteral());
            }
            else if (strict)
            {
                throw new HsqlDataSourceException(
                    "No binding for named parameter: " + value); // NOI18N
            }
            else // (index < 0 && strict == false)
            {
                // Effectively a named parameter pass through...
                // may be that we want to do multi-pass binding.
                sb.Append(value);
            }
        }
        // Currently, this never happens, due to the
        // (strict && ParameterMarkerCount > 0) check above.
        // stubbed in as a reminder that we might have to deal
        // with this case differently, for instance using
        // a bind-by-parameter-ordinal policy.
        else if (strict && (Token.ValueFor.QUESTION == value))
        {
            throw new HsqlDataSourceException(
                "No binding for parameter marker: " + value); //NOI18N
        }
        else
        {
            sb.Append(value);
        }
    }
    return(sb);
}
/// <summary>
/// Creates a new list from the given SQL character sequence.
/// </summary>
/// <param name="sql">The SQL character sequence.</param>
/// <remarks>
/// First pass tokenizes the SQL, accumulating the normalized-form capacity
/// and counting parameter markers and named parameters. Second pass records
/// the token positions and bind ordinals of every parameter occurrence.
/// </remarks>
public TokenList(string sql)
{
    Tokenizer tokenizer = new Tokenizer(sql);
    int parameterMarkerCount = 0;
    int namedParameterCount = 0;
    string tokenValue;
    while (!string.IsNullOrEmpty(tokenValue = tokenizer.GetNextAsString()))
    {
        string normalizedTokenValue = tokenizer.NormalizedToken;
        SqlTokenType type = tokenizer.TokenType;
        if (Token.ValueFor.COMMA != normalizedTokenValue)
        {
            // each non-comma token is space-separated
            // in the normalized form.
            m_normalizedCapacity++;
        }
        m_normalizedCapacity += normalizedTokenValue.Length;
        switch (type)
        {
            case SqlTokenType.ParameterMarker:
            {
                parameterMarkerCount++;
                break;
            }
            case SqlTokenType.NamedParameter:
            {
                namedParameterCount++;
                break;
            }
        }
        switch (type)
        {
            case SqlTokenType.IdentifierChain:
            {
                string qualiferPart = tokenizer.IdentifierChainPredecessor;
                string subjectPart = tokenValue;
                m_list.Add(new Token(normalizedTokenValue, qualiferPart, subjectPart));
                break;
            }
            default:
            {
                m_list.Add(new Token(normalizedTokenValue, type));
                break;
            }
        }
    }
    m_namedParameterTokenPositions = new int[namedParameterCount];
    m_parameterMarkerTokenPositions = new int[parameterMarkerCount];
    m_parameterTokenPositions = new int[namedParameterCount + parameterMarkerCount];
    Dictionary<string, List<int>> nameToTokenOrdinals =
        new Dictionary<string, List<int>>(namedParameterCount);
    Dictionary<string, List<int>> nameToBindOrdinals =
        new Dictionary<string, List<int>>(namedParameterCount);
    int bindOrdinal = 0;
    int namedParameterIndex = 0;
    int parameterMarkerIndex = 0;
    int parameterIndex = 0;
    int count = m_list.Count;
    for (int i = 0; i < count; i++)
    {
        Token token = m_list[i];
        SqlTokenType type = token.Type;
        switch (type)
        {
            case SqlTokenType.NamedParameter:
            {
                List<int> tokenOrdinals;
                List<int> bindOrdinals;
                string key = token.Value;
                if (!nameToTokenOrdinals.TryGetValue(
                    key, out tokenOrdinals))
                {
                    tokenOrdinals = new List<int>();
                    bindOrdinals = new List<int>();
                    nameToTokenOrdinals[key] = tokenOrdinals;
                    nameToBindOrdinals[key] = bindOrdinals;
                }
                else
                {
                    // BUG FIX: bindOrdinals was previously left null when the
                    // same named parameter occurred more than once, causing a
                    // NullReferenceException on the Add below. Reuse the list
                    // registered for this name on its first occurrence.
                    bindOrdinals = nameToBindOrdinals[key];
                }
                tokenOrdinals.Add(i);
                bindOrdinals.Add(bindOrdinal++);
                m_namedParameterTokenPositions[
                    namedParameterIndex++] = i;
                m_parameterTokenPositions[
                    parameterIndex++] = i;
                break;
            }
            case SqlTokenType.ParameterMarker:
            {
                m_parameterMarkerTokenPositions[
                    parameterMarkerIndex++] = i;
                m_parameterTokenPositions[parameterIndex++] = i;
                break;
            }
        }
    }
    // Freeze the per-name ordinal lists into arrays.
    m_nameToTokenOrdinals = new Dictionary<string, int[]>();
    foreach (string parameterName in nameToTokenOrdinals.Keys)
    {
        m_nameToTokenOrdinals[parameterName] =
            nameToTokenOrdinals[parameterName].ToArray();
    }
    m_nameToBindOrdinals = new Dictionary<string, int[]>();
    foreach (string parameterName in nameToBindOrdinals.Keys)
    {
        m_nameToBindOrdinals[parameterName] =
            nameToBindOrdinals[parameterName].ToArray();
    }
}
/// <summary>
/// Single-character dispatch: classifies the current character as whitespace,
/// parenthesis, delimiter, quoted literal, word, end-of-line, or other, and
/// sets <c>_tokenType</c>. For quotes and words, advances <c>_charPosition</c>
/// to the end of the token; an unterminated quote raises a parse error.
/// </summary>
/// <param name="c">The character at the current position.</param>
private void PeekOneChar(char c)
{
    if (SqlTokenHelper.IsWhitespace(c))
    {
        _tokenType = SqlTokenType.WHITESPACE;
    }
    else if (c == '(')
    {
        _tokenType = SqlTokenType.OPENED_PARENS;
    }
    else if (c == ')')
    {
        _tokenType = SqlTokenType.CLOSED_PARENS;
    }
    else if (c == ';')
    {
        _tokenType = SqlTokenType.DELIMITER;
    }
    else if (c == '\'')
    {
        // Quoted literal: scan to the closing quote, treating '' as an
        // escaped quote that does not terminate the literal.
        _tokenType = SqlTokenType.QUOTE;
        var closed = false;
        while (HasRemaining())
        {
            _charPosition++;
            var c2 = _sql[_charPosition];
            if (c2 == '\'')
            {
                if (_charPosition + 1 < _stringLength)
                {
                    _charPosition++;
                    var c3 = _sql[_charPosition];
                    if (c3 != '\'')
                    {
                        // Lone quote: literal closed; un-read the lookahead.
                        _charPosition = _charPosition - 1;
                        closed = true;
                        break;
                    }
                }
                else
                {
                    // Quote at the very end of the input closes the literal.
                    closed = true;
                }
            }
        }
        if (closed)
        {
            return;
        }
        // Unterminated literal.
        CalcPosition();
        throw new SqlParseException(ExceptionMessageId.Esp2101, _sql, LineNumber, Position);
    }
    else if (IsWordStarted(c))
    {
        // Word token: consume word characters; an embedded quote switches to
        // quoted-literal scanning (e.g. alias'...' style input).
        _tokenType = SqlTokenType.WORD;
        while (HasRemaining())
        {
            _charPosition++;
            var c2 = _sql[_charPosition];
            if (c2 == '\'')
            {
                var closed = false;
                while (HasRemaining())
                {
                    _charPosition++;
                    var c3 = _sql[_charPosition];
                    if (c3 == '\'')
                    {
                        // NOTE(review): this branch checks "+ 2" and reads
                        // _sql[_charPosition + 1] after incrementing, whereas the
                        // top-level quote branch checks "+ 1" and reads the
                        // incremented position directly. The asymmetry looks
                        // unintentional — confirm against the tokenizer tests
                        // before changing.
                        if (_charPosition + 2 < _stringLength)
                        {
                            _charPosition++;
                            var c4 = _sql[_charPosition + 1];
                            if (c4 != '\'')
                            {
                                _charPosition = _charPosition - 1;
                                closed = true;
                                break;
                            }
                        }
                        else
                        {
                            closed = true;
                        }
                    }
                }
                if (closed)
                {
                    return;
                }
                // Unterminated literal inside a word token.
                CalcPosition();
                throw new SqlParseException(ExceptionMessageId.Esp2101, _sql, LineNumber, Position);
            }
            if (!SqlTokenHelper.IsWordPart(c2))
            {
                // First non-word character ends the word; un-read it.
                _charPosition = _charPosition - 1;
                return;
            }
        }
    }
    else if (c == '\r' || c == '\n')
    {
        _tokenType = SqlTokenType.EOL;
        _currentLineNumber++;
    }
    else
    {
        _tokenType = SqlTokenType.OTHER;
    }
}