private Token ReadWord(LexerIndex index)
{
    var start = index.Index;
    while (IsLetterOrDigit(index.Peek))
    {
        index.Next();
    }

    return new Token(index.Text, start, index.Index + 1, TokenType.Word);
}
private bool MoveToFirstNonWhite(LexerIndex index)
{
    while (IsWhiteSpace(index.Current))
    {
        if (!index.Next())
        {
            return false;
        }
    }

    return true;
}
private Token ReadNumber(LexerIndex index)
{
    var start = index.Index;
    // Look ahead for further digits or the decimal point so a value like 3.14 is
    // consumed as a single literal (the loop only ever advances onto a peeked character,
    // so the decimal point has to be detected via Peek as well).
    while (IsDigit(index.Peek) || index.Peek == DecimalPoint)
    {
        index.Next();
    }

    return new Token(index.Text, start, index.Index + 1, TokenType.Litteral);
}
private Token ReadVariable(LexerIndex index)
{
    if (!index.PeekPeek.HasValue)
    {
        return CreateSingelCharacterToken(index, TokenType.Unknown);
    }

    var start = index.Index;
    while (IsLetterOrDigit(index.Peek) || IsStartVariable(index.Peek))
    {
        index.Next();
    }

    return new Token(index.Text, start, index.Index + 1, TokenType.Variable);
}
private Token ReadCommentRestOfLine(LexerIndex index)
{
    var start = index.Index;
    while (index.Next())
    {
        if (index.Current == EndOfLine[0])
        {
            if (index.Peek == EndOfLine[1])
            {
                index.Next();
            }

            break;
        }
    }

    return new Token(index.Text, start, index.Index + 1, TokenType.Comment);
}
private Token ReadComment(LexerIndex index)
{
    var start = index.Index;
    index.Next();
    while (index.Next())
    {
        if (index.Current == CommentEnd[0] && index.Peek == CommentEnd[1])
        {
            index.Next();
            break;
        }
    }

    return new Token(index.Text, start, index.Index + 1, TokenType.Comment);
}
private Token ReadStringLitteral(LexerIndex index)
{
    var start = index.Index;
    bool lastWasEscape = false;
    while (index.Next())
    {
        if (lastWasEscape)
        {
            // The current character is escaped, so it can never close the literal.
            lastWasEscape = false;
        }
        else if (index.Current == StringLitteralEscape)
        {
            lastWasEscape = true;
        }
        else if (index.Current == StringLitteralEnd)
        {
            return new Token(index.Text, start, index.Index - 1, TokenType.StringLitteral);
        }
    }

    // The input ended before a closing delimiter was found.
    return new Token(index.Text, start, index.Index - 1, TokenType.Unknown);
}
private static Token CreateSingelCharacterToken(LexerIndex index, TokenType type)
{
    return new Token(index.Text, index.Index, index.Index + 1, type);
}
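// The LexerIndex cursor type is not part of this listing. The sketch below is only an
// assumption that satisfies the members the methods above rely on (Text, Index, Current,
// Peek, PeekPeek and Next); the real implementation may differ.
public class LexerIndex
{
    public LexerIndex(char[] text)
    {
        Text = text;
        Index = -1; // Next() must be called once before Current is valid.
    }

    public char[] Text { get; }

    public int Index { get; private set; }

    public char Current => Text[Index];

    // One and two characters of lookahead; null once the end of the input is reached.
    public char? Peek => Index + 1 < Text.Length ? Text[Index + 1] : (char?)null;

    public char? PeekPeek => Index + 2 < Text.Length ? Text[Index + 2] : (char?)null;

    // Advances the cursor one character; returns false when the input is exhausted.
    public bool Next()
    {
        if (Index + 1 >= Text.Length)
        {
            return false;
        }

        Index++;
        return true;
    }
}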
public Token[] Tokenize(char[] text)
{
    var index = new LexerIndex(text);
    var tokens = new List<Token>();
    while (index.Next())
    {
        if (!MoveToFirstNonWhite(index))
        {
            break;
        }

        if (IsSpecialCharacter(index.Current))
        {
            // Two-character markers are detected by checking the lookahead character
            // alongside the current one.
            if (index.Current == CommentRestOfLine[0] && index.Peek == CommentRestOfLine[1])
            {
                tokens.Add(ReadCommentRestOfLine(index));
            }
            else if (index.Current == CommentStart[0] && index.Peek == CommentStart[1])
            {
                tokens.Add(ReadComment(index));
            }
            else if (IsEndOfQuery(index.Current))
            {
                tokens.Add(CreateSingelCharacterToken(index, TokenType.EndOfQuery));
            }
            else
            {
                tokens.Add(CreateSingelCharacterToken(index, TokenType.SpecialCharacter));
            }
        }
        else if (IsDigit(index.Current))
        {
            tokens.Add(ReadNumber(index));
        }
        else if (IsStartVariable(index.Current))
        {
            tokens.Add(ReadVariable(index));
        }
        else if (IsLetter(index.Current))
        {
            tokens.Add(ReadWord(index));
        }
        else if (index.Current == StartParenthesis)
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.StartParenthesis));
        }
        else if (index.Current == EndParenthesis)
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.EndParenthesis));
        }
        else if (IsStartIdentifierDelimiter(index.Current))
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.StartIdentifierDelimiter));
        }
        else if (IsEndIdentifierDelimiter(index.Current))
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.EndIdentifierDelimiter));
        }
        else if (index.Current == SepparatorPoint)
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.SepparatorPoint));
        }
        else if (index.Current == Comma)
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.Comma));
        }
        else if (index.Current == StringLitteralStart)
        {
            tokens.Add(ReadStringLitteral(index));
        }
        else
        {
            tokens.Add(CreateSingelCharacterToken(index, TokenType.Unknown));
        }
    }

    return tokens.ToArray();
}
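// Example usage (a minimal sketch, not taken from the listing above): the enclosing
// lexer class is assumed to be named Lexer with a default constructor, purely for
// illustration, and the query text is an arbitrary sample. Only Tokenize and the
// Token array it returns are exercised.
public static class LexerUsageExample
{
    public static void Run()
    {
        var lexer = new Lexer(); // hypothetical class name; the listing does not show it
        Token[] tokens = lexer.Tokenize("SELECT Name FROM Users WHERE Id = @id;".ToCharArray());
        System.Console.WriteLine(tokens.Length);
    }
}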