Example #1: ReadCommentRestOfLine
        private Token ReadCommentRestOfLine(LexerIndex index)
        {
            var start = index.Index;

            while (index.Next())
            {
                if (index.Current == EndOfLine[0])
                {
                    // Consume the second character of a two-character line break as well.
                    if (index.Peek == EndOfLine[1])
                    {
                        index.Next();
                    }
                    break;
                }
            }

            return new Token(index.Text, start, index.Index + 1, TokenType.Comment);
        }
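All of the examples on this page work against a small LexerIndex cursor whose definition is not shown. The sketch below is only a reconstruction inferred from how the examples use it (Index, Text, Current, Peek, PeekPeek and Next()); the member types and the end-of-input behaviour are assumptions, not the original implementation.

        // Minimal sketch of the LexerIndex cursor assumed by the examples.
        // Inferred from usage only; the real class may differ.
        public class LexerIndex
        {
            public LexerIndex(char[] text)
            {
                Text  = text;
                Index = -1; // Next() must be called once before Current is valid.
            }

            public char[] Text { get; }
            public int Index { get; private set; }

            // Character under the cursor.
            public char Current => Text[Index];

            // One character of lookahead; '\0' stands in for "end of input" here,
            // which is an assumption rather than documented behaviour.
            public char Peek => Index + 1 < Text.Length ? Text[Index + 1] : '\0';

            // Two characters of lookahead, null past the end of the input
            // (Example #6 calls .HasValue on it, so it must be nullable).
            public char? PeekPeek => Index + 2 < Text.Length ? Text[Index + 2] : (char?)null;

            // Advance the cursor; returns false once the input is exhausted.
            public bool Next()
            {
                if (Index + 1 >= Text.Length)
                {
                    return false;
                }
                Index++;
                return true;
            }
        }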
Example #2: MoveToFirstNonWhite
        private bool MoveToFirstNonWhite(LexerIndex index)
        {
            // Skip whitespace; returns false if the input ends before a
            // non-whitespace character is found.
            while (IsWhiteSpace(index.Current))
            {
                if (!index.Next())
                {
                    return false;
                }
            }
            return true;
        }
Example #3: ReadNumber
        private Token ReadNumber(LexerIndex index)
        {
            var start = index.Index;

            // Look one character ahead so that trailing digits and an embedded
            // decimal point are consumed as part of the same literal.
            while (IsDigit(index.Peek) || index.Peek == DecimalPoint)
            {
                index.Next();
            }

            return new Token(index.Text, start, index.Index + 1, TokenType.Litteral);
        }
Example #4: ReadWord
        private Token ReadWord(LexerIndex index)
        {
            var start = index.Index;

            while (IsLetterOrDigit(index.Peek))
            {
                index.Next();
            }

            return new Token(index.Text, start, index.Index + 1, TokenType.Word);
        }
Example #5: ReadComment
        private Token ReadComment(LexerIndex index)
        {
            var start = index.Index;

            // Move onto the second character of the comment opener; the loop
            // below then scans the comment body for the terminator.
            index.Next();

            while (index.Next())
            {
                if (index.Current == CommentEnd[0])
                {
                    if (index.Peek == CommentEnd[1])
                    {
                        index.Next();
                        break;
                    }
                }
            }

            return new Token(index.Text, start, index.Index + 1, TokenType.Comment);
        }
Example #6: ReadVariable
        private Token ReadVariable(LexerIndex index)
        {
            // Require at least two more characters after the variable marker;
            // otherwise fall back to a single-character Unknown token.
            if (!index.PeekPeek.HasValue)
            {
                return CreateSingelCharacterToken(index, TokenType.Unknown);
            }

            var start = index.Index;

            while (IsLetterOrDigit(index.Peek) || IsStartVariable(index.Peek))
            {
                index.Next();
            }

            return new Token(index.Text, start, index.Index + 1, TokenType.Variable);
        }
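Here the character under the cursor is the variable marker itself; once past the PeekPeek guard, the loop consumes the same letter-or-digit characters as ReadWord plus any further marker characters, so an input such as @user_id (assuming '@' is the marker, which this page does not show) comes back as a single TokenType.Variable token.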
Example #7: ReadStringLitteral
        private Token ReadStringLitteral(LexerIndex index)
        {
            var  start         = index.Index;
            bool lastWasEscape = false;

            while (index.Next())
            {
                if (lastWasEscape)
                {
                    lastWasEscape = false;
                }
                else if (index.Current == StringLitteralEscape)
                {
                    lastWasEscape = true;
                }
                else if (index.Current == StringLitteralEnd)
                {
                    return new Token(index.Text, start, index.Index - 1, TokenType.StringLitteral);
                }
            }

            return new Token(index.Text, start, index.Index - 1, TokenType.Unknown);
        }
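The lastWasEscape flag simply skips classification of whichever character follows StringLitteralEscape, so an escaped quote inside the literal does not terminate it; only an unescaped StringLitteralEnd does. If the input runs out before the closing delimiter is seen, the method falls through and reports the remainder as TokenType.Unknown rather than a string literal.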
Example #8: Tokenize
        public Token[] Tokenize(char[] text)
        {
            var index = new LexerIndex(text);
            var tokens = new List<Token>();

            while (index.Next())
            {
                if (!MoveToFirstNonWhite(index))
                    break;
                else if (IsSpecialCharacter(index.Current))
                {
                    // Two-character markers (line comments and block comments) are
                    // detected by checking the current character and the one after it.
                    if (index.Current == CommentRestOfLine[0] && index.Peek == CommentRestOfLine[1])
                        tokens.Add(ReadCommentRestOfLine(index));
                    else if (index.Current == CommentStart[0] && index.Peek == CommentStart[1])
                        tokens.Add(ReadComment(index));
                    else if (IsEndOfQuery(index.Current))
                        tokens.Add(CreateSingelCharacterToken(index, TokenType.EndOfQuery));
                    else
                        tokens.Add(CreateSingelCharacterToken(index, TokenType.SpecialCharacter));
                }
                else if (IsDigit(index.Current))
                {
                    tokens.Add(ReadNumber(index));
                }
                else if (IsStartVariable(index.Current))
                {
                    tokens.Add(ReadVariable(index));
                }
                else if (IsLetter(index.Current))
                {
                    tokens.Add(ReadWord(index));
                }
                else if (index.Current == StartParenthesis)
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.StartParenthesis));
                }
                else if (index.Current == EndParenthesis)
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.EndParenthesis));
                }
                else if (IsStartIdentifierDelimiter(index.Current))
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.StartIdentifierDelimiter));
                }
                else if (IsEndIdentifierDelimiter(index.Current))
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.EndIdentifierDelimiter));
                }
                else if (index.Current == SepparatorPoint)
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.SepparatorPoint));
                }
                else if (index.Current == Comma)
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.Comma));
                }
                else if (index.Current == StringLitteralStart)
                {
                    tokens.Add(ReadStringLitteral(index));
                }
                else
                {
                    tokens.Add(CreateSingelCharacterToken(index, TokenType.Unknown));
                }
            }

            return tokens.ToArray();
        }
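For orientation, this is roughly how Tokenize might be driven from calling code. The class that declares these methods is not named on this page, so Lexer below is a placeholder, and the SQL-flavoured input is purely illustrative; the actual comment markers, quotes and separators are whatever constants that class defines.

        // Hypothetical driver; "Lexer" stands in for whichever class declares Tokenize.
        var lexer  = new Lexer();
        var tokens = lexer.Tokenize("SELECT name FROM users; -- trailing comment".ToCharArray());

        foreach (var token in tokens)
        {
            // Token carries the source text, start/end offsets and a TokenType,
            // judging by the constructor calls in the examples above.
            Console.WriteLine(token);
        }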