Code example #1
        private static string TokenTypeToString(LexerTokenType token)
        {
            switch (token)
            {
            case LexerTokenType.NotStarted:
                return("Before first token");

            case LexerTokenType.Identifier:
                return("identifier");

            case LexerTokenType.And:
                return("&&");

            case LexerTokenType.Or:
                return("||");

            case LexerTokenType.Not:
                return("!");

            case LexerTokenType.OpenParen:
                return("(");

            case LexerTokenType.CloseParen:
                return(")");

            case LexerTokenType.End:
                return("End of expression");

            default:
                return("Can't get here, shut up the compiler");
            }
        }
Code example #2
 public PropLogicLexer(string source)
 {
     this.source            = source;
     this.currentTokenType  = LexerTokenType.NotStarted;
     this.currentTokenValue = null;
     this.identifierRegex   = new Regex("^[A-Za-z_][0-9A-Za-z_]*");
 }
Code example #3
 public Token(string value, LexerTokenType tokenType, int lineNum, int position)
 {
     Value     = value;
     TokenType = tokenType;
     LineNum   = lineNum;
     Position  = position;
 }
Code example #4
File: Token.cs Project: xavierzwirtz/sqltoolsservice
 /// <summary>
 /// Token class used by the lexer in Batch Parser
 /// </summary>
 internal Token(LexerTokenType tokenType, PositionStruct begin, PositionStruct end, string text, string filename)
 {
     TokenType = tokenType;
     Begin     = begin;
     End       = end;
     Text      = text;
     Filename  = filename;
 }
Code example #5
 /// <summary>
 /// Class for a custom exception for the Batch Parser
 /// </summary>
 public BatchParserException(ErrorCode errorCode, Token token, string message)
     : base(message)
 {
     this.errorCode = errorCode;
     begin          = token.Begin;
     end            = token.End;
     text           = token.Text;
     tokenType      = token.TokenType;
 }
Code example #6
 private bool Expect(LexerTokenType lexerTokenType)
 {
     if (LookaheadTokenType == lexerTokenType)
     {
         return(true);
     }
     RaiseError(ErrorCode.TokenExpected);
     return(false);
 }
Code example #7
 private bool ExpectAndAccept(LexerTokenType lexerTokenType)
 {
     if (Accept(lexerTokenType))
     {
         return(true);
     }
     RaiseError(ErrorCode.TokenExpected);
     return(false);
 }
Code example #8
 private bool Accept(LexerTokenType lexerTokenType)
 {
     if (LookaheadTokenType == lexerTokenType)
     {
         Accept();
         return(true);
     }
     return(false);
 }
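Examples #6-#8 are the standard lookahead helpers of a recursive-descent parser: Expect checks the lookahead and raises ErrorCode.TokenExpected on a mismatch without consuming anything, Accept consumes the token only if it matches, and ExpectAndAccept consumes the token or reports the error. A minimal sketch of how such helpers are typically combined (the rule below is illustrative only, not code from the Batch Parser):

    // Illustrative only: skip optional whitespace, then require a text token.
    // Accept, ExpectAndAccept and the LexerTokenType members appear in the
    // snippets on this page; the rule itself is a made-up example.
    private void ParseTextAfterOptionalWhitespace()
    {
        Accept(LexerTokenType.Whitespace);     // consume it if present, otherwise a no-op
        ExpectAndAccept(LexerTokenType.Text);  // require it, raising TokenExpected otherwise
    }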
Code example #9
        public static bool LooksLikeQualifiedName(
            this TokenBuffer <LexerTokenType> tokenBuffer,
            LexerTokenType qualifierDelimeter)
        {
            var tokenType0 = tokenBuffer.LookAhead(0, LexerTokenType.Unknown);
            var tokenType1 = tokenBuffer.LookAhead(1, LexerTokenType.Unknown);

            return(tokenType0 == LexerTokenType.Identifier &&
                   tokenType1 == qualifierDelimeter);
        }
Code example #10
 private bool MatchesKeyword(string keyword, LexerTokenType tokenType)
 {
     if (this.source.StartsWith(keyword))
     {
         this.currentTokenType  = tokenType;
         this.currentTokenValue = keyword;
         this.source            = this.source.Substring(keyword.Length);
         return(true);
     }
     return(false);
 }
Code example #11
File: Lexer.cs Project: xavierzwirtz/sqltoolsservice
        private void SetToken(LexerTokenType lexerTokenType)
        {
            string text = currentInput.FlushBufferedText();

            currentToken = new Token(
                lexerTokenType,
                tokenBeginPosition,
                new PositionStruct(currentInput.CurrentLine, currentInput.CurrentColumn, currentInput.CurrentOffset, currentInput.Filename),
                text,
                currentInput.Filename);
        }
Code example #12
        public static string TokenTypeToCommandString(LexerTokenType lexerTokenType)
        {
            switch (lexerTokenType)
            {
            case LexerTokenType.Connect:
                return("Connect");

            case LexerTokenType.Ed:
                return("Ed");

            case LexerTokenType.ErrorCommand:
                return("Error");

            case LexerTokenType.Execute:
                return("!!");

            case LexerTokenType.Exit:
                return("Exit");

            case LexerTokenType.Help:
                return("Help");

            case LexerTokenType.List:
                return("List");

            case LexerTokenType.ListVar:
                return("ListVar");

            case LexerTokenType.OnError:
                return("On Error");

            case LexerTokenType.Out:
                return("Out");

            case LexerTokenType.Perftrace:
                return("PerfTrace");

            case LexerTokenType.Quit:
                return("Quit");

            case LexerTokenType.Reset:
                return("Reset");

            case LexerTokenType.Serverlist:
                return("ServerList");

            case LexerTokenType.Xml:
                return("Xml");

            default:
                Debug.Fail("Unknown batch parser command");
                return(lexerTokenType.ToString());
            }
        }
Code example #13
        private bool MatchesIdentifier()
        {
            Match match = this.identifierRegex.Match(this.source);

            if (match.Success)
            {
                this.currentTokenType  = LexerTokenType.Identifier;
                this.currentTokenValue = this.source.Substring(0, match.Length);
                this.source            = this.source.Substring(match.Length);
                return(true);
            }
            return(false);
        }
Code example #14
        public static StatementBuilder <StatementType, ParserTokenType, LexerTokenType> AcceptNameQualifier(
            this StatementBuilder <StatementType, ParserTokenType, LexerTokenType> statement,
            LexerTokenType qualifierDelimeter,
            ParserTokenType identifierType,
            ParserTokenType qualifierType)
        {
            if (statement.TokenBuffer.LooksLikeQualifiedName(qualifierDelimeter))
            {
                statement.Expect(LexerTokenType.Identifier, identifierType)
                .Expect(qualifierDelimeter, qualifierType);
            }

            return(statement);
        }
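AcceptNameQualifier (example #14) builds on the two-token lookahead in LooksLikeQualifiedName (example #9): only when the buffer starts with an identifier followed by the delimiter does it commit to consuming both. A hedged call-site sketch; LexerTokenType.Period appears in example #23, while the ParserTokenType member names here are placeholders, not taken from the real grammar:

    // Hypothetical: accept an optional "prefix." qualifier before a name.
    statement.AcceptNameQualifier(
        LexerTokenType.Period,               // qualifier delimiter, as in "schema.name"
        ParserTokenType.QualifierName,       // placeholder name
        ParserTokenType.QualifierDelimiter); // placeholder name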
Code example #15
        public void VerifyLexerTypeOfSqlCmdIFisExecute()
        {
            string         query       = ":!!if exist foo.txt del foo.txt";
            var            inputStream = GenerateStreamFromString(query);
            LexerTokenType type        = LexerTokenType.None;

            using (Lexer lexer = new Lexer(new StreamReader(inputStream), "Test.sql"))
            {
                lexer.ConsumeToken();
                type = lexer.CurrentTokenType;
            }
            // We expect the lexer token type to be Execute.
            Assert.Equal("Execute", type.ToString());
        }
Code example #16
        private static Token GetSubToken(Token token, int startOffset, int endOffset, LexerTokenType? newTokenType = null)
        {
            LexerTokenType tokenType = newTokenType.HasValue ? newTokenType.Value : token.TokenType;
            string         text = token.Text.Substring(startOffset, endOffset - startOffset);
            string         filename = token.Begin.Filename;
            PositionStruct beginPos, endPos;

            int beginLine, beginColumn;

            LineInfo.CalculateLineColumnForOffset(token, startOffset, out beginLine, out beginColumn);
            beginPos = new PositionStruct(beginLine, beginColumn, token.Begin.Offset + startOffset, filename);

            int endLine, endColumn;

            LineInfo.CalculateLineColumnForOffset(token, endOffset, out endLine, out endColumn);
            endPos = new PositionStruct(endLine, endColumn, token.Begin.Offset + endOffset, filename);

            return(new Token(tokenType, beginPos, endPos, text, filename));
        }
Code example #17
        private string GetTokenCode(LexerTokenType lexerTokenType)
        {
            switch (lexerTokenType)
            {
            case LexerTokenType.Text:
                return("T");

            case LexerTokenType.Whitespace:
                return("WS");

            case LexerTokenType.NewLine:
                return("NL");

            case LexerTokenType.Comment:
                return("C");

            default:
                return(lexerTokenType.ToString());
            }
        }
Code example #18
        public bool NextToken()
        {
            if (this.currentTokenType == LexerTokenType.End)
            {
                return(false);
            }
            this.EatLeadingWhiteSpace();
            if (this.source.Length == 0)
            {
                this.currentTokenType  = LexerTokenType.End;
                this.currentTokenValue = string.Empty;
                return(false);
            }

            if (this.MatchesKeyword("&&", LexerTokenType.And))
            {
                return(true);
            }
            if (this.MatchesKeyword("||", LexerTokenType.Or))
            {
                return(true);
            }
            if (this.MatchesKeyword("!", LexerTokenType.Not))
            {
                return(true);
            }
            if (this.MatchesKeyword("(", LexerTokenType.OpenParen))
            {
                return(true);
            }
            if (this.MatchesKeyword(")", LexerTokenType.CloseParen))
            {
                return(true);
            }
            if (this.MatchesIdentifier())
            {
                return(true);
            }

            throw new ArgumentException("Invalid input stream in source");
        }
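NextToken (example #18), together with the constructor (#2), MatchesKeyword (#10) and MatchesIdentifier (#13), forms a complete lexer for propositional-logic expressions. A minimal driver sketch, assuming the class exposes CurrentTokenType and CurrentTokenValue accessors over the private fields shown above (the snippets only show the fields, so those names are assumptions):

    // Hypothetical usage of PropLogicLexer; accessor names are assumed.
    var lexer = new PropLogicLexer("a && (b || !c)");
    while (lexer.NextToken())
    {
        Console.WriteLine($"{lexer.CurrentTokenType}: {lexer.CurrentTokenValue}");
    }
    // Expected stream: Identifier "a", And "&&", OpenParen "(",
    // Identifier "b", Or "||", Not "!", Identifier "c", CloseParen ")".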
Code example #19
File: Lexer.cs Project: Paramecium13/NailSharp
        protected unsafe void Pop()
        {
            ulong data = 0;
            var   type = Generator.TokenType.Identifier;
            var   str  = builder.ToString();

            switch (TokenType)
            {
            case LexerTokenType.Unknown:
            case LexerTokenType.Word:
                if (str.StartsWith("@", StringComparison.Ordinal))
                {
                    type = Generator.TokenType.DepField;
                }
                break;

            case LexerTokenType.Float:
            {
                type = Generator.TokenType.Float;
                var x = double.Parse(str);
                data = *((ulong *)&x);
                break;
            }

            case LexerTokenType.Base10:
                if (IsNegative)
                {
                    type = Generator.TokenType.NegInt;
                    var x = long.Parse(str);
                    data = *((ulong *)&x);
                }
                else
                {
                    type = Generator.TokenType.NonNegInt;
                    data = ulong.Parse(str);
                }
                break;

            case LexerTokenType.Hex:
                if (IsNegative)
                {
                    type = Generator.TokenType.NegInt;
                    var x = long.Parse(str, System.Globalization.NumberStyles.HexNumber);
                    data = *((ulong *)&x);
                }
                else
                {
                    type = Generator.TokenType.NonNegInt;
                    data = ulong.Parse(str, System.Globalization.NumberStyles.HexNumber);
                }
                break;

            case LexerTokenType.Bin:
                throw new NotImplementedException();

            case LexerTokenType.String:
                type = Generator.TokenType.String;
                break;

            case LexerTokenType.Assignment:
            case LexerTokenType.Operator:
            case LexerTokenType.SyntaxSymbol:
                type = Generator.TokenType.Symbol;
                break;

            default:
                break;
            }

            if (str.Length != 0)
            {
                Tokens.Add(new Token(type, LineNumber, str, data));
            }
            builder.Clear();
            IsNegative = false;
            State      = LexerStatus.Base;
            TokenType  = LexerTokenType.Unknown;
        }
Code example #20
        private void ParseLines()
        {
            do
            {
                LexerTokenType tokenType = LookaheadTokenType;
                switch (tokenType)
                {
                case LexerTokenType.OnError:
                    RemoveLastWhitespaceToken();
                    Token onErrorToken = LookaheadToken;
                    Accept();
                    ParseOnErrorCommand(onErrorToken);
                    break;

                case LexerTokenType.Eof:
                    if (tokenBuffer.Count > 0)
                    {
                        ExecuteBatch(1);
                    }
                    return;

                case LexerTokenType.Go:
                    RemoveLastWhitespaceToken();
                    Accept();
                    ParseGo();
                    break;

                case LexerTokenType.Include:
                    RemoveLastWhitespaceToken();
                    Accept();
                    ParseInclude();
                    break;

                case LexerTokenType.Comment:
                case LexerTokenType.NewLine:
                case LexerTokenType.Text:
                case LexerTokenType.Whitespace:
                    AddTokenToStringBuffer();
                    Accept();
                    break;

                case LexerTokenType.Setvar:
                    Token setvarToken = LookaheadToken;
                    RemoveLastWhitespaceToken();
                    Accept();
                    ParseSetvar(setvarToken);
                    break;

                case LexerTokenType.Connect:
                case LexerTokenType.Ed:
                case LexerTokenType.ErrorCommand:
                case LexerTokenType.Execute:
                case LexerTokenType.Exit:
                case LexerTokenType.Help:
                case LexerTokenType.List:
                case LexerTokenType.ListVar:
                case LexerTokenType.Out:
                case LexerTokenType.Perftrace:
                case LexerTokenType.Quit:
                case LexerTokenType.Reset:
                case LexerTokenType.Serverlist:
                case LexerTokenType.Xml:
                    RaiseError(ErrorCode.UnsupportedCommand,
                               string.Format(CultureInfo.CurrentCulture, SR.EE_ExecutionError_CommandNotSupported, tokenType));
                    break;

                default:
                    RaiseError(ErrorCode.UnrecognizedToken);
                    break;
                }
            } while (true);
        }
Code example #21
        ////[Fact]
        ////public void UseStatement()
        ////{
        ////    this.TestLexer(
        ////        "use appx",
        ////        Token(LexerTokenType.UseKeyword, "use")
        ////            .Token(LexerTokenType.Whitespace, " ")
        ////            .Token(LexerTokenType.Identifier, "appx")
        ////        );
        ////}

        private static TokenListBuilder <LexerTokenType> Token(LexerTokenType type, string value)
        {
            var builder = new TokenListBuilder <LexerTokenType>();

            return(builder.Token(type, value));
        }
Code example #22
 internal SqlCmdCommand(LexerTokenType tokenType)
 {
     this.LexerTokenType = tokenType;
 }
Code example #23
        public static IEnumerable <Token <LexerTokenType> > LexTokens(string text, Position startPosition)
        {
            Position tokenStart = startPosition;
            bool     hadNewline = false;

            int currentPos = 0;

            while (currentPos < text.Length)
            {
                char           currentChar    = text[currentPos];
                var            followingChars = text.Skip(currentPos + 1).TakeWhile(c => !Newlines.Contains(c));
                LexerTokenType tokenType      = LexerTokenType.None;
                int            length         = 0;

                hadNewline = false;

                if (currentChar == '\r' || currentChar == '\n')
                {
                    tokenType = LexerTokenType.Newline;
                    // Take all consecutive newline characters, or just the immediate CRLF? ... just the next one.
                    length     = (currentChar == '\r' && text.Skip(currentPos + 1).FirstOrDefault() == '\n') ? 2 : 1;
                    hadNewline = true;
                }
                else if (char.IsWhiteSpace(currentChar))
                {
                    // return all of the consecutive whitespace...
                    tokenType = LexerTokenType.Whitespace;
                    length    = 1 + followingChars.TakeWhile(c => char.IsWhiteSpace(c)).Count();
                }
                else if (currentChar == 'u' &&
                         currentPos + 2 < text.Length &&
                         text[currentPos + 1] == 's' &&
                         text[currentPos + 2] == 'e' &&
                         (currentPos + 3 >= text.Length || char.IsWhiteSpace(text[currentPos + 3])))
                {
                    tokenType = LexerTokenType.UseKeyword;
                    length    = 3;
                }
                else if (currentChar == '#')
                {
                    tokenType = LexerTokenType.Hash;
                    length    = 1;
                }
                else if (currentChar == ':')
                {
                    tokenType = LexerTokenType.Colon;
                    length    = 1;
                }
                else if (currentChar == '.')
                {
                    tokenType = LexerTokenType.Period;
                    length    = 1;
                }
                else if (currentChar == '=')
                {
                    tokenType = LexerTokenType.Equals;
                    length    = 1;
                }
                else if (currentChar == '"')
                {
                    tokenType = LexerTokenType.DoubleQuote;
                    length    = 1;
                }
                else if (currentChar == '\'')
                {
                    tokenType = LexerTokenType.SingleQuote;
                    length    = 1;
                }
                else if (char.IsLetter(currentChar))
                {
                    tokenType = LexerTokenType.Identifier;
                    length    = 1 + followingChars.TakeWhile(c => char.IsLetterOrDigit(c) || c == '_').Count();
                }
                else if (char.IsDigit(currentChar))
                {
                    tokenType = LexerTokenType.Number;
                    length    = 1 + followingChars.TakeWhile(c => char.IsDigit(c)).Count();
                }
                else
                {
                    tokenType = LexerTokenType.Value;
                    length    = 1 + followingChars.TakeWhile(c => !ValueTokenEnders.Contains(c)).Count();
                }

                if (length <= 0)
                {
                    throw new Exception("didn't eat any characters!");
                }

                Range tokenRange = new Range(tokenStart, length);

                yield return(new Token <LexerTokenType>(
                                 tokenType,
                                 text.Substring(currentPos, length),
                                 tokenRange));

                currentPos += length;
                tokenStart  = tokenRange.End;

                // After newlines, keep the offset, but bump the line and reset the column
                if (hadNewline)
                {
                    tokenStart = new Position(tokenStart.Offset, tokenStart.Line + 1, 0);
                }
            }
        }
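The "use"-statement lexer in example #23 is a pure function over a string, so it is easy to exercise directly. A hedged usage sketch: the Position(offset, line, column) argument order is inferred from the body above, the starting values are guesses, and the expected token kinds follow the commented-out test in example #21.

    // Hypothetical driver; Position argument order and start values are assumptions.
    foreach (var token in LexTokens("use appx", new Position(0, 0, 0)))
    {
        Console.WriteLine(token);
    }
    // Per example #21: UseKeyword "use", Whitespace " ", Identifier "appx".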
Code example #24
File: Lexer.cs Project: Paramecium13/NailSharp
        private void BaseReadChar(char c)
        {
            if (char.IsWhiteSpace(c))
            {
                Pop(); return;
            }
            if (LoneSymbols.Contains(c))             // It's a one char operator.
            {
                Pop();
                TokenType = LexerTokenType.Operator;
                switch (c)
                {
                case '\\':
                    ContinueLine = true;
                    return;

                case '-':
                    State = LexerStatus.SymbolMinus;
                    builder.Append(c);
                    return;

                case ';':
                    Pop();
                    Tokens.Add(new Token(Generator.TokenType.NewLine, LineNumber, "", 0));
                    return;

                default:
                    builder.Append(c);
                    Pop();
                    return;
                }
            }
            if (c == '"')
            {
                Pop();
                State     = LexerStatus.StringBase;
                TokenType = LexerTokenType.String;
                return;
            }
            if (MultiSymbols.Contains(c))
            {
                switch (c)
                {
                case '/':
                    State = LexerStatus.SymbolSlash;
                    builder.Append(c);
                    return;

                case '|':
                    State = LexerStatus.SymbolLine;
                    builder.Append(c);
                    return;

                default:        break;
                }
            }
            switch (State)
            {
            case LexerStatus.Base:
                if (char.IsDigit(c))
                {
                    builder.Append(c);
                    TokenType = LexerTokenType.Base10;
                    if (c == '0')
                    {
                        State = LexerStatus.Zero;
                    }
                    else
                    {
                        State = LexerStatus.Number;
                    }
                }
                else if (c == '@')
                {
                    builder.Append(c);
                    TokenType = LexerTokenType.Word;
                    State     = LexerStatus.SymbolAt;
                }
                else
                {
                    builder.Append(c);
                    TokenType = LexerTokenType.Word;
                    State     = LexerStatus.Identifier;
                }
                break;

            case LexerStatus.Zero:
                switch (c)
                {
                case 'x':
                case 'X':
                    State = LexerStatus.Hex;
                    break;

                case 'b':
                case 'B':
                    State = LexerStatus.Bin;
                    break;

                case '.':
                    State     = LexerStatus.Decimal;
                    TokenType = LexerTokenType.Float;
                    break;

                default:
                    throw new NotImplementedException("Octal not supported...");
                }
                builder.Append(c);
                break;

            case LexerStatus.Number:
                if (char.IsDigit(c))
                {
                    builder.Append(c);
                }
                else if (c == '_')
                {
                    break;
                }
                else if (c == '.')
                {
                    State     = LexerStatus.Decimal;
                    TokenType = LexerTokenType.Float;
                    builder.Append(c);
                }
                else
                {
                    throw new ApplicationException("Tokenization Error: Invalid number...");
                }
                break;

            case LexerStatus.Hex:
                if ((c >= '0' && c <= '9') || (c >= 'A' && c <= 'F') || (c >= 'a' && c <= 'f'))
                {
                    builder.Append(c);
                }
                else if (c == '_')
                {
                    break;
                }
                else
                {
                    throw new ApplicationException("Tokenization Error: Invalid number...");
                }
                break;

            case LexerStatus.Bin:
                if (c == '0' || c == '1')
                {
                    builder.Append(c);
                }
                else if (c == '_')
                {
                    break;
                }
                else
                {
                    throw new ApplicationException("Tokenization Error: Invalid number...");
                }
                break;

            case LexerStatus.Decimal:
                if (char.IsDigit(c))
                {
                    builder.Append(c);
                }
                else if (c == '_')
                {
                    break;
                }
                else
                {
                    throw new ApplicationException("Tokenization Error: Invalid number...");
                }
                break;

            case LexerStatus.Identifier:
                builder.Append(c);
                break;

            case LexerStatus.SymbolLine:
                throw new NotImplementedException();

            //break;
            case LexerStatus.SymbolSlash:
                if (c == '\\')
                {
                    State = LexerStatus.CommentSingleline;
                    builder.Clear();
                }
                else
                {
                    Pop();
                }
                break;

            case LexerStatus.SymbolAt:
                if (c == '\"')
                {
                    builder.Clear();
                    State     = LexerStatus.LitString;
                    TokenType = LexerTokenType.String;
                    break;
                }
                if (char.IsDigit(c))
                {
                    throw new ApplicationException("...");
                }
                builder.Append(c);
                TokenType = LexerTokenType.Word;
                State     = LexerStatus.Identifier;
                break;

            case LexerStatus.SymbolMinus:
                if (char.IsDigit(c))
                {
                    builder.Append(c);
                    State      = LexerStatus.Number;
                    TokenType  = LexerTokenType.Base10;
                    IsNegative = true;
                }
                else
                {
                    TokenType = LexerTokenType.SyntaxSymbol;
                    Pop();
                    State = LexerStatus.Base;
                }
                break;

            default:
                break;
            }
        }
Code example #25
 /// <summary>
 /// Add to the Token list
 /// </summary>
 /// <param name="str"></param>
 /// <param name="type"></param>
 protected void AddToken(string str, LexerTokenType type)
 {
     Tokens.Add(new Token <TKeyword>(str, type, lineNum, charPosition - wordLength));
     wordLength = 0;
 }
Code example #26
 public Token(string value, LexerTokenType tokenType, int lineNum, int position, TEnum keyword)
     : this(value, tokenType, lineNum, position)
 {
     Keyword = keyword;
 }
Code example #27
        public static IEnumerable <Token <LexerTokenType> > LexTokens(string text, Position startPosition)
        {
            Position tokenStart = startPosition;
            bool     hadNewline = false;

            int currentPos = 0;

            while (currentPos < text.Length)
            {
                char           currentChar    = text[currentPos];
                var            followingChars = text.Skip(currentPos + 1).TakeWhile(c => !Newlines.Contains(c));
                LexerTokenType tokenType      = LexerTokenType.None;
                int            length         = 0;

                hadNewline = false;

                if (currentChar == '\r' || currentChar == '\n')
                {
                    tokenType = LexerTokenType.Newline;
                    // Take all consecutive newline characters, or just the immediate CRLF? ... just the next one.
                    length     = (currentChar == '\r' && text.Skip(currentPos + 1).FirstOrDefault() == '\n') ? 2 : 1;
                    hadNewline = true;
                }
                else if (currentChar == '#')
                {
                    // The remainder of the line is comment!
                    tokenType = LexerTokenType.Comment;
                    length    = 1 + followingChars.Count();
                }
                else if (currentChar == '{')
                {
                    tokenType = LexerTokenType.LeftBrace;
                    length    = 1;
                }
                else if (currentChar == '}')
                {
                    tokenType = LexerTokenType.RightBrace;
                    length    = 1;
                }
                else if (currentChar == '[')
                {
                    tokenType = LexerTokenType.LeftBracket;
                    length    = 1;
                }
                else if (currentChar == ']')
                {
                    tokenType = LexerTokenType.RightBracket;
                    length    = 1;
                }
                else if (currentChar == ',')
                {
                    tokenType = LexerTokenType.Comma;
                    length    = 1;
                }
                else if (currentChar == ':')
                {
                    tokenType = LexerTokenType.Colon;
                    length    = 1;
                }
                else if (currentChar == '\\')
                {
                    // Character after the backslash (if any!) is the raw...
                    tokenType = LexerTokenType.Escape;
                    length    = 1 + followingChars.Take(1).Count();
                    // TODO: Flag as error if it's not known?
                }
                else if (char.IsWhiteSpace(currentChar))
                {
                    // return all of the consecutive whitespace...
                    tokenType = LexerTokenType.Whitespace;
                    length    = 1 + followingChars.TakeWhile(c => char.IsWhiteSpace(c)).Count();
                }
                else if (currentChar == 't' &&
                         currentPos + 3 < text.Length &&
                         text[currentPos + 1] == 'y' &&
                         text[currentPos + 2] == 'p' &&
                         text[currentPos + 3] == 'e' &&
                         (currentPos + 4 >= text.Length || char.IsWhiteSpace(text[currentPos + 4])))
                {
                    tokenType = LexerTokenType.TypeKeyword;
                    length    = 4;
                }
                else if (char.IsLetter(currentChar))
                {
                    tokenType = LexerTokenType.Identifier;
                    length    = 1 + followingChars.TakeWhile(c => char.IsLetterOrDigit(c) || c == '_').Count();
                }
                else if (char.IsDigit(currentChar))
                {
                    tokenType = LexerTokenType.Number;
                    length    = 1 + followingChars.TakeWhile(c => char.IsDigit(c)).Count();
                }
                else
                {
                    tokenType = LexerTokenType.Value;
                    length    = 1 + followingChars.TakeWhile(c => !ValueTokenEnders.Contains(c)).Count();
                }

                if (length <= 0)
                {
                    throw new Exception("didn't eat any characters!");
                }

                Range tokenRange = new Range(tokenStart, length);

                yield return(new Token <LexerTokenType>(
                                 tokenType,
                                 text.Substring(currentPos, length),
                                 tokenRange));

                currentPos += length;
                tokenStart  = tokenRange.End;

                // After newlines, keep the offset, but bump the line and reset the column
                if (hadNewline)
                {
                    tokenStart = new Position(tokenStart.Offset, tokenStart.Line + 1, 0);
                }
            }
        }