Code example #1
File: MatchNumber.cs Project: exMifsan/lexer
        protected override Token IsMatchImpl(Tokenizer tokenizer)
        {
            var leftOperand = GetIntegers(tokenizer);

            if (leftOperand != null)
            {
                bool   isFloat     = false;
                string numberValue = leftOperand;
                // Optional fractional part after a decimal point.
                if (tokenizer.Current == ".")
                {
                    tokenizer.Consume();

                    var rightOperand = GetIntegers(tokenizer);

                    if (rightOperand != null)
                    {
                        numberValue += "." + rightOperand;
                        isFloat      = true;
                    }
                }

                // Optional exponent part (e.g. 1.5e-3). Only mark the token as a
                // float if digits actually follow the 'e'.
                if (tokenizer.Current == "e")
                {
                    string sign = "";

                    tokenizer.Consume();

                    if (tokenizer.Current == "-")
                    {
                        sign = "-";
                        tokenizer.Consume();
                    }

                    var exponent = GetIntegers(tokenizer);

                    if (exponent != null)
                    {
                        numberValue += "e" + sign + exponent;
                        isFloat      = true;
                    }
                }

                // Reject a number that is immediately followed by a letter (e.g. "123abc").
                if (!tokenizer.End())
                {
                    if (Char.IsLetter(tokenizer.Current, 0))
                    {
                        return(null);
                    }
                }

                if (isFloat)
                {
                    return(new Token(TokenType.FloatValue, numberValue));
                }

                return(new Token(TokenType.IntValue, leftOperand));
            }

            return(null);
        }
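MatchNumber calls a GetIntegers helper that is not part of this excerpt. As a rough, hypothetical sketch (the project's actual helper may differ), it can be read as "consume consecutive digits and return them, or null if there are none":

        // Hypothetical sketch of GetIntegers: greedily consume consecutive digit
        // characters and return them, or null when no digits are found.
        private string GetIntegers(Tokenizer tokenizer)
        {
            string digits = null;

            while (!tokenizer.End() && char.IsDigit(tokenizer.Current, 0))
            {
                digits += tokenizer.Current;
                tokenizer.Consume();
            }

            return digits;
        }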
Code example #2
File: MatchComment.cs Project: exMifsan/lexer
        protected override Token IsMatchImpl(Tokenizer tokenizer)
        {
            // A successful IsMatch consumes the opening delimiter; bail out otherwise.
            if (new MatchKeyword(TokenType.Unknown, CommentDelim.Item1).IsMatch(tokenizer) == null)
            {
                return(null);
            }
            else
            {
                tokenizer.Consume();

                // Skip the comment body until the closing delimiter matches or the input ends.
                while (!tokenizer.End() && new MatchKeyword(TokenType.Unknown, CommentDelim.Item2).IsMatch(tokenizer) == null)
                {
                    tokenizer.Consume();
                }

                if (new MatchKeyword(TokenType.Unknown, CommentDelim.Item2).IsMatch(tokenizer) == null)
                {
                    tokenizer.Consume();
                }

                // Report the comment by its opening delimiter; line comments get their own token type.
                TokenType tokenType = TokenType.OpenBlockComment;

                if (CommentDelim == LINE_COMMENT)
                {
                    tokenType = TokenType.OpenLineComment;
                }

                return(new Token(tokenType, CommentDelim.Item1));
            }
        }
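The matcher above relies on a CommentDelim pair and a LINE_COMMENT constant defined elsewhere in MatchComment.cs. A hedged guess at their shape, based only on how they are used here (the real declarations and delimiter strings may differ):

        // Assumed shape of the delimiter members used above; the actual
        // declarations and delimiter strings in MatchComment.cs may differ.
        public static readonly Tuple<string, string> LINE_COMMENT  = Tuple.Create("//", "\n");
        public static readonly Tuple<string, string> BLOCK_COMMENT = Tuple.Create("/*", "*/");

        private Tuple<string, string> CommentDelim { get; set; }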
Code example #3
        protected override Token IsMatchImpl(Tokenizer tokenizer)
        {
            var str = new StringBuilder();

            // Consume the opening delimiter, then collect everything up to the closing one.
            if (tokenizer.Current == StringDelim)
            {
                tokenizer.Consume();

                while (!tokenizer.End() && tokenizer.Current != StringDelim)
                {
                    str.Append(tokenizer.Current);
                    tokenizer.Consume();
                }

                // An unterminated literal is not a match.
                if (tokenizer.End())
                {
                    return(null);
                }

                if (tokenizer.Current == StringDelim)
                {
                    tokenizer.Consume();
                }
            }

            TokenType tokenType;

            // A TIC-delimited literal is a char value and must be exactly one character long.
            if (StringDelim == TIC)
            {
                tokenType = TokenType.CharValue;
                if (str.Length > 1)
                {
                    return(null);
                }
            }
            else
            {
                tokenType = TokenType.StringValue;
            }
            return(str.Length > 0 ? new Token(tokenType, StringDelim + str + StringDelim) : null);
        }
Code example #4
File: Lexer.cs Project: lulldev/lexer
        private Token Next()
        {
            if (Tokenizer.End())
            {
                return(new Token(TokenType.EOF));
            }

            return((from match in Matchers
                    let token = match.IsMatch(Tokenizer)
                    where token != null
                    select token).FirstOrDefault());
        }
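Next() returns the first matcher that succeeds, so the order of Matchers acts as a precedence list. InitializeMatchList is not shown in this excerpt; the sketch below is purely illustrative (the matcher class names, the parameterless constructors, and the List<MatcherBase> element type are assumptions; the real matchers likely take delimiters and special-character lists):

        // Hypothetical sketch of InitializeMatchList. Names and constructors are
        // assumed; the point is that specific matchers (whitespace, comments,
        // literals, numbers) should precede the catch-all word matcher.
        private List<MatcherBase> InitializeMatchList()
        {
            return new List<MatcherBase>
            {
                new MatchWhiteSpace(), // code example #5 (name assumed)
                new MatchComment(),    // code example #2 (constructor assumed)
                new MatchString(),     // code example #3 (name assumed)
                new MatchNumber(),     // code example #1 (constructor assumed)
                new MatchWord()        // code example #7 (name assumed)
            };
        }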
Code example #5
        protected override Token IsMatchImpl(Tokenizer tokenizer)
        {
            bool foundWhiteSpace = false;

            // Collapse any run of consecutive whitespace into a single WhiteSpace token.
            while (!tokenizer.End() && String.IsNullOrWhiteSpace(tokenizer.Current))
            {
                foundWhiteSpace = true;

                tokenizer.Consume();
            }

            return(foundWhiteSpace ? new Token(TokenType.WhiteSpace) : null);
        }
Code example #6
File: MatcherBase.cs Project: exMifsan/lexer
        public Token IsMatch(Tokenizer tokenizer)
        {
            // At end of input, report EOF instead of running the matcher.
            if (tokenizer.End())
            {
                return(new Token(TokenType.EOF));
            }

            // Snapshot the position so a failed match can be rolled back without side effects.
            tokenizer.TakeSnapshot();

            var match = IsMatchImpl(tokenizer);

            if (match == null)
            {
                tokenizer.RollbackSnapshot();
            }
            else
            {
                tokenizer.CommitSnapshot();
            }

            return(match);
        }
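Every matcher above is written against the same small Tokenizer surface: Current (the current character, compared as a one-character string), End(), Consume(), and the snapshot methods MatcherBase uses for backtracking. A minimal sketch of such a tokenizer, as an illustration only (not the project's actual Tokenizer.cs):

using System.Collections.Generic;

// Illustrative tokenizer exposing the members the matchers rely on.
public class Tokenizer
{
    private readonly string     _source;
    private int                 _index;
    private readonly Stack<int> _snapshots = new Stack<int>();

    public Tokenizer(string source)
    {
        _source = source ?? string.Empty;
    }

    // Current character as a one-character string, or null past the end.
    public string Current => _index < _source.Length ? _source[_index].ToString() : null;

    public bool End() => _index >= _source.Length;

    public void Consume() => _index++;

    // Snapshots let a failed matcher rewind whatever it partially consumed.
    public void TakeSnapshot()     => _snapshots.Push(_index);

    public void RollbackSnapshot() => _index = _snapshots.Pop();

    public void CommitSnapshot()   => _snapshots.Pop();
}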
Code example #7
        protected override Token IsMatchImpl(Tokenizer tokenizer)
        {
            String current = null;

            // Accumulate characters until whitespace or a registered special character ends the word.
            while (!tokenizer.End() && !String.IsNullOrWhiteSpace(tokenizer.Current) && SpecialCharacters.All(m => m.Match != tokenizer.Current))
            {
                current += tokenizer.Current;
                tokenizer.Consume();
            }

            TokenType tokenType = TokenType.Unknown;

            if (current == null)
            {
                return(null);
            }

            // Words that start with a letter are identifiers; anything else stays Unknown.
            if (Char.IsLetter(current, 0))
            {
                tokenType = TokenType.Identifier;
            }

            return(new Token(tokenType, current));
        }
Code example #8
File: Lexer.cs Project: lulldev/lexer
        public IEnumerable <Token> Lex()
        {
            Matchers = InitializeMatchList();

            var current = Next();

            while (current != null && current.TokenType != TokenType.EOF)
            {
                // Whitespace tokens are matched but never emitted.
                if (current.TokenType != TokenType.WhiteSpace)
                {
                    yield return(current);
                }

                // Pair each open-block-comment token with a synthetic closing token.
                if (!Tokenizer.End())
                {
                    if (current.TokenType == TokenType.OpenBlockComment)
                    {
                        yield return(new Token(TokenType.CloseBlockComment, "*/"));
                    }
                }

                current = Next();
            }
        }
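A hypothetical usage of the lexer above; the Lexer constructor taking a raw string and the TokenValue property name are assumptions (the real API may take a Tokenizer and expose the token text differently):

var lexer = new Lexer("x = 1.5e-3 /* scaled */");

foreach (var token in lexer.Lex())
{
    // TokenType appears in the examples above; TokenValue is an assumed name
    // for the token's text.
    Console.WriteLine($"{token.TokenType}: {token.TokenValue}");
}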