Example #1
        /// <summary>
        /// Lexes a compilation unit.
        /// </summary>
        /// <param name="text"> The text to lex. </param>
        /// <param name="defines"> The set of preprocessor symbols defined in source.
        /// It is modified to include the results of any #define/#undef directives found in this compilation unit. </param>
        /// <param name="lineMap"> The LineMap that receives the source-text-to-file/line mapping
        /// produced by #line directives. </param>
        /// <param name="includeComments"> Whether comment tokens should be generated. </param>
        /// <returns> The tokens lexed from the compilation unit. </returns>
        public Token[] Lex(char[] text, IDictionary defines, LineMap lineMap, bool includeComments) {
            // initialize
            _text = new TextBuffer(text);
            _defines = defines;
            _lineMap = lineMap;
            _includeComments = includeComments;

            LexFile();

            _text = null;
            _defines = null;
            _lineMap = null;
            _includeComments = false;

            return (Token[])_tokens.ToArray(typeof(Token));
        }
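The method above is the single entry point for lexing a whole compilation unit. A minimal usage sketch, under stated assumptions, could look like the following; the Lexer instance, the parameterless LineMap constructor, and the pre-defined DEBUG symbol are illustrative assumptions, while Lex's own signature comes from the example above.

        // Hypothetical driver; only Lex(char[], IDictionary, LineMap, bool) is taken from the example.
        using System.Collections;

        public static class LexerDriverSample {
            public static Token[] LexSource(Lexer lexer, string source) {
                IDictionary defines = new Hashtable();
                defines["DEBUG"] = null;                 // pre-defined preprocessor symbol (assumed)

                LineMap lineMap = new LineMap();         // assumed parameterless constructor

                // false: do not generate comment tokens
                return lexer.Lex(source.ToCharArray(), defines, lineMap, false);
            }
        }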
Example #2
        public PreprocessorLine ParseNextLine(TextBuffer text, IDictionary defines) {
            PreprocessorLine line = null;

            do {
                lexer.SkipWhiteSpace();
                if (lexer.EOF) {
                    line = new PreprocessorLine(PreprocessorTokenType.EndOfLine);
                }
                else if (text.PeekChar() != '#') {
                    lexer.IgnoreRestOfLine();
                }
                else {
                    line = Parse(text, defines);
                }
            } while (line == null);

            return line;
        }
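ParseNextLine above keeps scanning until it either reaches the end of the buffer or lands on a line whose first non-whitespace character is '#'. A caller could therefore drive it with a loop roughly like the one below; the preprocessor variable, and the assumption that PreprocessorLine exposes its directive type, are illustrative rather than taken from the project.

        // Sketch: pull directive lines until the buffer is exhausted.
        PreprocessorLine ppLine;
        do {
            ppLine = preprocessor.ParseNextLine(text, defines);
            // react to #define/#undef/#if/... here, e.g. by updating 'defines'
        } while (ppLine.Type != PreprocessorTokenType.EndOfLine);   // Type property is assumed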
Example #3
File: Lexer.cs Project: fugaku/scriptsharp
        /// <summary>
        /// Lexes a block of C# text.
        /// Does not handle preprocessor directives.
        /// Will stop at EOF or before a # found as the first non-whitespace character
        /// on a line. 
        /// </summary>
        /// <param name="text"> Buffer containing the text to lex. </param>
        /// <param name="tokenList"> List of tokens to add to. </param>
        /// <param name="includeComments"> Should comment tokens be generated. </param>
        /// <returns> true if a preprocessor directive was found, or false on end of buffer. </returns>
        public bool LexBlock(TextBuffer text, IList tokenList, bool includeComments) {
            Debug.Assert(_keywords != null);
            Debug.Assert(_nameTable != null);

            Debug.Assert(_text == null);
            Debug.Assert(_tokenList == null);
            Debug.Assert(_lastLine == -1);

            _text = text;
            _tokenList = tokenList;
            _lastLine = text.Line - 1;
            _includeComments = includeComments;

            // get the tokens
            Token next = null;
            do {
                next = NextToken();

                if (next == null) {
                    // pre-processor directive
                    break;
                }

                if (next.Type != TokenType.Error && next.Type != TokenType.EOF) {
                    tokenList.Add(next);
                }
            } while (next.Type != TokenType.EOF);

            _tokenList = null;
            _text = null;
            _lastLine = -1;
            _includeComments = false;
            ClearPosition();

            return next == null;
        }
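Because LexBlock returns true exactly when it stops in front of a '#' line, the surrounding driver (LexFile in example #1, presumably) can alternate between ordinary lexing and directive handling. A rough sketch under that assumption, using the Parse(TextBuffer, IDictionary) signature shown in the later examples:

        // Sketch only: 'preprocessorParser' is an assumed instance of the directive parser.
        ArrayList tokens = new ArrayList();
        while (lexer.LexBlock(text, tokens, false)) {
            // LexBlock stopped before a '#': hand the directive line to the preprocessor parser.
            PreprocessorLine directive = preprocessorParser.Parse(text, defines);
            // ...evaluate #if/#elif/#else, record #define/#undef, update the LineMap for #line...
        }
        // LexBlock returned false: end of buffer; 'tokens' now holds the lexed tokens.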
Example #4
File: Lexer.cs Project: fugaku/scriptsharp
 /// <summary>
 /// Lexes a block of C# text.
 /// Does not handle preprocessor directives.
 /// Will stop at EOF or before a # found as the first non-whitespace character
 /// on a line. Does not include comment tokens.
 /// </summary>
 /// <param name="text"> Buffer containing the text to lex. </param>
 /// <param name="tokenList"> List of tokens to add to. </param>
 /// <returns> true if a preprocessor directive was found, or false on end of buffer. </returns>
 public bool LexBlock(TextBuffer text, IList tokenList) {
     return LexBlock(text, tokenList, false);
 }
Example #5
        public PreprocessorLine Parse(TextBuffer text, IDictionary defines)
        {
            this.text = text;
            this.defines = defines;

            Debug.Assert(text.PeekChar() == '#');
            text.NextChar();

            PreprocessorToken token = NextToken();
            PreprocessorTokenType type = token.Type;
            switch (type) {
                case PreprocessorTokenType.Define:
                case PreprocessorTokenType.Undef:
                    token = Eat(PreprocessorTokenType.Identifier);
                    EatEndOfLine();
                    if (token != null) {
                        return new PreprocessorDeclarationLine(type, ((PreprocessorIdentifierToken)token).Value);
                    }
                    else {
                        return null;
                    }

                case PreprocessorTokenType.Warning:
                case PreprocessorTokenType.Error:
                    return new PreprocessorControlLine(type, lexer.GetRestOfLine());

                case PreprocessorTokenType.Line:
                    // hidden, default
                    type = PeekType();
                    if (type == PreprocessorTokenType.Default || type == PreprocessorTokenType.Hidden) {
                        NextToken();
                        EatEndOfLine();
                        return new PreprocessorLine(type);
                    }

                    token = Eat(PreprocessorTokenType.Int);
                    if (token != null) {
                        int line = ((PreprocessorIntToken)token).Value;
                        string file = null;
                        if (PeekType() == PreprocessorTokenType.String) {
                            file = ((PreprocessorStringToken)NextToken()).Value;
                        }
                        EatEndOfLine();

                        return new PreprocessorLineNumberLine(line, file);
                    }
                    else {
                        lexer.IgnoreRestOfLine();
                        return null;
                    }

                case PreprocessorTokenType.If:
                case PreprocessorTokenType.Elif:
                    return new PreprocessorIfLine(type, EvalExpression());

                case PreprocessorTokenType.Else:
                case PreprocessorTokenType.Endif:
                    return new PreprocessorLine(type);

                case PreprocessorTokenType.Region:
                case PreprocessorTokenType.EndRegion:
                    lexer.IgnoreRestOfLine();
                    return new PreprocessorLine(type);

                case PreprocessorTokenType.Pragma:
                    lexer.IgnoreRestOfLine();
                    return new PreprocessorLine(type);
                default:
                    ReportError(PreprocessorError.UnexpectedDirective, token.Position);
                    return null;
            }
        }
Example #6
        public PreprocessorLine Parse(TextBuffer text, IDictionary defines)
        {
            this.text    = text;
            this.defines = defines;

            Debug.Assert(text.PeekChar() == '#');
            text.NextChar();

            PreprocessorToken     token = NextToken();
            PreprocessorTokenType type  = token.Type;

            switch (type)
            {
            case PreprocessorTokenType.Define:
            case PreprocessorTokenType.Undef:
                token = Eat(PreprocessorTokenType.Identifier);
                EatEndOfLine();
                if (token != null)
                {
                    return(new PreprocessorDeclarationLine(type, ((PreprocessorIdentifierToken)token).Value));
                }
                else
                {
                    return(null);
                }

            case PreprocessorTokenType.Warning:
            case PreprocessorTokenType.Error:
                return(new PreprocessorControlLine(type, lexer.GetRestOfLine()));

            case PreprocessorTokenType.Line:
                // hidden, default
                type = PeekType();
                if (type == PreprocessorTokenType.Default || type == PreprocessorTokenType.Hidden)
                {
                    NextToken();
                    EatEndOfLine();
                    return(new PreprocessorLine(type));
                }

                token = Eat(PreprocessorTokenType.Int);
                if (token != null)
                {
                    int    line = ((PreprocessorIntToken)token).Value;
                    string file = null;
                    if (PeekType() == PreprocessorTokenType.String)
                    {
                        file = ((PreprocessorStringToken)NextToken()).Value;
                    }
                    EatEndOfLine();

                    return(new PreprocessorLineNumberLine(line, file));
                }
                else
                {
                    lexer.IgnoreRestOfLine();
                    return(null);
                }

            case PreprocessorTokenType.If:
            case PreprocessorTokenType.Elif:
                return(new PreprocessorIfLine(type, EvalExpression()));

            case PreprocessorTokenType.Else:
            case PreprocessorTokenType.Endif:
                return(new PreprocessorLine(type));

            case PreprocessorTokenType.Region:
            case PreprocessorTokenType.EndRegion:
                lexer.IgnoreRestOfLine();
                return(new PreprocessorLine(type));

            case PreprocessorTokenType.Pragma:
                lexer.IgnoreRestOfLine();
                return(new PreprocessorLine(type));

            default:
                ReportError(PreprocessorError.UnexpectedDirective, token.Position);
                return(null);
            }
        }
Example #7
 /// <summary>
 /// Lexes a block of C# text.
 /// Does not handle preprocessor directives.
 /// Will stop at EOF or before a # found as the first non-whitespace character
 /// on a line. Does not include comment tokens.
 /// </summary>
 /// <param name="text"> Buffer containing the text to lex. </param>
 /// <param name="tokenList"> List of tokens to add to. </param>
 /// <returns> true if a preprocessor directive was found, or false on end of buffer. </returns>
 public bool LexBlock(TextBuffer text, IList tokenList)
 {
     return(LexBlock(text, tokenList, false));
 }
Example #8
        public PreprocessorToken NextToken(TextBuffer text)
        {
            _text = text;

            SkipWhiteSpace();
            BufferPosition position = text.Position;

            char ch = PeekChar();
            if (ch == '\0' || IsLineSeparator(ch)) {
                return NewPPToken(PreprocessorTokenType.EndOfLine, position);
            }

            ch = NextChar();
            switch (ch) {
                case '0':
                case '1':
                case '2':
                case '3':
                case '4':
                case '5':
                case '6':
                case '7':
                case '8':
                case '9': {
                        int intValue = (ch - '0');
                        while (IsDigit(PeekChar())) {
                            int value10 = intValue * 10;
                            if (value10 < intValue) {
                                // Overflow: report the error and still consume the digit so the scan advances.
                                ReportError(LexError.NumericConstantOverflow);
                                NextChar();
                            }
                            else {
                                intValue = value10 + (NextChar() - '0');
                            }
                        }

                        return new PreprocessorIntToken(intValue, position);
                    }

                case '=':
                    if (PeekChar() == '=') {
                        NextChar();
                        return NewPPToken(PreprocessorTokenType.EqualEqual, position);
                    }
                    break;

                case '!':
                    if (PeekChar() == '=') {
                        NextChar();
                        return NewPPToken(PreprocessorTokenType.NotEqual, position);
                    }
                    else {
                        return NewPPToken(PreprocessorTokenType.Not, position);
                    }

                case '&':
                    if (PeekChar() == '&') {
                        NextChar();
                        return NewPPToken(PreprocessorTokenType.And, position);
                    }
                    break;

                case '|':
                    if (PeekChar() == '|') {
                        NextChar();
                        return NewPPToken(PreprocessorTokenType.Or, position);
                    }
                    break;

                case '(':
                    return NewPPToken(PreprocessorTokenType.OpenParen, position);

                case ')':
                    return NewPPToken(PreprocessorTokenType.CloseParen, position);

                case '"':
                    _value.Length = 0;
                    while ((ch = PeekChar()) != '"') {
                        if (EOF) {
                            ReportError(LexError.UnexpectedEndOfFileString);
                            break;
                        }
                        else if (IsLineSeparator(ch)) {
                            ReportError(LexError.WhiteSpaceInConstant);
                            break;
                        }
                        _value.Append(ch);
                        NextChar();
                    }
                    NextChar();
                    return new PreprocessorStringToken(_value.ToString(), position);

                case '/':
                    if (PeekChar() == '/') {
                        IgnoreRestOfLine();
                        return NewPPToken(PreprocessorTokenType.EndOfLine, position);
                    }
                    break;

                default:
                    if (IsLineSeparator(ch)) {
                        return NewPPToken(PreprocessorTokenType.EndOfLine, position);
                    }

                    if (!IsIdentifierChar(ch)) {
                        break;
                    }

                    _value.Length = 0;
                    _value.Append(ch);
                    while (IsIdentifierChar(PeekChar())) {
                        _value.Append(NextChar());
                    }
                    Name id = _nameTable.Add(_value);
                    PreprocessorTokenType type = _keywords.IsKeyword(id);
                    if (type != PreprocessorTokenType.Invalid) {
                        return NewPPToken(type, position);
                    }
                    else {
                        return new PreprocessorIdentifierToken(id, position);
                    }
            }

            return NewPPToken(PreprocessorTokenType.Unknown, position);
        }
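NextToken above recognizes integers, strings, the operators used in #if expressions, parentheses, and identifiers that may turn out to be directive keywords; anything else comes back as Unknown. As a hedged usage sketch, draining the tokens of a directive expression might look like this (the ppLexer instance is assumed; the char[] TextBuffer constructor matches example #1):

        // Sketch: tokenize the expression part of a directive such as "#if DEBUG && !RELEASE".
        TextBuffer buffer = new TextBuffer("DEBUG && !RELEASE".ToCharArray());
        PreprocessorToken token;
        do {
            token = ppLexer.NextToken(buffer);
            // expected sequence: Identifier(DEBUG), And, Not, Identifier(RELEASE), EndOfLine
        } while (token.Type != PreprocessorTokenType.EndOfLine);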