Code Example #1
File: Preprocessor.cs  Project: kohoutech/Black-C
        public void handleUndefDirective()
        {
            Console.WriteLine("saw #undef");
            List <PPToken> tokens = readRestOfDirective(false);
            PPToken        tok    = tokens[0];

            Macro.undefineMacro(tok.str);
        }
Code Example #2
File: Tokenizer.cs  Project: kohoutech/Black-C
        //- token handling ----------------------------------------------------

        public PPToken getPPToken()
        {
            PPToken ppTok = null;

            if (ppTokens.Count != 0)
            {
                ppTok = ppTokens[ppTokens.Count - 1];
                ppTokens.RemoveAt(ppTokens.Count - 1);
            }
            else
            {
                ppTok = pp.getPPToken();
            }
            return(ppTok);
        }
Code Example #3
File: Preprocessor.cs  Project: kohoutech/Black-C
        public List <PPToken> readRestOfDirective(bool keepSpaces)
        {
            List <PPToken> tokens = new List <PPToken>();
            PPToken        tok    = source.getPPToken();

            while (tok.type != PPTokenType.EOLN)
            {
                if ((tok.type != PPTokenType.SPACE) || keepSpaces)
                {
                    tokens.Add(tok);
                }
                tok = source.getPPToken();
            }
            return(tokens);
        }
Code Example #4
File: Preprocessor.cs  Project: kohoutech/Black-C
        public void handleIfndefDirective()
        {
            Console.WriteLine("saw #ifndef");
            List <PPToken> tokens = readRestOfDirective(false);

            if (skippingTokens)
            {
                ifStack.Add(-3);
            }
            else
            {
                PPToken token = tokens[0];
                Macro   macro = Macro.lookupMacro(token.str);
                int     skip  = (macro == null) ? 1 : -1;
                ifStack.Add(skip);
            }
            skippingTokens = ((ifStack[ifStack.Count - 1]) < 0);
        }
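
The integers on ifStack encode the state of each open conditional: a positive entry means the current branch is active, a negative entry means it is being skipped, and -3 marks a conditional opened while an enclosing region was already being skipped; skippingTokens is simply the sign of the top entry. A minimal stand-alone sketch of that convention (ConditionalStack, EnterIfndef and ExitConditional are hypothetical names, and the #endif handling shown is a plausible counterpart that the excerpts here do not include):

    using System;
    using System.Collections.Generic;

    // Illustrative only: a simplified model of the ifStack convention visible
    // in handleIfndefDirective. Positive entries mark an active branch,
    // negative entries a skipped one; -3 marks a conditional opened inside a
    // region that was already being skipped.
    class ConditionalStack
    {
        private readonly List<int> ifStack = new List<int>();

        public bool SkippingTokens { get; private set; }

        // Called for a directive like "#ifndef NAME": active if NAME is not defined.
        public void EnterIfndef(bool macroIsDefined)
        {
            if (SkippingTokens)
            {
                ifStack.Add(-3);                       // nested inside a skipped region
            }
            else
            {
                ifStack.Add(macroIsDefined ? -1 : 1);  // skip the branch if the macro exists
            }
            SkippingTokens = ifStack[ifStack.Count - 1] < 0;
        }

        // Hypothetical "#endif" counterpart: pop and recompute the skipping state.
        public void ExitConditional()
        {
            ifStack.RemoveAt(ifStack.Count - 1);
            SkippingTokens = ifStack.Count > 0 && ifStack[ifStack.Count - 1] < 0;
        }

        static void Main()
        {
            var cs = new ConditionalStack();
            cs.EnterIfndef(macroIsDefined: false);     // #ifndef UNDEFINED -> emit
            Console.WriteLine(cs.SkippingTokens);      // False
            cs.EnterIfndef(macroIsDefined: true);      // nested #ifndef DEFINED -> skip
            Console.WriteLine(cs.SkippingTokens);      // True
            cs.ExitConditional();                      // #endif
            Console.WriteLine(cs.SkippingTokens);      // False
        }
    }
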
Code Example #5
        public static PPToken getfrag()
        {
            PPToken result = curMacro.invokeList[curMacro.curPos++];

            if (curMacro.curPos == curMacro.invokeList.Count)
            {
                Console.WriteLine("leaving macro " + curMacro.name);
                macroList.RemoveAt(macroList.Count - 1);
                if (macroList.Count > 0)
                {
                    curMacro = macroList[macroList.Count - 1];
                }
                else
                {
                    curMacro = null;
                }
            }
            return(result);
        }
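
getfrag replays a macro's recorded invocation tokens one at a time and, once the list is exhausted, pops that macro so replay resumes in the enclosing macro, or stops when none is left. A stand-alone sketch of the same replay-stack idea, with a hypothetical Replayer class and plain strings in place of PPToken:

    using System;
    using System.Collections.Generic;

    // Illustrative only: a replay stack mirroring getfrag's behaviour. Each
    // entry is a recorded token list with a read position; when one list is
    // exhausted it is popped and replay continues in the enclosing entry.
    class Replayer
    {
        private class Entry
        {
            public List<string> Tokens;
            public int Pos;
        }

        private readonly List<Entry> stack = new List<Entry>();

        public bool Active => stack.Count > 0;

        public void Push(List<string> tokens)
        {
            stack.Add(new Entry { Tokens = tokens, Pos = 0 });
        }

        // Return the next recorded token; pop finished entries as getfrag does.
        public string Next()
        {
            Entry top = stack[stack.Count - 1];
            string result = top.Tokens[top.Pos++];
            if (top.Pos == top.Tokens.Count)
            {
                stack.RemoveAt(stack.Count - 1);
            }
            return result;
        }

        static void Main()
        {
            var r = new Replayer();
            r.Push(new List<string> { "a", "b" });
            r.Push(new List<string> { "x" });          // nested macro replays first
            while (r.Active)
            {
                Console.Write(r.Next() + " ");         // x a b
            }
            Console.WriteLine();
        }
    }
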
Code Example #6
File: Preprocessor.cs  Project: kohoutech/Black-C
        //- preprocessing only ------------------------------------------------

        public void preprocessFile(String outname)
        {
            List <String> lines = new List <string>();
            StringBuilder line  = new StringBuilder();

            PPToken tok = getPPToken();

            while (tok.type != PPTokenType.EOF)
            {
                Console.WriteLine(tok.ToString());
                tok = getPPToken();
            }
            Console.WriteLine(tok.ToString());

            //while (frag.type != PPTokenType.EOF)
            //{
            //    if (frag.type == PPTokenType.EOLN)
            //    {
            //        lines.Add(line.ToString());
            //        line.Clear();
            //    }
            //    else
            //    {
            //        line.Append(frag.ToString());
            //    }
            //    frag = getPPToken();
            //}
            //if (line.Length > 0)
            //{
            //    lines.Add(line.ToString());
            //}

            //if not saving spaces in output, compress multiple blank lines into one blank line
            //if (!parser.saveSpaces)
            //{
            //    lines = removeBlankLines(lines);
            //}

            //File.WriteAllLines(outname, lines);
        }
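
As it stands the method only prints every preprocessing token; the commented-out block outlines how the tokens were meant to be reassembled into output lines and written to outname. A minimal stand-alone version of that reassembly, assuming tokens arrive as plain strings with "<eoln>" standing in for PPTokenType.EOLN (LineAssembler and the output file name are made up for the example):

    using System;
    using System.Collections.Generic;
    using System.IO;
    using System.Text;

    // Illustrative only: rebuild output lines from an EOLN-delimited token
    // stream, in the spirit of the commented-out block in preprocessFile.
    class LineAssembler
    {
        public static List<string> Assemble(IEnumerable<string> tokens)
        {
            var lines = new List<string>();
            var line = new StringBuilder();
            foreach (string tok in tokens)
            {
                if (tok == "<eoln>")            // stand-in for PPTokenType.EOLN
                {
                    lines.Add(line.ToString());
                    line.Clear();
                }
                else
                {
                    line.Append(tok);
                }
            }
            if (line.Length > 0)
            {
                lines.Add(line.ToString());     // flush a final unterminated line
            }
            return lines;
        }

        static void Main()
        {
            var lines = Assemble(new[] { "int", " ", "x", ";", "<eoln>", "return", " ", "x", ";" });
            File.WriteAllLines("out.pp", lines);                // example output name only
            Console.WriteLine(string.Join(" | ", lines));       // int x; | return x;
        }
    }
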
Code Example #7
File: Preprocessor.cs  Project: kohoutech/Black-C
        //- directive handling ------------------------------------------------

        //(6.10) Preprocessing directives

        //handle directive, this will read all the pp tokens to the eoln in the directive line
        public void handleDirective()
        {
            PPToken tok = source.getPPToken();

            while (tok.type == PPTokenType.SPACE)       //skip space(s) & get directive name
            {
                tok = source.getPPToken();
            }

            if (tok.type == PPTokenType.EOLN)          //skip empty directives, ie "#  <eoln>"
            {
                return;
            }

            if (tok.type == PPTokenType.WORD)
            {
                switch (tok.str)
                {
                case "include":
                    if (!skippingTokens)
                    {
                        handleIncludeDirective();
                    }
                    break;

                case "define":
                    if (!skippingTokens)
                    {
                        handleDefineDirective();
                    }
                    break;

                case "undef":
                    if (!skippingTokens)
                    {
                        handleUndefDirective();
                    }
                    break;

                case "if":
                    handleIfDirective();
                    break;

                case "ifdef":
                    handleIfdefDirective();
                    break;

                case "ifndef":
                    handleIfndefDirective();
                    break;

                case "elif":
                    if (!skippingTokens)
                    {
                        handleElifDirective();
                    }
                    break;

                case "else":
                    handleElseDirective();
                    break;

                case "endif":
                    handleEndifDirective();
                    break;

                case "line":
                    if (!skippingTokens)
                    {
                        handleLineDirective();
                    }
                    break;

                case "error":
                    if (!skippingTokens)
                    {
                        handleErrorDirective();
                    }
                    break;

                case "pragma":
                    if (!skippingTokens)
                    {
                        handlePragmaDirective();
                    }
                    break;

                default:
                    //parser.error("saw unknown directive #" + tok.str + " at " + tok.loc.ToString());
                    readRestOfDirective(false);
                    break;
                }
            }
            else
            {
                //parser.error("invalid directive #" + pptok.str + " at " + pptok.loc.ToString());
                readRestOfDirective(false);
            }
        }
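
Note how the gating works: the directives that open, flip, or close a conditional region (#if, #ifdef, #ifndef, #else, #endif, though not #elif in this excerpt) are dispatched even while tokens are being skipped, so nesting stays balanced, while everything else is ignored under skippingTokens. Purely as an illustration of the same gating in a table-driven shape, and not how Black-C implements it, a sketch with a hypothetical DirectiveTable:

    using System;
    using System.Collections.Generic;

    // Illustrative only: a table-driven alternative to the switch in
    // handleDirective. Conditional-structure directives run even while
    // skipping so that nested regions stay balanced.
    class DirectiveTable
    {
        private readonly Dictionary<string, Action> handlers = new Dictionary<string, Action>();
        private readonly HashSet<string> conditionals =
            new HashSet<string> { "if", "ifdef", "ifndef", "else", "endif" };

        public bool SkippingTokens { get; set; }

        public void Register(string name, Action handler)
        {
            handlers[name] = handler;
        }

        // Dispatch one directive name with the same gating as handleDirective.
        public void Dispatch(string name)
        {
            Action handler;
            if (!handlers.TryGetValue(name, out handler))
            {
                Console.WriteLine("unknown directive #" + name);   // stand-in for error reporting
                return;
            }
            if (SkippingTokens && !conditionals.Contains(name))
            {
                return;                                            // non-conditionals are ignored while skipping
            }
            handler();
        }

        static void Main()
        {
            var table = new DirectiveTable();
            table.Register("define", () => Console.WriteLine("define handled"));
            table.Register("endif", () => Console.WriteLine("endif handled"));
            table.SkippingTokens = true;
            table.Dispatch("define");   // suppressed while skipping
            table.Dispatch("endif");    // still handled
        }
    }
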
Code Example #8
File: Preprocessor.cs  Project: kohoutech/Black-C
        //- pp token stream handling ------------------------------------------

        //handles macro expansion & eof in include files
        //handle directives in the scanner's fragment stream, will be sent to tokenizer as EOLN fragments
        public PPToken getPPToken()
        {
            PPToken tok = null;

            while (true)
            {
                tok = source.getPPToken();
                //Console.WriteLine("pp token = " + tok.ToString());

                //check for directive as first non-space frag at start of line
                if (atLineStart)
                {
                    if ((tok.type == PPTokenType.PUNCT) && (tok.str[0] == '#'))
                    {
                        handleDirective();
                        tok = new PPToken(PPTokenType.EOLN, "<eoln>");        //cur pp token will be left as the EOLN at end of directive line
                    }
                    else
                    {
                        atLineStart = (tok.type == PPTokenType.SPACE || tok.type == PPTokenType.COMMENT);
                    }
                }
                if ((tok.type == PPTokenType.EOLN) || (tok.type == PPTokenType.EOF))
                {
                    atLineStart = true;
                }

                //check for a macro if not skipping tokens
                if (tok.type == PPTokenType.WORD && !skippingTokens)
                {
                    Macro macro = Macro.lookupMacro(tok.str);
                    if (macro != null)
                    {
                        //invokeMacro(macro);                 //start macro running
                        continue;                               //and loop around to get first macro token (if not empty)
                    }
                }

                //check if we've hit the end of macro. if this is a macro, pull it off the stack
                //and resume scanning at the point we stopped in the previous source
                //note: check for macro end first because a file can contain a macro, but a macro can't include a file
                if ((tok.type == PPTokenType.EOF) && (macroStack.Count > 0))
                {
                    macroStack.RemoveAt(macroStack.Count - 1);
                    if (macroStack.Count > 0)
                    {
                        source = macroStack[macroStack.Count - 1];
                    }
                    else
                    {
                        source = sourceStack[sourceStack.Count - 1];
                    }
                    continue;                                           //get next token from including source
                }

                //check if we've hit the end of file. if this is an include file, pull it off the stack
                //and resume scanning at the point we stopped in the including file
                //we return the EOF token from the main file only
                if ((tok.type == PPTokenType.EOF) && (sourceStack.Count > 1))
                {
                    //Console.WriteLine("closing include file " + sourceStack[sourceStack.Count - 1].filename);
                    sourceStack.RemoveAt(sourceStack.Count - 1);
                    source = sourceStack[sourceStack.Count - 1];
                    continue;                                           //get next token from including source if not at main file
                }

                if (!skippingTokens)
                {
                    break;
                }
            }

            return(tok);
        }
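
On EOF this getPPToken first unwinds the macro stack and then the include-file stack, and only the EOF of the outermost (main) source is returned to the caller. A compact stand-alone sketch of that unwinding, using hypothetical enumerator-backed sources and "<eof>" as the sentinel:

    using System;
    using System.Collections.Generic;

    // Illustrative only: nested sources where exhausting an inner source
    // resumes the enclosing one, and only the outermost EOF is returned.
    class NestedSources
    {
        private readonly List<IEnumerator<string>> stack = new List<IEnumerator<string>>();

        public void Push(IEnumerable<string> tokens)
        {
            stack.Add(tokens.GetEnumerator());
        }

        public string Read()
        {
            while (true)
            {
                IEnumerator<string> top = stack[stack.Count - 1];
                if (top.MoveNext())
                {
                    return top.Current;
                }
                if (stack.Count > 1)                 // inner source done: pop and keep reading
                {
                    stack.RemoveAt(stack.Count - 1);
                    continue;
                }
                return "<eof>";                      // only the outermost source reports EOF
            }
        }

        static void Main()
        {
            var src = new NestedSources();
            src.Push(new[] { "main1", "main2" });    // outer (main) source
            src.Push(new[] { "inc1" });              // inner (included) source
            for (string t = src.Read(); t != "<eof>"; t = src.Read())
            {
                Console.Write(t + " ");              // inc1 main1 main2
            }
            Console.WriteLine();
        }
    }
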
Code Example #9
        //(5.1.1.2) translation phase 3 : scan source line into preprocessing tokens
        override public PPToken getPPToken()
        {
            PPToken tok     = null;
            int     tokpos  = linepos;
            int     tokline = linenum;

            tokstr.Clear();

            char ch = getChar();

            nextChar();
            while (true)
            {
                //end of file - check if this isn't a stray 0x0 char in file, if so pass it on as punctuation
                if (ch == '\0' && atEof)
                {
                    tok = new PPToken(PPTokenType.EOF, "<eof>");
                    break;
                }

                //end of line - does not include eolns in block comments or spliced lines
                if (ch == '\n')
                {
                    tok = new PPToken(PPTokenType.EOLN, "<eoln>");
                    break;
                }

                if (isSpace(ch))
                {
                    skipWhitespace();
                    tok = new PPToken(PPTokenType.SPACE, " ");
                    break;
                }

                //line comment
                if (ch == '/' && (getChar() == '/'))
                {
                    skipLineComment();
                    ch = ' ';                   //replace comment with single space
                    continue;
                }

                //block comment
                if (ch == '/' && (getChar() == '*'))
                {
                    skipBlockComment();
                    ch = ' ';                   //replace comment with single space
                    continue;
                }

                //L is a special case since it can start long char constants or long string constants, as well as identifiers
                if (ch == 'L')
                {
                    if (getChar() == '\'')
                    {
                        string chstr = scanCharLiteral(true);
                        tok = new PPToken(PPTokenType.CHAR, chstr);
                        break;
                    }
                    else if (getChar() == '"')
                    {
                        string sstr = scanString(true);
                        tok = new PPToken(PPTokenType.STRING, sstr);
                        break;
                    }
                }

                //if L doesn't start a string or char constant, it falls through to here
                //identifier
                if (isAlpha(ch))
                {
                    string idstr = scanIdentifier(ch);
                    tok = new PPToken(PPTokenType.WORD, idstr);
                    break;
                }

                //numeric constant
                //'.' can start a float const
                if (isDigit(ch) || (ch == '.' && isDigit(getChar())))
                {
                    bool   isInt;
                    string numstr = scanNumber(ch, out isInt);
                    tok = new PPToken(isInt ? PPTokenType.INTEGER : PPTokenType.FLOAT, numstr);
                    break;
                }

                //char constant
                if (ch == '\'')
                {
                    string chstr = scanCharLiteral(false);
                    tok = new PPToken(PPTokenType.CHAR, chstr);
                    break;
                }

                //string constant
                if (ch == '"')
                {
                    string sstr = scanString(false);
                    tok = new PPToken(PPTokenType.STRING, sstr);
                    break;
                }

                //translate chars before handling punctuation
                ch = TranslateDiagraphs(ch);

                //anything else is punctuation
                tok = new PPToken(PPTokenType.PUNCT, "" + ch);
                break;
            }

            tok.pos  = tokpos;
            tok.line = tokline;
            return(tok);
        }
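
The scanner works from one consumed character (ch) plus a one-character peek (getChar) to recognise two-character openers such as //, /* and the L prefix on wide literals. A small stand-alone sketch of that consume-plus-peek pattern over a plain string (CharCursor and its members are hypothetical, not the project's API):

    using System;

    // Illustrative only: a consume-plus-peek cursor, the pattern the scanner
    // uses to spot two-character openers like "//" and "/*".
    class CharCursor
    {
        private readonly string text;
        private int pos;

        public CharCursor(string text) { this.text = text; }

        public char Next() => pos < text.Length ? text[pos++] : '\0';  // consume one character
        public char Peek() => pos < text.Length ? text[pos]   : '\0';  // look ahead without consuming

        static void Main()
        {
            var cur = new CharCursor("//rest");
            char ch = cur.Next();
            if (ch == '/' && cur.Peek() == '/')
            {
                Console.WriteLine("line comment");   // mirrors the ch == '/' && getChar() == '/' test
            }
        }
    }
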
Code Example #10
File: Tokenizer.cs  Project: kohoutech/Black-C
        //convert preprocessor tokens (strings) into c tokens as input for the parser

        public Token getToken()
        {
            Token   tok = null;
            PPToken ppTok;
            PPToken nextppTok;

            while (true)
            {
                ppTok = getPPToken();

                //ignore spaces, comments & eolns
                if ((ppTok.type == PPTokenType.SPACE) || (ppTok.type == PPTokenType.COMMENT) || (ppTok.type == PPTokenType.EOLN))
                {
                    continue;
                }

                //check if word is keyword or identifier
                if (ppTok.type == PPTokenType.WORD)
                {
                    if (keywords.ContainsKey(ppTok.str))
                    {
                        tok = new Token(keywords[ppTok.str]);
                    }
                    else
                    {
                        tok = new IdentToken(ppTok.str);
                    }
                    break;
                }

                //convert int / float / string / char str into constant value
                if (ppTok.type == PPTokenType.INTEGER)
                {
                    tok = ParseInteger(ppTok.str);
                    break;
                }

                if (ppTok.type == PPTokenType.FLOAT)
                {
                    tok = ParseFloat(ppTok.str);
                    break;
                }

                if (ppTok.type == PPTokenType.STRING)
                {
                    tok = ParseString(ppTok.str);
                    StringConstToken stok1 = (StringConstToken)tok;

                    //convert any subsequent pp strings into string tokens & merge them together
                    PPToken pptok2 = getPPToken();
                    while (pptok2.type == PPTokenType.STRING)
                    {
                        StringConstToken stok2 = ParseString(pptok2.str);
                        stok1.val = stok1.val + stok2.val;
                        if (!stok1.isWide)
                        {
                            stok1.isWide = stok2.isWide;
                        }
                        pptok2 = getPPToken();
                    }
                    replacePPToken(pptok2);
                    break;
                }

                if (ppTok.type == PPTokenType.CHAR)
                {
                    tok = ParseChar(ppTok.str);
                    break;
                }

                //convert single punctuation chars into punctuation tokens, combining as necessary
                //need 2 lookaheads at most for '...' token
                if (ppTok.type == PPTokenType.PUNCT)
                {
                    char c = ppTok.str[0];
                    switch (c)
                    {
                    case '[':
                        tok = new Token(TokenType.LBRACKET);
                        break;

                    case ']':
                        tok = new Token(TokenType.RBRACKET);
                        break;

                    case '(':
                        tok = new Token(TokenType.LPAREN);
                        break;

                    case ')':
                        tok = new Token(TokenType.RPAREN);
                        break;

                    case '{':
                        tok = new Token(TokenType.LBRACE);
                        break;

                    case '}':
                        tok = new Token(TokenType.RBRACE);
                        break;

                    case '+':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '+'))
                        {
                            tok = new Token(TokenType.PLUSPLUS);
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.PLUSEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.PLUS);
                        }
                        break;

                    case '-':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '-'))
                        {
                            tok = new Token(TokenType.MINUSMINUS);
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.MINUSEQUAL);
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '>'))
                        {
                            tok = new Token(TokenType.ARROW);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.MINUS);
                        }
                        break;

                    case '*':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.MULTEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.STAR);
                        }
                        break;

                    case '/':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.SLASHEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.SLASH);
                        }
                        break;

                    case '%':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.PERCENTEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.PERCENT);
                        }
                        break;

                    case '&':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '&'))
                        {
                            tok = new Token(TokenType.AMPAMP);
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.AMPEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.AMPERSAND);
                        }
                        break;

                    case '|':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '|'))
                        {
                            tok = new Token(TokenType.BARBAR);
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.BAREQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.BAR);
                        }
                        break;

                    case '~':
                        tok = new Token(TokenType.TILDE);
                        break;

                    case '^':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.CARETEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.CARET);
                        }
                        break;

                    case '=':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.EQUALEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.EQUAL);
                        }
                        break;

                    case '!':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.NOTEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.EXCLAIM);
                        }
                        break;

                    case '<':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '<'))
                        {
                            PPToken tok2 = getPPToken();
                            if ((tok2.type == PPTokenType.PUNCT) && (tok2.str[0] == '='))
                            {
                                tok = new Token(TokenType.LESSLESSEQUAL);       //<<=
                            }
                            else
                            {
                                replacePPToken(tok2);
                                tok = new Token(TokenType.LESSLESS);
                            }
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.LESSEQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.LESSTHAN);
                        }
                        break;

                    case '>':
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '>'))
                        {
                            PPToken tok2 = getPPToken();
                            if ((tok2.type == PPTokenType.PUNCT) && (tok2.str[0] == '='))
                            {
                                tok = new Token(TokenType.GTRGTREQUAL);       //>>=
                            }
                            else
                            {
                                replacePPToken(tok2);
                                tok = new Token(TokenType.GTRGTR);
                            }
                        }
                        else if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '='))
                        {
                            tok = new Token(TokenType.GTREQUAL);
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                            tok = new Token(TokenType.GTRTHAN);
                        }
                        break;

                    case ',':
                        tok = new Token(TokenType.COMMA);
                        break;

                    case '.':
                        bool threedots = false;
                        nextppTok = getPPToken();
                        if ((nextppTok.type == PPTokenType.PUNCT) && (nextppTok.str[0] == '.'))
                        {
                            PPToken tok2 = getPPToken();
                            if ((tok2.type == PPTokenType.PUNCT) && (tok2.str[0] == '.'))
                            {
                                tok       = new Token(TokenType.ELLIPSIS);      //...
                                threedots = true;
                            }
                            else
                            {
                                replacePPToken(nextppTok);
                                replacePPToken(tok2);
                            }
                        }
                        else
                        {
                            replacePPToken(nextppTok);
                        }
                        if (!threedots)
                        {
                            tok = new Token(TokenType.PERIOD);
                        }
                        break;

                    case '?':
                        tok = new Token(TokenType.QUESTION);
                        break;

                    case ':':
                        tok = new Token(TokenType.COLON);
                        break;

                    case ';':
                        tok = new Token(TokenType.SEMICOLON);
                        break;

                    default:
                        tok = new Token(TokenType.ERROR);
                        break;
                    }
                    break;
                }

                //last but not least - end of file
                if (ppTok.type == PPTokenType.EOF)
                {
                    tok = new Token(TokenType.EOF);
                    break;
                }
            }

            return(tok);
        }
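
The STRING case implements C's adjacent-string-literal concatenation: consecutive string preprocessing tokens are merged into a single constant, and the first non-string look-ahead is handed back via replacePPToken. A stand-alone sketch of just that merge step, with quoted plain strings standing in for PPToken and a hypothetical StringMerger helper:

    using System;
    using System.Collections.Generic;

    // Illustrative only: merge adjacent string literals the way getToken's
    // STRING case does. 'first' is the already-parsed value of the first
    // literal; quoted raw tokens that follow are appended, and the first
    // non-string token is reported so the caller can push it back
    // (replacePPToken in the real code).
    class StringMerger
    {
        public static string MergeAdjacent(Queue<string> rawTokens, string first, out string pushBack)
        {
            string merged = first;
            pushBack = null;
            while (rawTokens.Count > 0)
            {
                string next = rawTokens.Dequeue();
                if (next.StartsWith("\""))          // stand-in for type == PPTokenType.STRING
                {
                    merged += next.Trim('"');
                }
                else
                {
                    pushBack = next;                // not a string literal: stop merging here
                    break;
                }
            }
            return merged;
        }

        static void Main()
        {
            var rest = new Queue<string>(new[] { "\"wor\"", "\"ld\"", ";" });
            string pushBack;
            string merged = MergeAdjacent(rest, "hello ", out pushBack);
            Console.WriteLine(merged);              // hello world
            Console.WriteLine(pushBack);            // ;
        }
    }
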
Code Example #11
File: Tokenizer.cs  Project: kohoutech/Black-C
 public void replacePPToken(PPToken ppTok)
 {
     ppTokens.Add(ppTok);
 }
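
replacePPToken is the push-back half of the look-ahead scheme used throughout getToken (Code Example #10): read one token ahead, and if it does not extend the current operator, put it back so the next getPPToken call (Code Example #2) returns it first. A self-contained sketch of that LIFO push-back pattern, with a hypothetical PushBackReader and plain strings instead of PPToken:

    using System;
    using System.Collections.Generic;

    // Illustrative only: a push-back buffer mirroring the getPPToken /
    // replacePPToken pair, with plain strings standing in for PPToken.
    class PushBackReader
    {
        private readonly Queue<string> source;                           // underlying token stream
        private readonly List<string> pushedBack = new List<string>();   // LIFO push-back list

        public PushBackReader(IEnumerable<string> tokens)
        {
            source = new Queue<string>(tokens);
        }

        // Return the most recently pushed-back token if any, else pull from the source.
        public string Read()
        {
            if (pushedBack.Count != 0)
            {
                string tok = pushedBack[pushedBack.Count - 1];
                pushedBack.RemoveAt(pushedBack.Count - 1);
                return tok;
            }
            return source.Count > 0 ? source.Dequeue() : "<eof>";
        }

        // Put an unconsumed look-ahead token back; it is returned first next time.
        public void PushBack(string tok)
        {
            pushedBack.Add(tok);
        }

        static void Main()
        {
            var r = new PushBackReader(new[] { "+", "x", ";" });
            string first = r.Read();          // "+"
            string ahead = r.Read();          // "x" (look-ahead)
            if (ahead != "+" && ahead != "=") // not "++" or "+=": plain "+"
            {
                r.PushBack(ahead);            // put the look-ahead back unread
            }
            Console.WriteLine(first);         // +
            Console.WriteLine(r.Read());      // x again, from the push-back list
        }
    }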