Example #1
 //static int appendKeyword(CTX, kKonohaSpace *ks, kArray *tls, int s, int e, kArray *dst, kToken **tkERR)
 private int appendKeyword(IList<Token> tokens, int start, int end, IList<Token> tokensDst, out Token errorToken)
 {
     int next = start; // don't add
     Token tk = tokens[start];
     if (tk.Type < TokenType.OPERATOR)
     {
         tk.Keyword = (KeywordType)tk.Type;
     }
     if (tk.Type == TokenType.SYMBOL)
     {
        tk.IsResolved(ctx, ks); // result unused: called for its keyword-resolving side effect
     }
     else if (tk.Type == TokenType.USYMBOL)
     {
         if (!tk.IsResolved(ctx, ks))
         {
            throw new NotImplementedException();
            // unreachable below: kept as a sketch of the original class lookup
            //KonohaClass ct = kKonohaSpace_getCT(ks, null/*FIXME*/, tk.Text, tk.Text.Length, TY_unknown);
             object ct = null;
             if (ct != null)
             {
                 tk.Keyword = KeywordType.Type;
                 //tk.Type = ct->cid;
             }
         }
     }
     else if (tk.Type == TokenType.OPERATOR)
     {
         if (!tk.IsResolved(ctx, ks))
         {
             uint errref = ctx.SUGAR_P(ReportLevel.ERR, tk.ULine, 0, "undefined token: {0}", tk.Text);
             tk.toERR(this.ctx, errref);
             errorToken = tk;
             return end;
         }
     }
     else if (tk.Type == TokenType.CODE)
     {
         tk.Keyword = KeywordType.Brace;
     }
     if (tk.IsType)
     {
         while (next + 1 < end)
         {
             Token tkN = tokens[next + 1];
             if (tkN.TopChar != '[')
                 break;
             List<Token> abuf = new List<Token>();
             int atop = abuf.Count;
             next = makeTree(TokenType.AST_BRANCET, tokens, next + 1, end, ']', abuf, out errorToken);
             if (abuf.Count > atop)
             {
                 tk.ResolveType(this.ctx, abuf[atop]);
             }
         }
     }
     if (tk.Keyword > KeywordType.Expr)
     {
         tokensDst.Add(tk);
     }
     errorToken = null;
     return next;
 }
Example #2
 // static int lookAheadKeyword(kArray *tls, int s, int e, kToken *rule)
 public int lookAheadKeyword(IList<Token> tls, int s, int e, Token rule)
 {
     int i;
     for (i = s; i < e; i++)
     {
         Token tk = tls[i];
         if (rule.Keyword == tk.Keyword)
             return i;
     }
     return -1;
 }
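A minimal usage sketch for lookAheadKeyword; the variables tokens, start, end, and ruleToken are hypothetical, not from the original source:

 // Find the first token in [start, end) whose keyword matches the rule token,
 // e.g. to locate a keyword that a syntax rule expects inside a statement.
 int idx = lookAheadKeyword(tokens, start, end, ruleToken);
 if (idx < 0)
 {
     // the rule's keyword does not occur in this window
 }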
Example #3
 bool isUnaryOp(Context ctx, Token tk)
 {
     Syntax syn = ks.GetSyntax(tk.Keyword);
     return syn != null && syn.Op1 != null;
 }
Example #4
        static int TokenizeSymbol(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            int pos = tokStart;
            string ts = tenv.Source;

            while (pos < ts.Length && IsSymbolic(ts[pos]))
                ++pos;

            token = new Token(TokenType.SYMBOL, ts.Substring(tokStart, pos - tokStart), tokStart);
            return pos;
        }
Example #5
 static int TokenizeUndefined(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
 {
     token = null; // unrecognized character class: no token is produced
     return tokStart; // position unchanged
 }
Example #6
        static int TokenizeBlock(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            string ts = tenv.Source;
            char ch = '\0';
            int pos = tokStart + 1;
            int level = 1;
            FTokenizer[] fmat = tenv.TokenizerMatrix;

            token = null;

            while (pos < ts.Length)
            {
                ch = ts[pos];
                if (ch == '}')
                {
                    level--;
                    if (level == 0)
                    {
                        token = new Token(TokenType.CODE, ts.Substring(tokStart + 1, pos - 1 - tokStart), tokStart + 1);
                        return pos + 1;
                    }
                    pos++;
                }
                else if (ch == '{')
                {
                    level++;
                    pos++;
                }
                else
                {
                    // delegate to the tokenizer for this character class so that
                    // braces inside strings and comments don't disturb the nesting level
                    var f = fmat[(int)charTypeMatrix[ch]];
                    pos = f(ctx, out token, tenv, pos, null);
                }
            }
            return pos;
        }
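For context, a sketch of how the tokenizer matrix consulted above could be wired. The FTokenizer signature is inferred from the static Tokenize* methods in these examples; the driver loop and the name TokenizeAll are assumptions, not the port's actual dispatch code:

        // Delegate type inferred from the static Tokenize* signatures above.
        delegate int FTokenizer(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk);

        // Hypothetical driver: classify each character, dispatch to its tokenizer,
        // and collect whatever tokens the tokenizers choose to emit.
        static List<Token> TokenizeAll(Context ctx, TokenizerEnvironment tenv)
        {
            var tokens = new List<Token>();
            string ts = tenv.Source;
            int pos = 0;
            while (pos < ts.Length)
            {
                var f = tenv.TokenizerMatrix[(int)charTypeMatrix[ts[pos]]];
                Token token;
                int next = f(ctx, out token, tenv, pos, null);
                if (token != null)
                    tokens.Add(token);
                pos = next > pos ? next : pos + 1; // guarantee progress even when nothing is emitted
            }
            return tokens;
        }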
Example #7
 static int TokenizeSlash(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
 {
     string ts = tenv.Source;
     // guard against '/' being the last character of the source
     if (tokStart + 1 < ts.Length && ts[tokStart + 1] == '/')
     {
         return TokenizeLine(ctx, out token, tenv, tokStart, thunk);
     }
     else if (tokStart + 1 < ts.Length && ts[tokStart + 1] == '*')
     {
         return TokenizeComment(ctx, out token, tenv, tokStart, thunk);
     }
     return TokenizeOperator(ctx, out token, tenv, tokStart, thunk);
 }
Example #8
        static int TokenizeNumber(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            int pos = tokStart;
            bool dotAppeared = false;
            string ts = tenv.Source;

            while (pos < ts.Length)
            {
                char ch = ts[pos++];
                if (ch == '_')
                    continue; // nothing
                if (ch == '.')
                {
                    // a '.' at end of input, or one not followed by a digit, ends the number
                    if (pos >= ts.Length || !IsNumChar(ts[pos]))
                    {
                        --pos;
                        break;
                    }
                    dotAppeared = true;
                    continue;
                }
                if ((ch == 'e' || ch == 'E') && pos < ts.Length && (ts[pos] == '+' || ts[pos] == '-'))
                {
                    pos++;
                    continue;
                }
                if (!IsAlphaOrNum(ch))
                {
                    --pos;
                    break;
                }
            }

            string str = ts.Substring(tokStart, pos - tokStart).Replace("_", "");
            if (dotAppeared)
            {
                token = new Token(TokenType.FLOAT, str, tokStart);
            }
            else
            {
                token = new Token(TokenType.INT, str, tokStart);
            }
            return pos;  // next
        }
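Illustrative inputs for the number scan above, assuming IsNumChar and IsAlphaOrNum behave as their names suggest:

        // "123_456" -> INT   "123456"  (underscores are digit separators and get stripped)
        // "3.14"    -> FLOAT "3.14"    (dotAppeared switches the token type)
        // "1e+9"    -> INT   "1e+9"    (an exponent alone does not make the token FLOAT)
        // "2.x"     -> INT   "2"       (the scan backs up and resumes at the '.')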
Example #9
 static int TokenizeOneCharOperator(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
 {
     token = new Token(TokenType.OPERATOR, tenv.Source.Substring(tokStart, 1), tokStart);
     return tokStart + 1;
 }
Example #10
 static int TokenizeLine(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
 {
     string ts = tenv.Source;
     int pos = tokStart;
     // scan to the end of the line (or end of input)
     while (pos < ts.Length && ts[pos] != '\n' && ts[pos] != '\r')
         ++pos;
     token = null;
     return pos;
 }
Example #11
        static int TokenizeNextline(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            int pos = tokStart;
            string ts = tenv.Source;

            if (pos < ts.Length)
            {
                if (ts[pos] == '\r')
                {
                    ++pos;
                }
                if (pos < ts.Length && ts[pos] == '\n')
                {
                    ++pos;
                }
            }

            tenv.Line.LineNumber += 1;
            tenv.Bol = pos;
            return TokenizeIndent(ctx, out token, tenv, pos, thunk);
        }
Example #12
        static int TokenizeIndent(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            int pos = tokStart;
            string ts = tenv.Source;
            int indent = 0;

            // consume the whole run of leading tabs and spaces, accumulating the indent width
            while (pos < ts.Length)
            {
                char ch = ts[pos];
                if (ch == '\t')
                {
                    indent += tenv.TabWidth;
                }
                else if (ch == ' ')
                {
                    indent += 1;
                }
                else
                {
                    break;
                }
                ++pos;
            }
            token = null; // the width is computed here but no INDENT token is emitted

            return pos;
        }
Example #13
        static int TokenizeDoubleQuote(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            string ts = tenv.Source;
            char ch = '\0';
            char prev = '"';
            int pos = tokStart + 1;

            token = null;

            while (pos < ts.Length)
            {
                ch = ts[pos++];
                if (ch == '\n' || ch == '\r')
                {
                    break;
                }
                if (ch == '"' && prev != '\\')
                {
                    token = new Token(TokenType.TEXT, ts.Substring(tokStart + 1, (pos - 1) - (tokStart + 1)), tokStart + 1);
                    return pos;
                }
                prev = ch;
            }
            return pos - 1;
        }
Example #14
        static int TokenizeComment(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            string ts = tenv.Source;
            int pos = tokStart + 2;
            char ch = '\0';
            char prev = '\0';
            int level = 1;
            token = null;

            while (pos < ts.Length)
            {
                ch = ts[pos++];
                if (ch == '\r')
                {
                    tenv.Line.LineNumber += 1;
                    if (pos < ts.Length && ts[pos] == '\n')
                        ++pos;
                }
                else if (ch == '\n')
                {
                    tenv.Line.LineNumber += 1;
                }
                if (prev == '*' && ch == '/')
                {
                    level--;
                    if (level == 0)
                        return pos;
                }
                else if (prev == '/' && ch == '*')
                {
                    level++;
                }
                prev = ch;
            }

            return pos;
        }
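The level counter makes these block comments nest, unlike plain C comments; an illustrative input (not from the source):

        // "/* a /* b */ c */x" -> level reaches 2 at the inner "/*", so the scan
        // returns only at the second "*/", leaving pos at the 'x'.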
Example #15
        // static int makeTree(CTX, kKonohaSpace *ks, ktoken_t tt, kArray *tls, int s, int e, int closech, kArray *tlsdst, kToken **tkERRRef)
        private int makeTree(TokenType tokentype, IList<Token> tokens, int start, int end, char closeChar, IList<Token> tokensDst, out Token errorToken)
        {
            int i, probablyCloseBefore = end - 1;
            Token tk = tokens[start];
            Debug.Assert(tk.Keyword == 0);

            Token tkP = new Token(tokentype, tk.Text, closeChar) { Keyword = (KeywordType)tokentype };
            tokensDst.Add(tkP);
            tkP.Sub = new List<Token>();
            for (i = start + 1; i < end; i++)
            {
                tk = tokens[i];
                Debug.Assert(tk.Keyword == 0);
                if (tk.Type == TokenType.ERR)
                    break;  // ERR
                Debug.Assert(tk.TopChar != '{');
                if (tk.TopChar == '(')
                {
                    i = makeTree(TokenType.AST_PARENTHESIS, tokens, i, end, ')', tkP.Sub, out errorToken);
                    tk.Keyword = KeywordType.Parenthesis;
                    continue;
                }
                else if (tk.TopChar == '[')
                {
                    i = makeTree(TokenType.AST_BRANCET, tokens, i, end, ']', tkP.Sub, out errorToken);
                    tk.Keyword = KeywordType.Brancet;
                    continue;
                }
                else if (tk.TopChar == closeChar)
                {
                    errorToken = null;
                    return i;
                }
                if ((closeChar == ')' || closeChar == ']') && tk.Type == TokenType.CODE)
                    probablyCloseBefore = i;
                if (tk.Type == TokenType.INDENT && closeChar != '}')
                    continue;  // remove INDENT;
                i = appendKeyword(tokens, i, end, tkP.Sub, out errorToken);
            }
            if (tk.Type != TokenType.ERR)
            {
                uint errref = ctx.SUGAR_P(ReportLevel.ERR, tk.ULine, 0, "'{0}' is expected (probably before {1})", closeChar.ToString(), tokens[probablyCloseBefore].Text);
                tkP.toERR(this.ctx, errref);
            }
            else
            {
                tkP.Type = TokenType.ERR;
            }
            errorToken = tkP;
            return end;
        }
Example #16
        static int TokenizeOperator(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
        {
            int pos = tokStart;
            string ts = tenv.Source;

            while (pos < ts.Length)
            {
                switch (ts[pos])
                {
                    case '<':
                    case '>':
                    case '@':
                    case '$':
                    case '#':
                    case '+':
                    case '-':
                    case '*':
                    case '%':
                    case '/':
                    case '=':
                    case '&':
                    case '?':
                    case ':':
                    case '.':
                    case '^':
                    case '!':
                    case '~':
                    case '|':
                        ++pos;
                        continue;
                }
                break;
            }
            token = new Token(TokenType.OPERATOR, ts.Substring(tokStart, pos - tokStart), tokStart);
            return pos;
        }
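The loop munches the longest run of operator characters, so adjacent operators fuse into a single token (illustrative):

        // "a += b" -> starting at '+', this yields OPERATOR "+="
        // "x=-1"   -> starting at '=', this yields OPERATOR "=-"; the '1' is scanned separately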
Example #17
 // static int selectStmtLine(CTX, kKonohaSpace *ks, int *indent, kArray *tls, int s, int e, int delim, kArray *tlsdst, kToken **tkERRRef)
 private int SelectStatementLine(ref int indent, IList<Token> tokens, int start, int end, char delim, IList<Token> tokensDst, out Token errorToken)
 {
     int i = start;
     Debug.Assert(end <= tokens.Count);
     for (; i < end - 1; i++)
     {
         Token tk = tokens[i];
         Token tk1 = tokens[i + 1];
         if (tk.Keyword != 0)
             break;  // already parsed
         if (tk.TopChar == '@' && (tk1.Type == TokenType.SYMBOL || tk1.Type == TokenType.USYMBOL))
         {
             tk1.Type = TokenType.METANAME;
             tk1.Keyword = 0;
             tokensDst.Add(tk1);
             i++;
             if (i + 1 < end && tokens[i + 1].TopChar == '(')
             {
                 i = makeTree(TokenType.AST_PARENTHESIS, tokens, i + 1, end, ')', tokensDst, out errorToken);
             }
             continue;
         }
         if (tk.Type == TokenType.METANAME)
         {  // already parsed
             tokensDst.Add(tk);
             if (tk1.Type == TokenType.AST_PARENTHESIS)
             {
                 tokensDst.Add(tk1);
                 i++;
             }
             continue;
         }
         if (tk.Type != TokenType.INDENT)
             break;
         if (indent == 0)
             indent = tk.Text.Length;
     }
     for (; i < end; i++)
     {
         var tk = tokens[i];
         if (tk.TopChar == delim && tk.Type == TokenType.OPERATOR)
         {
             errorToken = null;
             return i + 1;
         }
         if (tk.Keyword != 0)
         {
             tokensDst.Add(tk);
             continue;
         }
         else if (tk.TopChar == '(')
         {
             i = makeTree(TokenType.AST_PARENTHESIS, tokens, i, end, ')', tokensDst, out errorToken);
             tk.Keyword = KeywordType.Parenthesis;
             continue;
         }
         else if (tk.TopChar == '[')
         {
             i = makeTree(TokenType.AST_BRANCET, tokens, i, end, ']', tokensDst, out errorToken);
             tk.Keyword = KeywordType.Brancet;
             continue;
         }
         else if (tk.Type == TokenType.ERR)
         {
             errorToken = tk; // falls through on purpose; later calls may overwrite this
         }
         if (tk.Type == TokenType.INDENT)
         {
             if (tk.Text.Length <= indent)
             {
                 Debug.WriteLine(string.Format("tk.Lpos={0}, indent={1}", tk.Text.Length, indent));
                 errorToken = null;
                 return i + 1;
             }
             continue;
         }
         i = appendKeyword(tokens, i, end, tokensDst, out errorToken);
     }
     errorToken = null;
     return i;
 }
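A hypothetical call sketch for SelectStatementLine; the tokens buffer and the error handling shown are assumptions for illustration:

 // Collect one statement's tokens, stopping at ';' or at an INDENT token
 // that falls back to (or below) the current indent level.
 var stmtTokens = new List<Token>();
 int indent = 0;
 Token err;
 int next = SelectStatementLine(ref indent, tokens, 0, tokens.Count, ';', stmtTokens, out err);
 // 'next' is where the following statement begins
 if (err != null)
 {
     // a parse error token was produced (already reported via SUGAR_P)
 }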
Example #18
 internal Token ResolveType(Context ctx, Token tkP)
 {
     int i;
      // fetch the type arguments
     int size = tkP.Sub.Count;
     var p = new List<KonohaParam>();
     for (i = 0; i < size; i++)
     {
         Token tkT = (tkP.Sub[i]);
         if (tkT.Keyword == KeywordType.Type)
         {
             p.Add(new KonohaParam() { Type = tkT.Type });
         }
     }
     throw new NotImplementedException();
      // the remainder is not yet implemented (unreachable placeholder)
     KonohaClass ct;
     if (p.Count > 0)
     {
         ct = null;// this.ctx.share.ca.cts[(int)this.Type];
         if (ct.cparam == KonohaParam.NULL)
         {
              ctx.SUGAR_P(ReportLevel.ERR, this.ULine, 0, "not generic type: {0}", this.KType.ToString());
             return this;
         }
         //ct = kClassTable_Generics(ct, TY_void, p.Count, p);
     }
     else
     {
         //ct = CT_P0(_ctx, CT_Array, this_type(this));
     }
     this.Type = (TokenType)ct.cid;
     return this;
 }