// static int Stmt_addAnnotation(CTX, kStmt *stmt, kArray *tls, int s, int e)
/// <summary>
/// Consumes leading "@name" annotation tokens in tls[s..e) and records them
/// on this statement. Returns the index of the first non-annotation token.
/// </summary>
public int addAnnotation(Context ctx, IList<Token> tls, int s, int e)
{
    int pos = s;
    while (pos < e)
    {
        Token tk = tls[pos];
        if (tk.Type != TokenType.METANAME)
        {
            break;
        }
        if (pos + 1 < e)
        {
            var kw = ctx.kmodsugar.keyword_("@" + tk.Text, Symbol.NewID).Type;
            Token next = tls[pos + 1];
            KonohaExpr value = null;
            if (next.Type == TokenType.AST_PARENTHESIS)
            {
                // "@name(...)" form: parse the parenthesized payload and skip it.
                value = this.newExpr2(ctx, next.Sub, 0, next.Sub.Count);
                pos++;
            }
            if (value != null)
            {
                annotation[kw] = true;
            }
        }
        pos++;
    }
    return pos;
}
// static kExpr *Expr_rightJoin(CTX, kExpr *expr, kStmt *stmt, kArray *tls, int s, int c, int e)
/// <summary>
/// Hook for joining trailing tokens onto an already-parsed expression.
/// Currently a no-op that returns expr unchanged; the C original warned
/// about the ignored tokens here.
/// </summary>
public static KonohaExpr Expr_rightJoin(Context ctx, KonohaExpr expr, KStatement stmt, IList<Token> tls, int s, int c, int e)
{
    if (c < e && expr != null)
    {
        // trailing tokens tls[c..e) are silently ignored for now
        //WARN_Ignored(_ctx, tls, c, e);
    }
    return expr;
}
/// <summary>
/// Returns the interned Symbol for name, creating and caching a new one
/// on first use. Each distinct name maps to exactly one Symbol instance.
/// </summary>
public static Symbol Get(Context ctx, string name)
{
    Symbol sym;
    // Single TryGetValue lookup instead of ContainsKey + Add + indexer
    // (the original did up to three hash lookups per call).
    if (!ctx.share.SymbolMap.TryGetValue(name, out sym))
    {
        sym = new Symbol() { Name = name };
        ctx.share.SymbolMap.Add(name, sym);
    }
    return sym;
}
/// <summary>
/// Creates a fresh Konoha runtime: a new evaluation context and the
/// root namespace bound to it.
/// </summary>
public Konoha()
{
    this.ctx = new Context();
    // the root KonohaSpace must be created after (and from) the context
    this.space = new KonohaSpace(this.ctx);
}
// static int Stmt_skipUnaryOp(CTX, kStmt *stmt, kArray *tls, int s, int e)
/// <summary>
/// Scans forward from s while tokens are unary operators; returns the
/// index of the first non-unary token (or e if all were unary).
/// </summary>
int skipUnaryOp(Context ctx, IList<Token> tls, int s, int e)
{
    int pos = s;
    while (pos < e && isUnaryOp(ctx, tls[pos]))
    {
        pos++;
    }
    return pos;
}
//static int Stmt_findBinaryOp(CTX, kStmt *stmt, kArray *tls, int s, int e, ksyntax_t **synRef)
/// <summary>
/// Finds the index of the binary operator with the highest priority in
/// tls[s..e), writing its syntax into synRef. Returns -1 when no binary
/// operator is found. Ties bind to the later operator unless the syntax
/// is a left-joining operator.
/// </summary>
int findBinaryOp(Context ctx, IList<Token> tls, int s, int e, ref Syntax synRef)
{
    int idx = -1;
    int prif = 0;
    for (int i = skipUnaryOp(ctx, tls, s, e) + 1; i < e; i++)
    {
        Token tk = tls[i];
        Syntax syn = ks.GetSyntax(tk.Keyword);
        // FIX: the C original guards with (syn != NULL && syn->op2 != 0);
        // without the null check, a token whose keyword has no registered
        // syntax threw a NullReferenceException on syn.priority.
        // if(syn != NULL && syn->op2 != 0) {
        if (syn != null && syn.priority > 0)
        {
            if (prif < syn.priority || (prif == syn.priority && syn.Flag != SynFlag.ExprLeftJoinOp2))
            {
                prif = syn.priority;
                idx = i;
                synRef = syn;
            }
            if (syn.Flag != SynFlag.ExprPostfixOp2)
            {
                /* check if real binary operator to parse f() + 1 */
                i = skipUnaryOp(ctx, tls, i + 1, e) - 1;
            }
        }
    }
    return idx;
}
// static kbool_t Stmt_parseSyntaxRule(CTX, kStmt *stmt, kArray *tls, int s, int e)
/// <summary>
/// Looks up the syntax rule for tls[s..e) and matches the tokens against it.
/// Returns true when the tokens match the rule; reports an error and
/// returns false otherwise.
/// </summary>
public bool parseSyntaxRule(Context ctx, IList<Token> tls, int s, int e)
{
    bool ret = false;
    Syntax syn = this.ks.GetSyntaxRule(tls, s, e);
    //Debug.Assert(syn != null);
    if (syn != null && syn.SyntaxRule != null)
    {
        this.syn = syn;
        ret = (matchSyntaxRule(ctx, syn.SyntaxRule, this.ULine, tls, s, e, false) != -1);
    }
    else
    {
        // FIX: syn may legitimately be null here (GetSyntaxRule found
        // nothing), so the old message argument 'syn.KeyWord.ToString()'
        // threw NullReferenceException inside the error path.
        string what = (syn != null) ? syn.KeyWord.ToString()
                                    : (s < e ? tls[s].Text : "(empty)");
        ctx.SUGAR_P(ReportLevel.ERR, this.ULine, 0, "undefined syntax rule for '{0}'", what);
    }
    return ret;
}
// static kExpr *ParseExpr(CTX, ksyntax_t *syn, kStmt *stmt, kArray *tls, int s, int c, int e)
/// <summary>
/// Dispatches expression parsing of tls[s..e) (pivot at c) to the syntax's
/// own ParseExpr handler, falling back to the shared "undefined parser"
/// reporter when the syntax has none.
/// </summary>
public KonohaExpr ParseExpr(Context ctx, Syntax syn, IList<Token> tls, int s, int c, int e)
{
    Debug.Assert(syn != null);
    var handler = syn.ParseExpr;
    return (handler != null)
        ? handler(ctx, syn, this, tls, s, c, e)
        : KModSugar.UndefinedParseExpr(ctx, syn, this, tls, s, c, e);
}
/// <summary>
/// Parses a "receiver.field" expression: the tokens before c form the
/// receiver, tls[c+1] must be a field name. On a missing field name an
/// error ConsExpr is produced instead.
/// </summary>
private static KonohaExpr ParseExpr_Dot(Context ctx, Syntax syn, KStatement stmt, IList<Token> tls, int s, int c, int e)
{
    Console.WriteLine("s={0}, c={1}", s, c);
    Debug.Assert(s < c);
    if (isFieldName(tls, c, e))
    {
        // Parse the receiver, wrap it with the field token, then let
        // rightJoin consume anything after the field.
        KonohaExpr receiver = stmt.newExpr2(ctx, tls, s, c);
        var dotted = new ConsExpr(ctx, syn, tls[c + 1], receiver);
        return KModSugar.Expr_rightJoin(ctx, dotted, stmt, tls, c + 2, c + 2, e);
    }
    // Error: no field name after '.'; point at the offending token.
    if (c + 1 < e)
    {
        c++;
    }
    return new ConsExpr(ctx, syn, tls[c], ReportLevel.ERR, "expected field name: not " + tls[c].Text);
}
/// <summary>
/// Pattern handler: matches a single USYMBOL token at tls[s], binds it
/// under name in the statement map and returns s+1; returns -1 on mismatch.
/// </summary>
public static int ParseStmt_Usymbol(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    Token tk = tls[s];
    if (tk.Type != TokenType.USYMBOL)
    {
        return -1; // not an upper-case symbol: no match
    }
    stmt.map.Add(name, new SingleTokenExpr(tk));
    return s + 1;
}
/// <summary>
/// Pattern handler: matches a single type token at tls[s], binds it under
/// name in the statement map and returns s+1; returns -1 on mismatch.
/// </summary>
public static int ParseStmt_Type(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    Token tk = tls[s];
    if (!tk.IsType)
    {
        return -1; // token is not a type name: pattern does not match
    }
    //kObject_setObject(stmt, name, tk);
    stmt.map.Add(name, new SingleTokenExpr(tk));
    return s + 1;
}
/// <summary>
/// Pattern handler: parses tls[s..e) as one expression, binds it under
/// name in the statement map and returns e; returns -1 when parsing fails.
/// </summary>
public static int ParseStmt_Expr(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    var expr = stmt.newExpr2(ctx, tls, s, e);
    if (expr == null)
    {
        return -1; // expression parse failed (already reported)
    }
    //dumpExpr(_ctx, 0, 0, expr);
    //kObject_setObject(stmt, name, expr);
    stmt.map.Add(name, expr);
    return e;
}
// ast.h
// static KMETHOD UndefinedParseExpr(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Fallback expression parser: reports "undefined expression parser" for
/// the pivot token at tls[c] and yields no expression.
/// </summary>
public static KonohaExpr UndefinedParseExpr(Context ctx, Syntax syn, KStatement stmt, IList<Token> tls, int start, int c, int end)
{
    var tk = tls[c];
    ctx.SUGAR_P(ReportLevel.ERR, tk.ULine, 0, "undefined expression parser for '{0}'", tk.Text);
    return null;
}
// static KMETHOD ParseExpr_Term(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Parses a single-token term at tls[c] and lets rightJoin attach whatever
/// follows it.
/// </summary>
public static KonohaExpr ParseExpr_Term(Context ctx, Syntax syn, KStatement stmt, IList<Token> tls, int s, int c, int e)
{
    Debug.Assert(s == c);
    //new_W(Expr, SYN_(kStmt_ks(stmt), tk->kw));
    //Expr_setTerm(expr, 1);
    var term = new TermExpr();
    term.tk = tls[c];
    return Expr_rightJoin(ctx, term, stmt, tls, s + 1, c + 1, e);
}
// static KMETHOD ParseExpr_Op(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Parses a unary (s == c) or binary (s &lt; c) operator expression with the
/// operator token at tls[c]. When the syntax has a method but no specific
/// type checker, it is rewritten to the generic method-call syntax.
/// </summary>
public static KonohaExpr ParseExpr_Op(Context ctx, Syntax syn, KStatement stmt, IList<Token> tls, int s, int c, int e)
{
    Token op = tls[c];
    bool unary = (s == c);
    // Parse the operand(s) to the right of the operator first.
    KonohaExpr rexpr = stmt.newExpr2(ctx, tls, c + 1, e);
    KMethod mn = unary ? syn.Op1 : syn.Op2;
    if (mn != null && syn.ExprTyCheck == ctx.kmodsugar.UndefinedExprTyCheck)
    {
        //kToken_setmn(tk, mn, (s == c) ? MNTYPE_unary: MNTYPE_binary);
        syn = stmt.ks.GetSyntax(KeywordType.ExprMethodCall); // switch type checker
    }
    if (unary)
    {
        // unary operator
        return new ConsExpr(ctx, syn, op, rexpr);
    }
    // binary operator
    KonohaExpr lexpr = stmt.newExpr2(ctx, tls, s, c);
    return new ConsExpr(ctx, syn, op, lexpr, rexpr);
}
// static int matchSyntaxRule(CTX, kStmt *stmt, kArray *rules, kline_t /*parent*/uline, kArray *tls, int s, int e, int optional)
/// <summary>
/// Matches the token stream tls[s..e) against a syntax rule pattern.
/// Returns the index just past the last matched token on success, s when an
/// optional match fails softly, or -1 on a hard mismatch (after reporting).
/// </summary>
public int matchSyntaxRule(Context ctx, IList<Token> rules, LineInfo /*parent*/uline, IList<Token> tls, int s, int e, bool optional)
{
    int ri, ti, rule_size = rules.Count;
    ti = s;
    for (ri = 0; ri < rule_size && ti < e; ri++)
    {
        Token rule = rules[ri];
        Token tk = tls[ti];
        // track the current source line for error reporting below
        uline = tk.ULine;
        Console.WriteLine("matching rule={0},{1},{2} token={3},{4},{5}", ri, rule.Type, rule.Keyword, ti - s, tk.Type, tk.Text);
        if (rule.Type == TokenType.CODE)
        {
            // literal keyword rule: the token must carry exactly this keyword
            if (rule.Keyword != tk.Keyword)
            {
                if (optional)
                {
                    return s;
                }
                tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.Keyword);
                return -1;
            }
            ti++;
            continue;
        }
        else if (rule.Type == TokenType.METANAME)
        {
            // named sub-pattern ($expr, $type, ...): delegate to its ParseStmt handler
            Syntax syn = this.ks.GetSyntax(rule.Keyword);
            if (syn == null || syn.ParseStmt == null)
            {
                tk.Print(ctx, ReportLevel.ERR, "unknown syntax pattern: {0}", rule.Keyword);
                return -1;
            }
            int c = e;
            // if the next rule is a literal keyword, limit the sub-parse to
            // the tokens before the first occurrence of that keyword
            if (ri + 1 < rule_size && rules[ri + 1].Type == TokenType.CODE)
            {
                c = lookAheadKeyword(tls, ti + 1, e, rules[ri + 1]);
                if (c == -1)
                {
                    if (optional)
                    {
                        return s;
                    }
                    tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.Keyword);
                    return -1;
                }
                ri++;
            }
            int err_count = ctx.ctxsugar.err_count;
            int next = ParseStmt(ctx, syn, rule.nameid, tls, ti, c);
            Console.WriteLine("matched '{0}' nameid='{1}', next={2}=>{3}", rule.Keyword, rule.nameid.Name, ti, next);
            if (next == -1)
            {
                if (optional)
                {
                    return s;
                }
                // NOTE(review): err_count is read from ctx.ctxsugar.err_count
                // above but compared against ctx.sugarerr_count here — these
                // look like they should be the same counter; confirm.
                if (err_count == ctx.sugarerr_count)
                {
                    tk.Print(ctx, ReportLevel.ERR, "unknown syntax pattern: {0}", this.syn.KeyWord, rule.Keyword, tk.Text);
                }
                return -1;
            }
            ////XXX Why???
            //optional = 0;
            // if the sub-parse ran to e, resume from where it stopped;
            // otherwise skip just past the delimiter keyword found at c
            ti = (c == e) ? next : c + 1;
            continue;
        }
        else if (rule.Type == TokenType.AST_OPTIONAL)
        {
            // optional group: a soft failure leaves ti where it was (next == s == ti)
            int next = matchSyntaxRule(ctx, rule.Sub, uline, tls, ti, e, true);
            if (next == -1)
            {
                return -1;
            }
            ti = next;
            continue;
        }
        else if (rule.Type == TokenType.AST_PARENTHESIS || rule.Type == TokenType.AST_BRACE || rule.Type == TokenType.AST_BRANCET)
        {
            // bracket group: the token must be the same bracket kind, then its
            // contents are matched (non-optionally) against the rule's sub-pattern
            if (tk.Type == rule.Type && rule.TopChar == tk.TopChar)
            {
                int next = matchSyntaxRule(ctx, rule.Sub, uline, tk.Sub, 0, tk.Sub.Count, false);
                if (next == -1)
                {
                    return -1;
                }
                ti++;
            }
            else
            {
                if (optional)
                {
                    return s;
                }
                tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.TopChar);
                return -1;
            }
        }
    }
    if (!optional)
    {
        // tokens exhausted: any rules still unconsumed must all be optional groups
        for (; ri < rules.Count; ri++)
        {
            Token rule = rules[ri];
            if (rule.Type != TokenType.AST_OPTIONAL)
            {
                ctx.SUGAR_P(ReportLevel.ERR, uline, -1, "{0} needs syntax pattern: {1}", this.syn.KeyWord, rule.Keyword);
                return -1;
            }
        }
        //WARN_Ignored(_ctx, tls, ti, e);
    }
    return ti;
}
// static kExpr* Stmt_newExpr2(CTX, kStmt *stmt, kArray *tls, int s, int e)
/// <summary>
/// Builds an expression tree from tls[s..e): splits at the weakest-binding
/// binary operator if one exists, otherwise parses from the first token's
/// syntax. Reports an error and returns null when the range is empty.
/// </summary>
public KonohaExpr newExpr2(Context ctx, IList<Token> tls, int s, int e)
{
    if (s >= e)
    {
        // Empty range: report where the expression was expected.
        if (0 < s - 1)
        {
            ctx.SUGAR_P(ReportLevel.ERR, ULine, -1, "expected expression after {0}", tls[s - 1].Text);
        }
        else if (e < tls.Count)
        {
            ctx.SUGAR_P(ReportLevel.ERR, ULine, -1, "expected expression before {0}", tls[e].Text);
        }
        else
        {
            ctx.SUGAR_P(ReportLevel.ERR, ULine, 0, "expected expression");
        }
        return null;
    }
    Syntax syn = null;
    int idx = findBinaryOp(ctx, tls, s, e, ref syn);
    if (idx != -1)
    {
        // Binary operator found: its syntax splits the range at idx.
        Console.WriteLine("** Found BinaryOp: s={0}, idx={1}, e={2}, '{3}' **", s, idx, e, tls[idx].Text);
        return ParseExpr(ctx, syn, tls, s, idx, e);
    }
    // No binary operator: dispatch on the first token's syntax.
    Syntax headSyn = ks.GetSyntax(tls[s].Keyword);
    Debug.Assert(headSyn != null);
    return ParseExpr(ctx, headSyn, tls, s, s, e);
}
// static KMETHOD ParseExpr_Parenthesis(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Parses a parenthesis token at tls[c]: a leading "(...)" is a grouped
/// sub-expression; "expr(...)" becomes a call with the parenthesized
/// contents as arguments.
/// </summary>
private static KonohaExpr ParseExpr_Parenthesis(Context ctx, Syntax syn, KStatement stmt, IList<Token> tls, int s, int c, int e)
{
    Token paren = tls[c];
    if (s == c)
    {
        // Leading "(...)": parse the contents as one grouped expression.
        KonohaExpr inner = stmt.newExpr2(ctx, paren.Sub, 0, paren.Sub.Count);
        return KModSugar.Expr_rightJoin(ctx, inner, stmt, tls, s + 1, c + 1, e);
    }
    // "expr(...)": parse the callee first.
    KonohaExpr lexpr = stmt.newExpr2(ctx, tls, s, c);
    if (lexpr == null)
    {
        return null;
    }
    if (lexpr.syn == null)
    {
        lexpr.syn = stmt.ks.GetSyntax(lexpr.tk.Keyword);
    }
    if (lexpr.syn.KeyWord == KeywordType.DOT)
    {
        lexpr.syn = stmt.ks.GetSyntax(KeywordType.ExprMethodCall); // CALL
    }
    else if (lexpr.syn.KeyWord != KeywordType.ExprMethodCall)
    {
        Console.WriteLine("function calls .. ");
        syn = stmt.ks.GetSyntax(KeywordType.Parenthesis); // (f null ())
        lexpr = new ConsExpr(ctx, syn, lexpr, null);
    }
    // Append the parenthesized arguments (possibly none) to the call.
    stmt.addExprParams(ctx, lexpr, paren.Sub, 0, paren.Sub.Count, true/*allowEmpty*/);
    return KModSugar.Expr_rightJoin(ctx, lexpr, stmt, tls, s + 1, c + 1, e);
}
// static int ParseStmt(CTX, ksyntax_t *syn, kStmt *stmt, ksymbol_t name, kArray *tls, int s, int e)
/// <summary>
/// Delegates pattern-matching of tls[s..e) to the syntax's ParseStmt
/// handler, binding any captured result under name. Returns the handler's
/// next-token index, or -1 on mismatch.
/// </summary>
public int ParseStmt(Context ctx, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    //Console.WriteLine("ParseStmt {0}, {0}", name.Name, tls[s].Text);
    return syn.ParseStmt(ctx, this, syn, name, tls, s, e);
}
// static KMETHOD ParseStmt_Block(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Pattern handler for statement blocks: a CODE token is stored raw, an
/// AST_BRACE token has its contents parsed as a block, anything else makes
/// the remaining range tls[s..e) a block.
/// </summary>
private static int ParseStmt_Block(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    //Console.WriteLine("ParseStmt_Block name:" + name.Name);
    Token tk = tls[s];
    if (tk.Type == TokenType.CODE)
    {
        // Raw code token: defer parsing, store it as-is.
        stmt.map.Add(name, new CodeExpr(tk));
        return s + 1;
    }
    var parser = new Parser(ctx, stmt.ks);
    if (tk.Type == TokenType.AST_BRACE)
    {
        // "{ ... }": parse the brace's contents as a statement block.
        stmt.map.Add(name, parser.CreateBlock(stmt, tk.Sub, 0, tk.Sub.Count, ';'));
        return s + 1;
    }
    // Otherwise the rest of the range forms the block.
    stmt.map.Add(name, parser.CreateBlock(stmt, tls, s, e, ';'));
    return e;
}
// Stmt_toERR
/// <summary>
/// Marks this statement as an error statement: switches its build type to
/// ERR and its syntax to the error syntax.
/// </summary>
public void toERR(Context ctx, uint estart)
{
    this.build = StmtType.ERR;
    this.syn = ks.GetSyntax(KeywordType.Err);
    //kObject_setObject(stmt, KW_Err, kstrerror(eno));
}
// static KMETHOD ParseStmt_Params(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Pattern handler for a parameter list: requires an AST_PARENTHESIS token
/// at tokens[s], parses its comma-separated contents as a block bound under
/// name, and returns s+1; returns -1 on mismatch.
/// </summary>
private static int ParseStmt_Params(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tokens, int s, int e)
{
    Token tk = tokens[s];
    if (tk.Type != TokenType.AST_PARENTHESIS)
    {
        return -1; // parameter lists must be parenthesized
    }
    var tls = tk.Sub;
    int ss = 0;
    int ee = tls.Count;
    // f(void) => f(): drop a leading 'void'
    if (0 < ee && tls[0].Keyword == KeywordType.Void)
    {
        ss = 1;
    }
    BlockExpr bk = new Parser(ctx, stmt.ks).CreateBlock(stmt, tls, ss, ee, ',');
    stmt.map.Add(name, bk);
    return s + 1;
}
/// <summary>
/// True when the token's keyword maps to a registered syntax that defines
/// a unary operator method (Op1).
/// </summary>
bool isUnaryOp(Context ctx, Token tk)
{
    var syn = ks.GetSyntax(tk.Keyword);
    if (syn == null)
    {
        return false;
    }
    return syn.Op1 != null;
}
// static KMETHOD ParseStmt_Toks(CTX, ksfp_t *sfp _RIX)
/// <summary>
/// Would bind all remaining tokens tls[s..e) under name. Storing a raw
/// token list in the statement map is not supported yet, so a non-empty
/// range currently throws NotImplementedException; an empty range returns -1.
/// </summary>
private static int ParseStmt_Toks(Context ctx, KStatement stmt, Syntax syn, Symbol name, IList<Token> tls, int s, int e)
{
    if (s < e)
    {
        // FIX: removed the token-copy loop whose list was discarded and the
        // unreachable 'return e;' after the throw (compiler warning CS0162).
        //kObject_setObject(stmt, name, a);
        //stmt.map.Add(name, a);
        throw new NotImplementedException();
    }
    return -1;
}
/// <summary>
/// Creates the root namespace for ctx with a fresh dynamic scope and the
/// default syntax table installed.
/// </summary>
public KonohaSpace(Context ctx)
{
    this.ctx = ctx;
    this.scope = new ExpandoObject();
    defineDefaultSyntax();
}
/// <summary>
/// Creates a child namespace bound to ctx. NOTE: the child parameter is
/// currently unused, and unlike the root constructor neither scope nor the
/// default syntax table is initialized here.
/// </summary>
public KonohaSpace(Context ctx, int child)
{
    this.ctx = ctx;
}
/// <summary>
/// Returns the interned Symbol for name, creating and caching a new one on
/// first use. The def and pol parameters mirror the C API (see the ported
/// code below) but are not yet honored — every call falls through to plain
/// interning.
/// </summary>
public static Symbol Get(Context ctx, string name, Symbol def, SymPol pol)
{
    /*if (pol == SYMPOL_RAW) {
        return ctx.share.SymbolMap[name];
    } else {
        ksymbol_t sym, mask = 0;
        name = ksymbol_norm(buf, name, &len, &hcode, &mask, pol);
        sym = Kmap_getcode(_ctx, _ctx->share->symbolMapNN, _ctx->share->symbolList, name, len, hcode, SPOL_ASCII, def);
        if(def == sym) return def;
        return sym | mask;
    }*/
    Symbol sym;
    // Single TryGetValue lookup instead of ContainsKey + Add + indexer
    // (the original did up to three hash lookups per call).
    if (!ctx.share.SymbolMap.TryGetValue(name, out sym))
    {
        sym = new Symbol() { Name = name };
        ctx.share.SymbolMap.Add(name, sym);
    }
    return sym;
}
// static kExpr *Stmt_addExprParams(CTX, kStmt *stmt, kExpr *expr, kArray *tls, int s, int e, int allowEmpty)
/// <summary>
/// Splits tls[s..e) on commas and appends each segment, parsed as an
/// expression, to the cons expression. The final segment is skipped only
/// when it is empty and allowEmpty is true.
/// </summary>
public void addExprParams(Context ctx, KonohaExpr expr, IList<Token> tls, int s, int e, bool allowEmpty)
{
    var cons = (ConsExpr)expr;
    int segStart = s;
    int pos;
    for (pos = s; pos < e; pos++)
    {
        if (tls[pos].Keyword == KeywordType.COMMA)
        {
            cons.Add(ctx, newExpr2(ctx, tls, segStart, pos));
            segStart = pos + 1;
        }
    }
    // Final segment after the last comma (or the whole range).
    if (!allowEmpty || segStart < pos)
    {
        cons.Add(ctx, newExpr2(ctx, tls, segStart, pos));
    }
    //kArray_clear(tls, s);
    //return expr;
}
/// <summary>
/// Creates a parser bound to an evaluation context and a namespace.
/// </summary>
public Parser(Context ctx, KonohaSpace ks)
{
    this.ks = ks;
    this.ctx = ctx;
}
/// <summary>
/// Fallback tokenizer for characters with no registered handler: yields no
/// token and leaves the scan position at tokStart.
/// </summary>
static int TokenizeUndefined(Context ctx, out Token token, TokenizerEnvironment tenv, int tokStart, KMethod thunk)
{
    token = null;
    return tokStart;
}