/// <summary>
/// Creates a statement node anchored at the given source line and owned by the
/// given namespace, with empty annotation and expression maps.
/// </summary>
/// <param name="line">Source line this statement starts at.</param>
/// <param name="ks">Namespace the statement belongs to (used for syntax lookup).</param>
public KStatement(LineInfo line, KonohaSpace ks)
{
    this.ULine = line;
    this.ks = ks;
    this.annotation = new Dictionary<KeywordType, bool>();
    this.map = new Dictionary<object, KonohaExpr>();
}
// static int matchSyntaxRule(CTX, kStmt *stmt, kArray *rules, kline_t /*parent*/uline, kArray *tls, int s, int e, int optional)
/// <summary>
/// Matches the token range tls[s, e) against the rule pattern in rules,
/// dispatching METANAME sub-patterns to their registered syntax's ParseStmt handler.
/// </summary>
/// <param name="ctx">Context used for syntax lookup and error reporting.</param>
/// <param name="rules">Rule tokens: CODE (literal keyword), METANAME (named sub-pattern),
/// AST_OPTIONAL (optional sub-rule), or a bracket group (PARENTHESIS/BRACE/BRANCET).</param>
/// <param name="uline">Parent line info; overwritten with each matched token's line so that
/// trailing missing-pattern errors are reported at the last position seen.</param>
/// <param name="tls">Tokens being matched.</param>
/// <param name="s">Inclusive start index into tls.</param>
/// <param name="e">Exclusive end index into tls.</param>
/// <param name="optional">When true a mismatch is not an error; the method returns s ("no match").</param>
/// <returns>Index of the first unconsumed token on success, s when an optional rule
/// failed to match, or -1 after reporting an error.</returns>
public int matchSyntaxRule(Context ctx, IList<Token> rules, LineInfo /*parent*/uline, IList<Token> tls, int s, int e, bool optional)
{
    int ri, ti, rule_size = rules.Count;
    ti = s;
    for (ri = 0; ri < rule_size && ti < e; ri++)
    {
        Token rule = rules[ri];
        Token tk = tls[ti];
        uline = tk.ULine;
        Console.WriteLine("matching rule={0},{1},{2} token={3},{4},{5}", ri, rule.Type, rule.Keyword, ti - s, tk.Type, tk.Text);
        if (rule.Type == TokenType.CODE)
        {
            // Literal keyword: the current token must carry the same keyword.
            if (rule.Keyword != tk.Keyword)
            {
                if (optional)
                {
                    return s;
                }
                tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.Keyword);
                return -1;
            }
            ti++;
            continue;
        }
        else if (rule.Type == TokenType.METANAME)
        {
            // Named sub-pattern: delegate to the registered syntax's statement parser.
            Syntax syn = this.ks.GetSyntax(rule.Keyword);
            if (syn == null || syn.ParseStmt == null)
            {
                tk.Print(ctx, ReportLevel.ERR, "unknown syntax pattern: {0}", rule.Keyword);
                return -1;
            }
            // If the next rule is a literal keyword, the sub-pattern ends just before
            // that keyword's next occurrence; find it and consume that rule here.
            int c = e;
            if (ri + 1 < rule_size && rules[ri + 1].Type == TokenType.CODE)
            {
                c = lookAheadKeyword(tls, ti + 1, e, rules[ri + 1]);
                if (c == -1)
                {
                    if (optional)
                    {
                        return s;
                    }
                    tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.Keyword);
                    return -1;
                }
                ri++;
            }
            int err_count = ctx.ctxsugar.err_count;
            int next = ParseStmt(ctx, syn, rule.nameid, tls, ti, c);
            Console.WriteLine("matched '{0}' nameid='{1}', next={2}=>{3}", rule.Keyword, rule.nameid.Name, ti, next);
            if (next == -1)
            {
                if (optional)
                {
                    return s;
                }
                // FIX: compare against the same counter sampled above (ctx.ctxsugar.err_count;
                // previously this read a different member, ctx.sugarerr_count), so the fallback
                // message is only emitted when ParseStmt failed WITHOUT reporting its own error.
                if (err_count == ctx.ctxsugar.err_count)
                {
                    // FIX: format string now consumes all three arguments that were already
                    // being passed; the old "unknown syntax pattern: {0}" silently dropped two.
                    tk.Print(ctx, ReportLevel.ERR, "{0} needs syntax pattern '{1}', not '{2}'", this.syn.KeyWord, rule.Keyword, tk.Text);
                }
                return -1;
            }
            ////XXX Why???
            //optional = 0;
            // When look-ahead bounded the sub-pattern, resume just after the found keyword.
            ti = (c == e) ? next : c + 1;
            continue;
        }
        else if (rule.Type == TokenType.AST_OPTIONAL)
        {
            // Optional sub-rule: a non-match returns ti unchanged (next == ti == s of the call).
            int next = matchSyntaxRule(ctx, rule.Sub, uline, tls, ti, e, true);
            if (next == -1)
            {
                return -1;
            }
            ti = next;
            continue;
        }
        else if (rule.Type == TokenType.AST_PARENTHESIS || rule.Type == TokenType.AST_BRACE || rule.Type == TokenType.AST_BRANCET)
        {
            // Bracket group: token must be the same group kind with the same opening
            // character, and its contents must match the sub-rule (non-optional).
            if (tk.Type == rule.Type && rule.TopChar == tk.TopChar)
            {
                int next = matchSyntaxRule(ctx, rule.Sub, uline, tk.Sub, 0, tk.Sub.Count, false);
                if (next == -1)
                {
                    return -1;
                }
                ti++;
            }
            else
            {
                if (optional)
                {
                    return s;
                }
                tk.Print(ctx, ReportLevel.ERR, "{0} needs '{1}'", this.syn.KeyWord, rule.TopChar);
                return -1;
            }
        }
    }
    // Tokens ran out: any remaining non-optional rule means the pattern is incomplete.
    if (!optional)
    {
        for (; ri < rules.Count; ri++)
        {
            Token rule = rules[ri];
            if (rule.Type != TokenType.AST_OPTIONAL)
            {
                ctx.SUGAR_P(ReportLevel.ERR, uline, -1, "{0} needs syntax pattern: {1}", this.syn.KeyWord, rule.Keyword);
                return -1;
            }
        }
        //WARN_Ignored(_ctx, tls, ti, e);
    }
    return ti;
}
// static size_t vperrorf(CTX, int pe, kline_t uline, int lpos, const char *fmt, va_list ap)
/// <summary>
/// Formats and records a sugar-phase diagnostic: writes a level prefix (with
/// file:line when available) and the formatted message to the console, appends
/// the level label to the sugar error list, bumps err_count for ERR/CRIT, and
/// forwards to ReportError.
/// </summary>
/// <param name="pe">Report level; ignored levels yield no output.</param>
/// <param name="uline">Source position, or null when unknown.</param>
/// <param name="lpos">Column position; currently unused by this implementation.</param>
/// <param name="fmt">Composite format string for the message body.</param>
/// <param name="ap">Arguments for <paramref name="fmt"/>.</param>
/// <returns>Index of the recorded entry, or uint.MaxValue when nothing was recorded.</returns>
uint vperrorf(ReportLevel pe, LineInfo uline, int lpos, string fmt, params object[] ap)
{
    string label = GetErrorTypeString(pe);
    if (label == null)
    {
        // Level has no label: report nothing and signal "no entry recorded".
        return unchecked((uint)-1);
    }
    var sugar = this.ctxsugar;
    if (uline == null)
    {
        Console.Write(label + ' ');
    }
    else
    {
        string file = uline.Filename;
        if (file == string.Empty)
        {
            // An empty filename is printed as "0", matching the C original's convention.
            file = "0";
        }
        Console.Write("{0} ({1}:{2}) ", label, file, uline.LineNumber);
    }
    Console.Write(fmt, ap);
    // Index is taken before the Add so it refers to the entry being appended.
    uint errref = (uint)sugar.errors.Count;
    sugar.errors.strings.Add(label);
    if (pe == ReportLevel.ERR || pe == ReportLevel.CRIT)
    {
        sugar.err_count++;
    }
    ReportError(pe, label);
    return errref;
}
// token.h
// static void parseSyntaxRule(CTX, const char *rule, kline_t pline, kArray *a);
/// <summary>
/// Tokenizes a syntax-rule source string and compiles the resulting token list
/// into rule tokens via makeSyntaxRule.
/// </summary>
/// <param name="rule">Rule source text to tokenize.</param>
/// <param name="pline">Parent line info; currently unused by this implementation.</param>
/// <param name="adst">Receives the compiled rule tokens.</param>
public void parseSyntaxRule(string rule, LineInfo pline, out List<Token> adst)
{
    var ruleTokens = new Tokenizer(ctx, this).Tokenize(rule);
    makeSyntaxRule(ruleTokens, 0, ruleTokens.Count, out adst);
}
// static size_t sugar_p(CTX, int pe, kline_t uline, int lpos, const char *fmt, ...)
/// <summary>
/// Reports a sugar-phase diagnostic; thin varargs wrapper around vperrorf.
/// </summary>
/// <param name="pe">Report level.</param>
/// <param name="line">Source position, or null when unknown.</param>
/// <param name="lpos">Column position, forwarded unchanged.</param>
/// <param name="format">Composite format string for the message body.</param>
/// <param name="param">Arguments for <paramref name="format"/>.</param>
/// <returns>Whatever vperrorf returns: the recorded entry's index, or uint.MaxValue.</returns>
public uint SUGAR_P(ReportLevel pe, LineInfo line, int lpos, string format, params object[] param)
{
    // Forward everything unchanged; vperrorf does the formatting and bookkeeping.
    uint errorIndex = vperrorf(pe, line, lpos, format, param);
    return errorIndex;
}