/// <summary>Overall token pulled from the group file.</summary>
private readonly IToken _templateToken;

#endregion Fields

#region Constructors

/// <summary>
/// Builds a lexer error message for a template read from a group file.
/// </summary>
/// <param name="sourceName">Name of the source the template came from.</param>
/// <param name="message">Lexer error text.</param>
/// <param name="templateToken">Overall token pulled from the group file.</param>
/// <param name="cause">Underlying exception, if any.</param>
public TemplateLexerMessage(string sourceName, string message, IToken templateToken, Exception cause)
    : base(ErrorType.LEXER_ERROR, null, cause, null)
{
    _sourceName = sourceName;
    _message = message;
    _templateToken = templateToken;
}
/// <summary>
/// Creates a message of the given error type, recording the offending token
/// and any message arguments.
/// </summary>
/// <param name="errorType">Kind of error being reported (required).</param>
/// <param name="e">Underlying exception, if any.</param>
/// <param name="offendingToken">Token the error is attached to; may be null.</param>
/// <param name="args">Arguments substituted into the message template.</param>
public ANTLRMessage([NotNull] ErrorType errorType, [Nullable] Exception e, IToken offendingToken, params object[] args)
{
    this.errorType = errorType;
    this.e = e;
    this.offendingToken = offendingToken;
    this.args = args;
}
/// <summary>Overall token pulled from the group file.</summary>
IToken templateToken;

#endregion Fields

#region Constructors

/// <summary>
/// Builds a lexer error message for a template read from a group file.
/// </summary>
/// <param name="srcName">Name of the source the template came from.</param>
/// <param name="msg">Lexer error text.</param>
/// <param name="templateToken">Overall token pulled from the group file.</param>
/// <param name="cause">Underlying exception, if any.</param>
public TemplateLexerMessage(string srcName, string msg, IToken templateToken, Exception cause)
    : base(ErrorType.LEXER_ERROR, null, cause, null)
{
    this.srcName = srcName;
    this.msg = msg;
    this.templateToken = templateToken;
}
/// <summary>
/// Convenience overload: creates a semantics message with a single argument
/// and no second argument.
/// </summary>
public GrammarSemanticsMessage(int msgID, Grammar g, IToken offendingToken, object arg)
    : this(msgID, g, offendingToken, arg, null)
{
}
/// <summary>
/// Creates a template compile-time error message.
/// </summary>
/// <param name="error">Kind of error being reported.</param>
/// <param name="sourceName">Name of the source the template came from.</param>
/// <param name="templateToken">Overall token for the enclosing template.</param>
/// <param name="token">Specific token the error occurred at.</param>
/// <param name="cause">Underlying exception, if any.</param>
/// <param name="arg">First message argument.</param>
/// <param name="arg2">Second message argument.</param>
public TemplateCompiletimeMessage(ErrorType error, string sourceName, IToken templateToken, IToken token, Exception cause, object arg, object arg2)
    : base(error, null, cause, arg, arg2)
{
    _sourceName = sourceName;
    _templateToken = templateToken;
    _token = token;
}
/// <summary>
/// Convenience overload: creates a syntax message with no message argument.
/// </summary>
public GrammarSyntaxMessage(int msgID, Grammar grammar, IToken offendingToken, RecognitionException exception)
    : this(msgID, grammar, offendingToken, null, exception)
{
}
/// <summary>
/// Copy constructor: duplicates the node's token payload and its token-span
/// indexes. Children are not copied here.
/// </summary>
public CommonTree(CommonTree node)
    : base(node)
{
    token = node.token;
    startIndex = node.startIndex;
    stopIndex = node.stopIndex;
}
/// <summary>
/// Creates the tree node used to represent an error, covering the tokens
/// from <paramref name="start"/> to <paramref name="stop"/>.
/// </summary>
/// <remarks>
/// <para>If you use your own kind of tree nodes, you will likely have to
/// override this method: <c>CommonTree</c> returns
/// <c>Token.INVALID_TOKEN_TYPE</c> when there is no token payload, and a
/// different node type may need its token type set explicitly.</para>
///
/// <para>You don't have to subclass <c>CommonErrorNode</c>; subclassing your
/// own tree node class avoids class-cast exceptions.</para>
/// </remarks>
public virtual object ErrorNode(ITokenStream input, IToken start, IToken stop, RecognitionException e)
{
    return new CommonErrorNode(input, start, stop, e);
}
/// <summary>
/// Captures the grammar/rule/alternative context for an action node; the
/// error manager is taken from the grammar's tool.
/// </summary>
public ActionSniffer(Grammar g, Rule r, Alternative alt, ActionAST node, IToken actionToken)
{
    this.g = g;
    this.r = r;
    this.alt = alt;
    this.node = node;
    this.actionToken = actionToken;
    errMgr = g.tool.errMgr;
}
/// <summary>
/// Re-lexes the text of <paramref name="actionToken"/> with an
/// <see cref="ActionSplitter"/>, positioned at the token's original
/// line/column, so this object receives the listener callbacks.
/// </summary>
public virtual void ProcessNested(IToken actionToken)
{
    ANTLRStringStream stream = new ANTLRStringStream(actionToken.Text);
    stream.Line = actionToken.Line;
    stream.CharPositionInLine = actionToken.CharPositionInLine;

    ActionSplitter splitter = new ActionSplitter(stream, this);
    // Forces evaluation and triggers the listener methods on this object.
    splitter.GetActionTokens();
}
/// <summary>
/// Creates a grammar root node backed by the given token stream.
/// </summary>
/// <exception cref="ArgumentNullException">If <paramref name="tokenStream"/> is null.</exception>
public GrammarRootAST(IToken t, ITokenStream tokenStream)
    : base(t)
{
    if (tokenStream == null)
    {
        throw new ArgumentNullException(nameof(tokenStream));
    }

    this.tokenStream = tokenStream;
}
/// <summary>
/// Creates a grammar root node with an explicit type and text, backed by the
/// given token stream.
/// </summary>
/// <exception cref="ArgumentNullException">If <paramref name="tokenStream"/> is null.</exception>
public GrammarRootAST(int type, IToken t, string text, ITokenStream tokenStream)
    : base(type, t, text)
{
    if (tokenStream == null)
    {
        throw new ArgumentNullException(nameof(tokenStream));
    }

    this.tokenStream = tokenStream;
}
/// <summary>
/// Creates a semantics message attached to a grammar and an offending token.
/// </summary>
public GrammarSemanticsMessage(int msgID, Grammar g, IToken offendingToken, object arg, object arg2)
    : base(msgID, arg, arg2)
{
    this.g = g;
    this.offendingToken = offendingToken;
}
/// <summary>
/// Creates a syntax message from a recognition exception, attached to a
/// grammar and an offending token.
/// </summary>
public GrammarSyntaxMessage(int msgID, Grammar grammar, IToken offendingToken, object arg, RecognitionException exception)
    : base(msgID, arg, null)
{
    g = grammar;
    this.offendingToken = offendingToken;
    this.exception = exception;
}
/// <summary>
/// Creates a semantics message; when an offending token is supplied, its
/// line/column are cached for error reporting.
/// </summary>
public GrammarSemanticsMessage(ErrorType etype, string fileName, IToken offendingToken, params object[] args)
    : base(etype, offendingToken, args)
{
    this.fileName = fileName;
    if (offendingToken == null)
    {
        return;
    }

    line = offendingToken.Line;
    charPosition = offendingToken.CharPositionInLine;
}
/// <summary>
/// Records a reference made from inside an action: if the surrounding
/// alternative has a token or rule with the referenced name, the action node
/// is indexed under that name.
/// </summary>
public virtual void TrackRef(IToken x)
{
    string name = x.Text;

    IList<TerminalAST> tokenMatches;
    if (alt.tokenRefs.TryGetValue(name, out tokenMatches) && tokenMatches != null)
    {
        alt.tokenRefsInActions.Map(name, node);
    }

    IList<GrammarAST> ruleMatches;
    if (alt.ruleRefs.TryGetValue(name, out ruleMatches) && ruleMatches != null)
    {
        alt.ruleRefsInActions.Map(name, node);
    }
}
/// <summary>
/// Creates a syntax message; when an offending token is supplied, its
/// line/column are cached for error reporting.
/// </summary>
public GrammarSyntaxMessage(ErrorType etype, string fileName, IToken offendingToken, RecognitionException antlrException, params object[] args)
    : base(etype, antlrException, offendingToken, args)
{
    this.fileName = fileName;
    this.offendingToken = offendingToken;
    if (offendingToken == null)
    {
        return;
    }

    line = offendingToken.Line;
    charPosition = offendingToken.CharPositionInLine;
}
/// <summary>
/// Translates an action's text into chunks; if the action text is wrapped in
/// curly braces, the outermost <c>{...}</c> pair is stripped first.
/// </summary>
public static IList<ActionChunk> TranslateAction(OutputModelFactory factory, RuleFunction rf, IToken tokenWithinAction, ActionAST node)
{
    string action = tokenWithinAction.Text;
    if (!string.IsNullOrEmpty(action) && action[0] == '{')
    {
        int open = action.IndexOf('{');
        int close = action.LastIndexOf('}');
        if (open >= 0 && close >= 0)
        {
            // Trim the surrounding {...}
            action = action.Substring(open + 1, close - open - 1);
        }
    }

    return TranslateActionChunk(factory, rf, action, node);
}
/// <summary>
/// A <c>(...)+</c> block node; greedy unless a nongreedy marker token was
/// present in the grammar.
/// </summary>
public PlusBlockAST(int type, IToken t, IToken nongreedy)
    : base(type, t)
{
    // A non-null marker token means the block was written as nongreedy.
    _greedy = nongreedy == null;
}
/// <summary>
/// Creates a node for <paramref name="newRoot"/> and makes it the root over
/// <paramref name="oldRoot"/>.
/// </summary>
public virtual object BecomeRoot(IToken newRoot, object oldRoot)
{
    object rootNode = Create(newRoot);
    return BecomeRoot(rootNode, oldRoot);
}
/// <summary>
/// Tell me how to create a token for use with imaginary token nodes.
/// For example, there is probably no input symbol associated with imaginary
/// token DECL, but you need to create it as a payload or whatever for
/// the DECL node as in ^(DECL type ID).
///
/// This is a variant of CreateToken where the new token is derived from
/// an actual real input token. Typically this is for converting '{'
/// tokens to BLOCK etc... You'll see
///
///   r : lc='{' ID+ '}' -> ^(BLOCK[$lc] ID+) ;
///
/// If you care what the token payload objects' type is, you should
/// override this method and any other CreateToken variant.
/// </summary>
/// <param name="fromToken">Real input token the new token is derived from.</param>
/// <returns>A new token based on <paramref name="fromToken"/>.</returns>
public abstract IToken CreateToken(IToken fromToken);
/// <summary>
/// Validates a lexer rule command against the commands already collected for
/// the current rule, reporting duplicated or incompatible combinations, then
/// records the command.
/// </summary>
/// <param name="command">Command name (e.g. "skip", "more", "type", "channel", "mode").</param>
/// <param name="commandToken">Token used to position any reported error.</param>
private void CheckCommands(string command, IToken commandToken)
{
    // Command combinations list: https://github.com/antlr/antlr4/issues/1388#issuecomment-263344701
    // pushMode/popMode combine freely with everything, so they are exempt.
    if (!command.Equals("pushMode") && !command.Equals("popMode"))
    {
        if (ruleCommands.Contains(command))
        {
            g.tool.errMgr.GrammarError(ErrorType.DUPLICATED_COMMAND, g.fileName, commandToken, command);
        }

        // BUG FIX: the original tested `!ruleCommands.Equals("mode")` — a
        // collection is never equal to a string, so that was always true and
        // the incompatibility check also ran for the "mode" command. The
        // intent is to skip the check for "mode".
        if (!command.Equals("mode"))
        {
            string firstCommand = null;

            if (command.Equals("skip"))
            {
                if (ruleCommands.Contains("more"))
                {
                    firstCommand = "more";
                }
                else if (ruleCommands.Contains("type"))
                {
                    firstCommand = "type";
                }
                else if (ruleCommands.Contains("channel"))
                {
                    firstCommand = "channel";
                }
            }
            else if (command.Equals("more"))
            {
                if (ruleCommands.Contains("skip"))
                {
                    firstCommand = "skip";
                }
                else if (ruleCommands.Contains("type"))
                {
                    firstCommand = "type";
                }
                else if (ruleCommands.Contains("channel"))
                {
                    firstCommand = "channel";
                }
            }
            else if (command.Equals("type") || command.Equals("channel"))
            {
                if (ruleCommands.Contains("more"))
                {
                    firstCommand = "more";
                }
                else if (ruleCommands.Contains("skip"))
                {
                    firstCommand = "skip";
                }
            }

            if (firstCommand != null)
            {
                g.tool.errMgr.GrammarError(ErrorType.INCOMPATIBLE_COMMANDS, g.fileName, commandToken, firstCommand, command);
            }
        }
    }

    ruleCommands.Add(command);
}
/// <summary>
/// Creates a node for <paramref name="newRoot"/> and hoists it above
/// <paramref name="oldRoot"/>.
/// </summary>
public virtual object BecomeRoot(IToken newRoot, object oldRoot)
{
    return BecomeRoot(Create(newRoot), oldRoot);
}
/// <summary>
/// Reports a hidden-channel token consumption over the debug connection.
/// </summary>
public override void ConsumeHiddenToken(IToken t)
{
    Transmit("consumeHiddenToken\t" + SerializeToken(t));
}
/// <summary>
/// Maps a lexer token to the concrete tree-node class for that token type.
/// Returns a plain <c>CommonTree</c> for a null token and null for any token
/// type not listed below.
/// </summary>
public override object Create(Antlr.Runtime.IToken token)
{
    if (token == null)
    {
        return(new CommonTree());
    }
    switch (token.Type)
    {
        // Query options
        case LinqToQuerystringLexer.TOP: return(new TopNode(inputType, token, this));
        case LinqToQuerystringLexer.SKIP: return(new SkipNode(inputType, token, this));
        case LinqToQuerystringLexer.ORDERBY: return(new OrderByNode(inputType, token, this));
        case LinqToQuerystringLexer.FILTER: return(new FilterNode(inputType, token, this));
        case LinqToQuerystringLexer.SELECT: return(new SelectNode(inputType, token, this));
        case LinqToQuerystringLexer.INLINECOUNT: return(new InlineCountNode(inputType, token, this));
        case LinqToQuerystringLexer.EXPAND: return(new ExpandNode(inputType, token, this));

        // Logical and comparison operators
        case LinqToQuerystringLexer.NOT: return(new NotNode(inputType, token, this));
        case LinqToQuerystringLexer.AND: return(new AndNode(inputType, token, this));
        case LinqToQuerystringLexer.OR: return(new OrNode(inputType, token, this));
        case LinqToQuerystringLexer.EQUALS: return(new EqualsNode(inputType, token, this));
        case LinqToQuerystringLexer.NOTEQUALS: return(new NotEqualsNode(inputType, token, this));
        case LinqToQuerystringLexer.GREATERTHAN: return(new GreaterThanNode(inputType, token, this));
        case LinqToQuerystringLexer.GREATERTHANOREQUAL: return(new GreaterThanOrEqualNode(inputType, token, this));
        case LinqToQuerystringLexer.LESSTHAN: return(new LessThanNode(inputType, token, this));
        case LinqToQuerystringLexer.LESSTHANOREQUAL: return(new LessThanOrEqualNode(inputType, token, this));

        // String functions
        case LinqToQuerystringLexer.STARTSWITH: return(new StartsWithNode(inputType, token, this));
        case LinqToQuerystringLexer.ENDSWITH: return(new EndsWithNode(inputType, token, this));
        case LinqToQuerystringLexer.SUBSTRINGOF: return(new SubstringOfNode(inputType, token, this));
        case LinqToQuerystringLexer.TOLOWER: return(new ToLowerNode(inputType, token, this));
        case LinqToQuerystringLexer.TOUPPER: return(new ToUpperNode(inputType, token, this));

        // Date/time component functions
        case LinqToQuerystringLexer.YEAR: return(new YearNode(inputType, token, this));
        case LinqToQuerystringLexer.YEARS: return(new YearsNode(inputType, token, this));
        case LinqToQuerystringLexer.MONTH: return(new MonthNode(inputType, token, this));
        case LinqToQuerystringLexer.DAY: return(new DayNode(inputType, token, this));
        case LinqToQuerystringLexer.DAYS: return(new DaysNode(inputType, token, this));
        case LinqToQuerystringLexer.HOUR: return(new HourNode(inputType, token, this));
        case LinqToQuerystringLexer.HOURS: return(new HoursNode(inputType, token, this));
        case LinqToQuerystringLexer.MINUTE: return(new MinuteNode(inputType, token, this));
        case LinqToQuerystringLexer.MINUTES: return(new MinutesNode(inputType, token, this));
        case LinqToQuerystringLexer.SECOND: return(new SecondNode(inputType, token, this));
        case LinqToQuerystringLexer.SECONDS: return(new SecondsNode(inputType, token, this));

        // Collection/aggregate functions
        case LinqToQuerystringLexer.ANY: return(new AnyNode(inputType, token, this));
        case LinqToQuerystringLexer.ALL: return(new AllNode(inputType, token, this));
        case LinqToQuerystringLexer.COUNT: return(new CountNode(inputType, token, this));
        case LinqToQuerystringLexer.AVERAGE: return(new AverageNode(inputType, token, this));
        case LinqToQuerystringLexer.MAX: return(new MaxNode(inputType, token, this));
        case LinqToQuerystringLexer.MIN: return(new MinNode(inputType, token, this));
        case LinqToQuerystringLexer.SUM: return(new SumNode(inputType, token, this));
        case LinqToQuerystringLexer.ALIAS: return(new AliasNode(inputType, token, this));

        // Identifiers: forceDynamicProperties turns plain identifiers into
        // dynamic-identifier nodes.
        case LinqToQuerystringLexer.DYNAMICIDENTIFIER: return(new DynamicIdentifierNode(inputType, token, this));
        case LinqToQuerystringLexer.IDENTIFIER:
            if (forceDynamicProperties)
            {
                return(new DynamicIdentifierNode(inputType, token, this));
            }
            return(new IdentifierNode(inputType, token, this));

        // Literals
        case LinqToQuerystringLexer.STRING: return(new StringNode(inputType, token, this));
        case LinqToQuerystringLexer.BOOL: return(new BoolNode(inputType, token, this));
        case LinqToQuerystringLexer.INT: return(new IntNode(inputType, token, this));
        case LinqToQuerystringLexer.DATETIME: return(new DateTimeNode(inputType, token, this));
        case LinqToQuerystringLexer.DOUBLE: return(new DoubleNode(inputType, token, this));
        case LinqToQuerystringLexer.SINGLE: return(new SingleNode(inputType, token, this));
        case LinqToQuerystringLexer.DECIMAL: return(new DecimalNode(inputType, token, this));
        case LinqToQuerystringLexer.LONG: return(new LongNode(inputType, token, this));
        case LinqToQuerystringLexer.BYTE: return(new ByteNode(inputType, token, this));
        case LinqToQuerystringLexer.GUID: return(new GuidNode(inputType, token, this));

        // Ordering direction, null literal, ignorable tokens
        case LinqToQuerystringLexer.DESC: return(new DescNode(inputType, token, this));
        case LinqToQuerystringLexer.ASC: return(new AscNode(inputType, token, this));
        case LinqToQuerystringLexer.NULL: return(new NullNode(inputType, token, this));
        case LinqToQuerystringLexer.IGNORED: return(new IgnoredNode(inputType, token, this));
    }
    // Unknown token type.
    return(null);
}
/// <summary>
/// Attribute scope for a rule label; the scope name is the referenced rule's
/// name prefixed with "ref_".
/// </summary>
public RuleLabelScope(Rule referencedRule, IToken actionToken)
    : base("ref_" + referencedRule.Name, actionToken)
{
    this.referencedRule = referencedRule;
}
/// <summary>Creates a <c>TreePattern</c> node for the given token payload.</summary>
public override object Create(IToken payload)
{
    return new TreePattern(payload);
}
/// <summary>
/// Exception recording the token location where the tree-grammar problem
/// occurred.
/// </summary>
public v3TreeGrammarException(IToken location)
{
    this.location = location;
}
/// <summary>
/// Creates an <c>LSLAst</c> node from a freshly minted token of the given
/// type and text.
/// </summary>
public override object Create(int tokenType, string text)
{
    return new LSLAst(CreateToken(tokenType, text));
}
/// <summary>
/// Callback for a non-local attribute reference; no-op by default so
/// subclasses can override only what they need.
/// </summary>
public virtual void NonLocalAttr(string expr, IToken x, IToken y)
{
    // Intentionally empty.
}
/// <summary>
/// Convenience overload: compile-time message with no cause and no arguments.
/// </summary>
public TemplateCompiletimeMessage(ErrorType error, string sourceName, IToken templateToken, IToken token)
    : this(error, sourceName, templateToken, token, null)
{
}
/// <summary>
/// Announces creation of a tree node backed by <paramref name="token"/>,
/// identifying the node by its unique adaptor ID and the token by its index.
/// </summary>
public override void CreateNode(object node, IToken token)
{
    int id = adaptor.GetUniqueID(node);
    Transmit("createNode\t" + id + "\t" + token.TokenIndex);
}
/// <summary>
/// Convenience overload: compile-time message with a single argument.
/// </summary>
public TemplateCompiletimeMessage(ErrorType error, string sourceName, IToken templateToken, IToken token, Exception cause, object arg)
    : this(error, sourceName, templateToken, token, cause, arg, null)
{
}
/// <summary>
/// Creates a node from a copy of <paramref name="fromToken"/> with its type
/// and text overridden.
/// </summary>
public virtual object Create(int tokenType, IToken fromToken, string text)
{
    IToken copy = CreateToken(fromToken);
    copy.Type = tokenType;
    copy.Text = text;
    return (ITree)Create(copy);
}
/// <summary>Creates a tree-pattern node with the given token payload.</summary>
public TreePattern(IToken payload)
    : base(payload)
{
}
/// <summary>Creates a StringTemplate AST node with the given token payload.</summary>
public StringTemplateAST(IToken payload)
    : base(payload)
{
}
/// <summary>
/// Sets the token boundary markers on a tree node. NOTE(review): parameter
/// names are as declared upstream; they presumably mean the node, its start
/// token, and its stop token — confirm against concrete implementations.
/// </summary>
public abstract void SetTokenBoundaries(object param1, IToken param2, IToken param3);
/// <summary>Creates a rule-reference node from a token payload.</summary>
public RuleRefAST(IToken t)
    : base(t)
{
}
/// <summary>
/// Creates a tree node for the given token payload. NOTE(review): the
/// parameter name is as declared upstream.
/// </summary>
public abstract object Create(IToken param1);
/// <summary>Creates a rule-reference node with an explicit node type.</summary>
public RuleRefAST(int type, IToken t)
    : base(type, t)
{
}
/// <summary>
/// Creates a template runtime message carrying a single argument.
/// </summary>
public TemplateMessage(ErrorType error, Template self, Exception cause, IToken where, object arg)
    : this(error, self, cause, where)
{
    this.arg = arg;
}
/// <summary>
/// Returns an <c>LSLErrorNode</c> covering the tokens from
/// <paramref name="start"/> to <paramref name="stop"/>.
/// </summary>
public override object ErrorNode(Antlr.Runtime.ITokenStream input, Antlr.Runtime.IToken start, Antlr.Runtime.IToken stop, Antlr.Runtime.RecognitionException e)
{
    return new LSLErrorNode(input, start, stop, e);
}
/// <summary>
/// Creates an <c>LSLAst</c> node from a copy of <paramref name="fromToken"/>
/// with its type overridden.
/// </summary>
public override object Create(int tokenType, Antlr.Runtime.IToken fromToken)
{
    Antlr.Runtime.IToken copy = CreateToken(fromToken);
    copy.Type = tokenType;
    return new LSLAst(copy);
}
/// <summary>
/// Normalizes the indentation of an action's text chunks: splits string
/// chunks into lines (each ending with its LF), strips leading/trailing blank
/// lines, removes the common leading indent measured from the first non-blank
/// line, and warns on stderr about badly indented lines.
/// </summary>
/// <param name="chunks">Mixed list of strings and translator chunk objects.</param>
/// <param name="actionToken">Token the action came from; supplies the start
/// line and column used to synthesize leading whitespace.</param>
/// <returns>The rewritten chunk list.</returns>
public override IList PostProcessAction(IList chunks, IToken actionToken)
{
    /* TODO
     * - check for and report TAB usage
     */

    /* First build a new list of chunks. String chunks are split into lines
     * and some whitespace may be added at the beginning.
     *
     * As a result we get a list of chunks
     * - where the first line starts at column 0
     * - where every LF is at the end of a string chunk
     */
    List<object> nChunks = new List<object>();
    for (int i = 0; i < chunks.Count; i++)
    {
        object chunk = chunks[i];
        string text = chunk as string;
        if (text != null)
        {
            if (nChunks.Count == 0 && actionToken.CharPositionInLine > 0)
            {
                // first chunk and some 'virtual' WS at beginning: prepend to this chunk
                string ws = new string(' ', actionToken.CharPositionInLine);
                text = ws + text;
            }

            // BUG FIX: the original used text.Split('\n'), which removes the
            // newline characters and breaks the invariant documented above
            // ("every LF is at the end of a string chunk"); the EndsWith("\n")
            // tests below then never matched, so line counting, blank-line
            // stripping, and the newlines themselves were lost. Split manually,
            // keeping each '\n' at the end of its chunk.
            int start = 0;
            int nl;
            while ((nl = text.IndexOf('\n', start)) >= 0)
            {
                nChunks.Add(text.Substring(start, nl - start + 1));
                start = nl + 1;
            }
            if (start < text.Length)
            {
                nChunks.Add(text.Substring(start));
            }
        }
        else
        {
            if (nChunks.Count == 0 && actionToken.CharPositionInLine > 0)
            {
                // first chunk and some 'virtual' WS at beginning: add as a chunk of its own
                string ws = new string(' ', actionToken.CharPositionInLine);
                nChunks.Add(ws);
            }
            nChunks.Add(chunk);
        }
    }

    int lineNo = actionToken.Line;
    int col = 0;

    // strip trailing empty lines
    int lastChunk = nChunks.Count - 1;
    while (lastChunk > 0
        && nChunks[lastChunk] is string
        && ((string)nChunks[lastChunk]).Trim().Length == 0)
    {
        lastChunk--;
    }

    // strip leading empty lines
    int firstChunk = 0;
    while (firstChunk <= lastChunk
        && nChunks[firstChunk] is string
        && ((string)nChunks[firstChunk]).Trim().Length == 0
        && ((string)nChunks[firstChunk]).EndsWith("\n"))
    {
        lineNo++;
        firstChunk++;
    }

    int indent = -1;
    for (int i = firstChunk; i <= lastChunk; i++)
    {
        object chunk = nChunks[i];
        string text = chunk as string;
        if (text != null)
        {
            if (col == 0)
            {
                if (indent == -1)
                {
                    // first non-blank line: count the leading whitespace to strip everywhere
                    indent = 0;
                    for (int j = 0; j < text.Length; j++)
                    {
                        if (!char.IsWhiteSpace(text[j]))
                        {
                            break;
                        }
                        indent++;
                    }
                }

                if (text.Length >= indent)
                {
                    int j;
                    for (j = 0; j < indent; j++)
                    {
                        if (!char.IsWhiteSpace(text[j]))
                        {
                            // should do real error reporting here...
                            Console.Error.WriteLine("Warning: badly indented line " + lineNo + " in action:");
                            Console.Error.WriteLine(text);
                            break;
                        }
                    }
                    nChunks[i] = text.Substring(j);
                }
                else if (text.Trim().Length > 0)
                {
                    // should do real error reporting here...
                    Console.Error.WriteLine("Warning: badly indented line " + lineNo + " in action:");
                    Console.Error.WriteLine(text);
                }
            }

            if (text.EndsWith("\n"))
            {
                lineNo++;
                col = 0;
            }
            else
            {
                col += text.Length;
            }
        }
        else
        {
            // not really correct, but all I need is col to increment...
            col += 1;
        }
    }

    return nChunks;
}
/// <summary>
/// Marks this action as dependent when a qualified attribute is referenced.
/// </summary>
public override void QualifiedAttr(string expr, IToken x, IToken y)
{
    dependent = true;
}
/// <summary>
/// Callback for assigning a non-local attribute; no-op by default so
/// subclasses can override only what they need.
/// </summary>
public virtual void SetNonLocalAttr(string expr, IToken x, IToken y, IToken rhs)
{
    // Intentionally empty.
}
/// <summary>
/// Marks this action as dependent when a non-local attribute is assigned.
/// </summary>
public override void SetNonLocalAttr(string expr, IToken x, IToken y, IToken rhs)
{
    dependent = true;
}
/// <summary>
/// Reports a lookahead query over the debug connection; null tokens are
/// silently skipped.
/// </summary>
public override void LT(int i, IToken t)
{
    if (t == null)
    {
        return;
    }

    Transmit("LT\t" + i + "\t" + SerializeToken(t));
}
/// <summary>
/// Marks this action as dependent when an attribute is referenced.
/// </summary>
public override void Attr(string expr, IToken x)
{
    dependent = true;
}
/// <summary>
/// Serializes a token as tab-separated fields: index, type, channel, line,
/// column, followed by its (escaped) text.
/// </summary>
protected internal virtual string SerializeToken(IToken t)
{
    StringBuilder buf = new StringBuilder(50);
    buf.Append(t.TokenIndex).Append('\t')
       .Append(t.Type).Append('\t')
       .Append(t.Channel).Append('\t')
       .Append(t.Line).Append('\t')
       .Append(t.CharPositionInLine);
    SerializeText(buf, t.Text);
    return buf.ToString();
}
/// <summary>Creates a wildcard tree-pattern node with the given token payload.</summary>
public WildcardTreePattern(IToken payload)
    : base(payload)
{
}
/// <summary>
/// Creates a node from a copy of <paramref name="fromToken"/> with its type
/// overridden.
/// </summary>
public virtual object Create(int tokenType, IToken fromToken)
{
    IToken copy = CreateToken(fromToken);
    copy.Type = tokenType;
    return (ITree)Create(copy);
}
/// <summary>Creates a tree node wrapping the given token payload.</summary>
public CommonTree(IToken t)
{
    token = t;
}
/// <summary>
/// Convenience overload: creates a semantics message with no arguments.
/// </summary>
public GrammarSemanticsMessage(int msgID, Grammar g, IToken offendingToken)
    : this(msgID, g, offendingToken, null, null)
{
}
/// <summary>
/// Walks the previous parse result's tree and records, for every grammar
/// rule and every entry of a tokens section, a tracking span covering its
/// definition and a tracking point at its name token. The collected map is
/// stored in <c>RuleSpans</c>, keyed by rule/token name.
/// </summary>
private void ExtractRuleSpans()
{
    Dictionary<string, KeyValuePair<ITrackingSpan, ITrackingPoint>> rules = new Dictionary<string, KeyValuePair<ITrackingSpan, ITrackingPoint>>();
    var antlrParseResultArgs = PreviousParseResult;
    if (antlrParseResultArgs != null)
    {
        IAstRuleReturnScope resultArgs = antlrParseResultArgs.Result as IAstRuleReturnScope;
        var result = resultArgs != null ? resultArgs.Tree as CommonTree : null;
        if (result != null)
        {
            foreach (CommonTree child in result.Children)
            {
                if (child == null || string.IsNullOrEmpty(child.Text))
                {
                    continue;
                }

                if (child.Text == "rule" && child.ChildCount > 0)
                {
                    // A rule subtree: its first child holds the rule name.
                    var ruleName = child.GetChild(0).Text;
                    if (string.IsNullOrEmpty(ruleName))
                    {
                        continue;
                    }

                    // Skip the reserved "Tokens" rule.
                    if (ruleName == "Tokens")
                    {
                        continue;
                    }

                    // Span covers the rule's full token range; point sits on the name token.
                    IToken startToken = antlrParseResultArgs.Tokens[child.TokenStartIndex];
                    IToken stopToken = antlrParseResultArgs.Tokens[child.TokenStopIndex];
                    Span span = new Span(startToken.StartIndex, stopToken.StopIndex - startToken.StartIndex + 1);
                    ITrackingSpan trackingSpan = antlrParseResultArgs.Snapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeNegative);
                    ITrackingPoint trackingPoint = antlrParseResultArgs.Snapshot.CreateTrackingPoint(((CommonTree)child.GetChild(0)).Token.StartIndex, PointTrackingMode.Negative);
                    rules[ruleName] = new KeyValuePair<ITrackingSpan, ITrackingPoint>(trackingSpan, trackingPoint);
                }
                else if (child.Text.StartsWith("tokens"))
                {
                    // A tokens section: entries are either NAME=VALUE subtrees or bare NAME nodes.
                    foreach (CommonTree tokenChild in child.Children)
                    {
                        if (tokenChild.Text == "=" && tokenChild.ChildCount == 2)
                        {
                            // NAME=VALUE entry: the first child holds the token name.
                            var ruleName = tokenChild.GetChild(0).Text;
                            if (string.IsNullOrEmpty(ruleName))
                            {
                                continue;
                            }

                            IToken startToken = antlrParseResultArgs.Tokens[tokenChild.TokenStartIndex];
                            IToken stopToken = antlrParseResultArgs.Tokens[tokenChild.TokenStopIndex];
                            Span span = new Span(startToken.StartIndex, stopToken.StopIndex - startToken.StartIndex + 1);
                            ITrackingSpan trackingSpan = antlrParseResultArgs.Snapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeNegative);
                            ITrackingPoint trackingPoint = antlrParseResultArgs.Snapshot.CreateTrackingPoint(((CommonTree)tokenChild.GetChild(0)).Token.StartIndex, PointTrackingMode.Negative);
                            rules[ruleName] = new KeyValuePair<ITrackingSpan, ITrackingPoint>(trackingSpan, trackingPoint);
                        }
                        else if (tokenChild.ChildCount == 0)
                        {
                            // Bare NAME entry: the node itself is the token name.
                            var ruleName = tokenChild.Text;
                            if (string.IsNullOrEmpty(ruleName))
                            {
                                continue;
                            }

                            IToken startToken = antlrParseResultArgs.Tokens[tokenChild.TokenStartIndex];
                            IToken stopToken = antlrParseResultArgs.Tokens[tokenChild.TokenStopIndex];
                            Span span = new Span(startToken.StartIndex, stopToken.StopIndex - startToken.StartIndex + 1);
                            ITrackingSpan trackingSpan = antlrParseResultArgs.Snapshot.CreateTrackingSpan(span, SpanTrackingMode.EdgeNegative);
                            ITrackingPoint trackingPoint = antlrParseResultArgs.Snapshot.CreateTrackingPoint(tokenChild.Token.StartIndex, PointTrackingMode.Negative);
                            rules[ruleName] = new KeyValuePair<ITrackingSpan, ITrackingPoint>(trackingSpan, trackingPoint);
                        }
                    }
                }
            }
        }
    }

    RuleSpans = rules;
}