/// <summary>
/// Computes the set of input symbols which could follow ATN state number
/// <paramref name="stateNumber"/> in the specified full parse
/// <paramref name="context"/>, following rule invocations up the context
/// chain while epsilon remains reachable.
/// </summary>
/// <exception cref="ArgumentException">if the state number is out of range</exception>
public virtual IntervalSet GetExpectedTokens(int stateNumber, RuleContext context)
{
    if (stateNumber < 0 || stateNumber >= states.Count)
    {
        throw new ArgumentException("Invalid state number.");
    }
    ATNState startState = states[stateNumber];
    IntervalSet follow = NextTokens(startState);
    if (!follow.Contains(TokenConstants.EPSILON))
    {
        // No epsilon: the local follow set is already complete.
        return follow;
    }
    IntervalSet expected = new IntervalSet();
    expected.AddAll(follow);
    expected.Remove(TokenConstants.EPSILON);
    // Walk up the invocation chain, merging what can follow each rule
    // invocation, for as long as epsilon keeps the rule's end reachable.
    RuleContext currentContext = context;
    while (currentContext != null && currentContext.invokingState >= 0 && follow.Contains(TokenConstants.EPSILON))
    {
        ATNState invoker = states[currentContext.invokingState];
        RuleTransition ruleTransition = (RuleTransition)invoker.Transition(0);
        follow = NextTokens(ruleTransition.followState);
        expected.AddAll(follow);
        expected.Remove(TokenConstants.EPSILON);
        currentContext = currentContext.Parent;
    }
    if (follow.Contains(TokenConstants.EPSILON))
    {
        // Ran off the top of the context stack: EOF is viable here.
        expected.Add(TokenConstants.EOF);
    }
    return expected;
}
/// <summary>
/// The default implementation of
/// <see cref="IAntlrErrorStrategy.Sync(Parser)"/>
/// makes sure that the current lookahead symbol is consistent with what we
/// were expecting at this point in the ATN. You can call this anytime but
/// ANTLR only generates code to check before subrules/loops and each iteration.
/// <p>Implements Jim Idle's magic sync mechanism in closures and optional
/// subrules. E.g.,</p>
/// <pre>
/// a : sync ( stuff sync )* ;
/// sync : {consume to what can follow sync} ;
/// </pre>
/// At the start of a sub rule upon error,
/// <see cref="Sync(Parser)"/>
/// performs single token deletion, if possible. If it can't do that, it bails
/// on the current rule and uses the default error recovery, which consumes
/// until the resynchronization set of the current rule.
/// <p>If the sub rule is optional
/// (<c>(...)?</c>, <c>(...)*</c>, or block with an empty alternative), then
/// the expected set includes what follows the subrule.</p>
/// <p>During loop iteration, it consumes until it sees a token that can start
/// a sub rule or what follows the loop. Yes, that is pretty aggressive. We opt
/// to stay in the loop as long as possible.</p>
/// <p><strong>ORIGINS</strong></p>
/// <p>Previous versions of ANTLR did a poor job of their recovery within
/// loops. A single mismatched or missing token would force the parser to bail
/// out of the entire rules surrounding the loop. So, for rule</p>
/// <pre>
/// classDef : 'class' ID '{' member* '}'
/// </pre>
/// input with an extra token between members would force the parser to
/// consume until it found the next class definition rather than the next
/// member definition of the current class.
/// <p>This functionality costs a little bit of effort because the parser has
/// to compare the token set at the start of the loop and at each iteration.
/// If for some reason speed is suffering for you, you can turn off this
/// functionality by simply overriding this method as a blank { }.</p>
/// </summary>
/// <exception cref="Antlr4.Runtime.RecognitionException"/>
public virtual void Sync(Parser recognizer)
{
    ATNState s = recognizer.Interpreter.atn.states[recognizer.State];
    if (InErrorRecoveryMode(recognizer))
    {
        // Already recovering; don't try to sync.
        return;
    }
    ITokenStream tokenStream = (ITokenStream)recognizer.InputStream;
    int lookahead = tokenStream.La(1);
    // Try the cheaper subset first; might get lucky. Seems to shave a wee bit off.
    IntervalSet nextTokens = recognizer.Atn.NextTokens(s);
    if (nextTokens.Contains(TokenConstants.Epsilon) || nextTokens.Contains(lookahead))
    {
        return;
    }
    switch (s.StateType)
    {
        case StateType.BlockStart:
        case StateType.StarBlockStart:
        case StateType.PlusBlockStart:
        case StateType.StarLoopEntry:
        {
            // At the start of a subrule: report the error and recover via
            // single-token deletion if possible, otherwise signal a mismatch.
            if (SingleTokenDeletion(recognizer) != null)
            {
                return;
            }
            throw new InputMismatchException(recognizer);
        }

        case StateType.PlusLoopBack:
        case StateType.StarLoopBack:
        {
            // At a loop back-edge: report the stray token and consume until
            // something that can continue the iteration or follow the rule.
            ReportUnwantedToken(recognizer);
            IntervalSet expecting = recognizer.GetExpectedTokens();
            IntervalSet whatFollowsLoopIterationOrRule = expecting.Or(GetErrorRecoverySet(recognizer));
            ConsumeUntil(recognizer, whatFollowsLoopIterationOrRule);
            break;
        }

        default:
        {
            // Do nothing if we can't identify the exact kind of ATN state.
            break;
        }
    }
}
/// <summary>
/// Attempts the single-token deletion inline error recovery strategy: when
/// the symbol after the current one is in the expected set, the current
/// (unwanted) token is reported and deleted, and the following token is
/// returned as the symbol actually matched.
/// </summary>
/// <param name="recognizer">the parser instance</param>
/// <returns>the matched symbol when deletion succeeds, otherwise <c>null</c></returns>
protected internal virtual IToken SingleTokenDeletion([NotNull] Parser recognizer)
{
    int nextTokenType = ((ITokenStream)recognizer.InputStream).La(2);
    IntervalSet expecting = GetExpectedTokens(recognizer);
    if (!expecting.Contains(nextTokenType))
    {
        // LA(2) wouldn't match either; deletion is not viable.
        return null;
    }
    ReportUnwantedToken(recognizer);
    // Simply delete the extra token...
    recognizer.Consume();
    // ...and return the token we are actually matching.
    IToken matchedSymbol = recognizer.CurrentToken;
    // We know the current token is correct.
    ReportMatch(recognizer);
    return matchedSymbol;
}
/// <summary>
/// <inheritDoc></inheritDoc>
/// <p/>
/// The default implementation resynchronizes the parser by consuming tokens
/// until we find one in the resynchronization set--loosely the set of tokens
/// that can follow the current rule.
/// </summary>
public virtual void Recover(Parser recognizer, RecognitionException e)
{
    ITokenStream input = (ITokenStream)recognizer.InputStream;
    if (lastErrorIndex == input.Index && lastErrorStates != null && lastErrorStates.Contains(recognizer.State))
    {
        // Uh oh: another error at the same token index and a previously
        // visited ATN state; LT(1) must be in the recovery token set, so
        // nothing got consumed. Consume a single token at least, to prevent
        // an infinite loop; this is a failsafe.
        recognizer.Consume();
    }
    lastErrorIndex = input.Index;
    if (lastErrorStates == null)
    {
        lastErrorStates = new IntervalSet();
    }
    lastErrorStates.Add(recognizer.State);
    IntervalSet followSet = GetErrorRecoverySet(recognizer);
    ConsumeUntil(recognizer, followSet);
}
/// <summary>
/// Consumes tokens until EOF, a token in the resynchronization
/// <paramref name="set"/>, or a statement separator
/// (<c>XSharpLexer.EOS</c>) is reached.
/// </summary>
protected internal override void ConsumeUntil(Parser recognizer, IntervalSet set)
{
    ITokenStream input = (ITokenStream)recognizer.InputStream;
    int ttype = input.La(1);
    while (ttype != TokenConstants.Eof && !set.Contains(ttype) && ttype != XSharpLexer.EOS)
    {
        // The consumed token itself is not needed (the original captured it
        // in an unused local only for a commented-out debug print).
        recognizer.Consume();
        ttype = input.La(1);
    }
}
public void TestMembership()
{
    // Build the set {15} U [50..60].
    IntervalSet set = IntervalSet.Of(15, 15);
    set.Add(50, 60);
    // Values outside both intervals must be rejected.
    Assert.IsFalse(set.Contains(0));
    Assert.IsFalse(set.Contains(20));
    Assert.IsFalse(set.Contains(100));
    // The singleton, an interior value, and both interval endpoints.
    Assert.IsTrue(set.Contains(15));
    Assert.IsTrue(set.Contains(55));
    Assert.IsTrue(set.Contains(50));
    Assert.IsTrue(set.Contains(60));
}
/// <summary>
/// Multiple token deletion resynchronization strategy: consumes tokens until
/// EOF, a member of the resynchronization <paramref name="set"/>, or a token
/// the strategy refuses to consume
/// (<c>ErrorStrategyShouldNotConsumeThisToken</c>).
/// </summary>
protected override void ConsumeUntil(Antlr4.Runtime.Parser recognizer, IntervalSet set)
{
    ITokenStream input = (ITokenStream)recognizer.InputStream;
    Token lastConsumedToken = (Token)input.Lt(-1);
    Token nextToken = (Token)input.Lt(1);
    while (nextToken.Type != TokenConstants.Eof
           && !set.Contains(nextToken.Type)
           && !ErrorStrategyShouldNotConsumeThisToken(lastConsumedToken, nextToken))
    {
        recognizer.Consume();
        lastConsumedToken = nextToken;
        nextToken = (Token)input.Lt(1);
    }
}
/// <summary>
/// This method implements the single-token insertion inline error recovery
/// strategy.
/// </summary>
/// <remarks>
/// Called by <see cref="RecoverInline(Parser)"/> when the single-token
/// deletion strategy fails to recover from the mismatched input. Insertion is
/// viable when the <c>LA(1)</c> input symbol could be successfully matched if
/// it were instead the <c>LA(2)</c> symbol. If this method returns
/// <see langword="true"/>, the caller is responsible for creating and
/// inserting a token with the correct type to produce this behavior, and
/// <paramref name="recognizer"/> will be in error recovery mode.
/// </remarks>
/// <param name="recognizer">the parser instance</param>
/// <returns><see langword="true"/> if single-token insertion is a viable
/// recovery strategy for the current mismatched input, otherwise
/// <see langword="false"/></returns>
protected internal virtual bool SingleTokenInsertion(Parser recognizer)
{
    int currentSymbolType = ((ITokenStream)recognizer.InputStream).LA(1);
    ATN atn = recognizer.Interpreter.atn;
    ATNState currentState = atn.states[recognizer.State];
    ATNState next = currentState.Transition(0).target;
    // If the current token is consistent with what could come after the
    // current ATN state, we know we're missing a token; error recovery is
    // free to conjure up and insert the missing one.
    IntervalSet expectingAtLL2 = atn.NextTokens(next, recognizer.RuleContext);
    if (!expectingAtLL2.Contains(currentSymbolType))
    {
        return false;
    }
    ReportMissingToken(recognizer);
    return true;
}
/// <summary>
/// This method implements the single-token insertion inline error recovery
/// strategy.
/// </summary>
/// <remarks>
/// Called by <see cref="RecoverInline(Parser)"/> when the single-token
/// deletion strategy fails to recover from the mismatched input. Insertion is
/// viable when the <c>LA(1)</c> input symbol could be successfully matched if
/// it were instead the <c>LA(2)</c> symbol. If this method returns
/// <see langword="true"/>, the caller is responsible for creating and
/// inserting a token with the correct type to produce this behavior, and
/// <paramref name="recognizer"/> will be in error recovery mode.
/// </remarks>
/// <param name="recognizer">the parser instance</param>
/// <returns><see langword="true"/> if single-token insertion is a viable
/// recovery strategy for the current mismatched input, otherwise
/// <see langword="false"/></returns>
protected internal virtual bool SingleTokenInsertion([NotNull] Parser recognizer)
{
    int currentSymbolType = ((ITokenStream)recognizer.InputStream).La(1);
    ATN atn = recognizer.Interpreter.atn;
    ATNState currentState = atn.states[recognizer.State];
    ATNState next = currentState.Transition(0).target;
    // If the current token is consistent with what could come after the
    // current ATN state, we know we're missing a token; error recovery is
    // free to conjure up and insert the missing one.
    IntervalSet expectingAtLL2 = atn.NextTokens(next, PredictionContext.FromRuleContext(atn, recognizer._ctx));
    if (!expectingAtLL2.Contains(currentSymbolType))
    {
        return false;
    }
    ReportMissingToken(recognizer);
    return true;
}
/// <summary>
/// Consumes tokens until EOF or a token in the resynchronization
/// <paramref name="set"/>, reporting an input mismatch for every token
/// consumed, then resets <c>PreviousToken</c>.
/// </summary>
protected override void ConsumeUntil([NotNull] Parser recognizer, [NotNull] IntervalSet set)
{
    int ttype = ((ITokenStream)recognizer.InputStream).La(1);
    while (ttype != TokenConstants.Eof && !set.Contains(ttype))
    {
        // The original guard `PreviousToken.Length == 0 || true` was
        // constantly true, so the mismatch was reported unconditionally and
        // the else branch was dead code; both have been removed, preserving
        // the original behavior.
        ReportInputMismatch(recognizer, new InputMismatchException(recognizer));
        PreviousToken = recognizer.CurrentToken.Text;
        recognizer.Consume();
        ttype = ((ITokenStream)recognizer.InputStream).La(1);
    }
    // Clear the marker once resynchronized.
    PreviousToken = "";
}
// Step to state and continue parsing input.
// Returns a list of transitions (edge paths) leading to a state that accepts
// input, or null when no path from here reaches the cursor/caret.
//   p      - stack of rule indices entered so far (copied before mutation).
//   t      - the edge whose _to state is being entered; carries the token
//            index at which the step occurred.
//   indent - recursion depth, used only for log indentation and as part of
//            the memoization key.
private List<List<Edge>> EnterState(List<int> p, Edge t, int indent)
{
    // Monotonically increasing id used to correlate log lines.
    int here = ++entry_value;
    // NOTE(review): index_on_transition is never read below — confirm it can be removed.
    int index_on_transition = t._index_at_transition;
    int token_index = t._index;
    ATNState state = t._to;
    // If this state is a rule start state, note which rule is being entered.
    var rule_match = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
    var start_rule = rule_match.Any() ? rule_match.FirstOrDefault().ruleIndex : -1;
    // Copy the rule stack so the caller's list is not mutated.
    var q = p.ToList();
    if (start_rule >= 0)
    {
        q.Add(start_rule);
    }
    // Upon reaching the cursor, return match.
    bool at_match = token_index == _cursor;
    if (at_match)
    {
        if (_log_parse)
        {
            System.Console.Error.Write(
                new String(' ', indent * 2) + "Entry " + here + " return ");
        }
        // A single-element path consisting of just the entering edge.
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    IToken input_token = _input[token_index];
    if (_log_parse)
    {
        var name = (start_rule >= 0) ? (" " + _parser.RuleNames[start_rule]) : "";
        System.Console.Error.WriteLine(
            new String(' ', indent * 2) + "Entry " + here
            + " State " + state + name + " tokenIndex " + token_index + " " + input_token.Text);
    }
    // Also stop when the current token lies at or beyond the cursor.
    bool at_match2 = input_token.TokenIndex >= _cursor;
    if (at_match2)
    {
        if (_log_parse)
        {
            System.Console.Error.Write(
                new String(' ', indent * 2) + "Entry " + here + " return ");
        }
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Memoization: skip a (state, token, depth) combination already explored
    // with this rule stack.
    // NOTE(review): the _visited_extra key embeds a List<int>; Tuple equality
    // compares the list by reference, not by contents, so this inner lookup
    // may never hit for equal-but-distinct stacks — confirm intended.
    if (_visited.ContainsKey(new Tuple<ATNState, int, int>(state, token_index, indent)))
    {
        if (_visited_extra.ContainsKey(new Tuple<ATNState, int, int, List<int>>(
            state, token_index, indent, p)))
        {
            if (_log_parse)
            {
                System.Console.Error.WriteLine(
                    new String(' ', indent * 2) + "already visited.");
            }
            return (null);
        }
    }
    _visited_extra[new Tuple<ATNState, int, int, List<int>>(state, token_index, indent, q)] = true;
    _visited[new Tuple<ATNState, int, int>(state, token_index, indent)] = true;
    List<List<Edge>> result = new List<List<Edge>>();
    // A stop state accepts: return the single-edge path.
    if (_stop_states.Contains(state))
    {
        if (_log_parse)
        {
            var n = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
            var r = n.Any() ? n.FirstOrDefault().ruleIndex : -1;
            var name = (r >= 0) ? (" " + _parser.RuleNames[r]) : "";
            System.Console.Error.Write(
                new String(' ', indent * 2) + "Entry " + here
                + " State " + state + name + " tokenIndex " + token_index + " " + input_token.Text
                + " return ");
        }
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Search all transitions from state.
    foreach (Transition transition in state.TransitionsArray)
    {
        List<List<Edge>> matches = null;
        switch (transition.TransitionType)
        {
            case TransitionType.RULE:
            {
                // Rule invocation: recurse into the invoked rule's submachine.
                RuleTransition rule = (RuleTransition)transition;
                ATNState sub_state = rule.target;
                matches = EnterState(q, new Edge()
                {
                    _from = state,
                    _to = rule.target,
                    _follow = rule.followState,
                    _label = rule.Label,
                    _type = rule.TransitionType,
                    _index = token_index,
                    _index_at_transition = token_index
                }, indent + 1);
                // An empty (non-null) match list is treated as an internal error.
                if (matches != null && matches.Count == 0)
                {
                    if (_log_parse)
                    {
                        System.Console.Error.WriteLine(
                            new String(' ', indent * 2) + "throwing exception.");
                    }
                    throw new Exception();
                }
                if (matches != null)
                {
                    // Paths ending at the rule's stop state are continued from
                    // the rule's follow state and stitched onto the sub-path.
                    List<List<Edge>> new_matches = new List<List<Edge>>();
                    foreach (List<Edge> match in matches)
                    {
                        Edge f = match.First();
                        // "to" is possibly final state of submachine.
                        Edge l = match.Last();
                        // "to" is start state of submachine.
                        bool is_final = _stop_states.Contains(f._to);
                        bool is_at_caret = f._index >= _cursor;
                        if (!is_final)
                        {
                            new_matches.Add(match);
                        }
                        else
                        {
                            // Continue past the rule via an epsilon edge to the
                            // follow state, at the sub-path's final token index.
                            List<List<Edge>> xxx = EnterState(q, new Edge()
                            {
                                _from = f._to,
                                _to = rule.followState,
                                _label = null,
                                _type = TransitionType.EPSILON,
                                _index = f._index,
                                _index_at_transition = f._index
                            }, indent + 1);
                            if (xxx != null && xxx.Count == 0)
                            {
                                if (_log_parse)
                                {
                                    System.Console.Error.WriteLine(
                                        new String(' ', indent * 2) + "throwing exception.");
                                }
                                throw new Exception();
                            }
                            if (xxx != null)
                            {
                                // Prepend each continuation onto the sub-path.
                                foreach (List<Edge> y in xxx)
                                {
                                    List<Edge> copy = y.ToList();
                                    foreach (Edge g in match)
                                    {
                                        copy.Add(g);
                                    }
                                    new_matches.Add(copy);
                                }
                            }
                        }
                    }
                    matches = new_matches;
                }
            }
            break;

            case TransitionType.PREDICATE:
                // Only follow a predicate transition when the predicate holds.
                if (CheckPredicate((PredicateTransition)transition))
                {
                    matches = EnterState(q, new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    }, indent + 1);
                    if (matches != null && matches.Count == 0)
                    {
                        if (_log_parse)
                        {
                            System.Console.Error.WriteLine(
                                new String(' ', indent * 2) + "throwing exception.");
                        }
                        throw new Exception();
                    }
                }
                break;

            case TransitionType.WILDCARD:
                // Wildcard consumes one token unconditionally.
                matches = EnterState(q, new Edge()
                {
                    _from = state,
                    _to = transition.target,
                    _label = transition.Label,
                    _type = transition.TransitionType,
                    _index = token_index + 1,
                    _index_at_transition = token_index
                }, indent + 1);
                if (matches != null && matches.Count == 0)
                {
                    if (_log_parse)
                    {
                        System.Console.Error.WriteLine(
                            new String(' ', indent * 2) + "throwing exception.");
                    }
                    throw new Exception();
                }
                break;

            default:
                if (transition.IsEpsilon)
                {
                    // Epsilon: advance state without consuming a token.
                    matches = EnterState(q, new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    }, indent + 1);
                    if (matches != null && matches.Count == 0)
                    {
                        if (_log_parse)
                        {
                            System.Console.Error.WriteLine(
                                new String(' ', indent * 2) + "throwing exception.");
                        }
                        throw new Exception();
                    }
                }
                else
                {
                    // Atom/set transition: consume one token when the current
                    // token's type matches the (possibly complemented) label.
                    IntervalSet set = transition.Label;
                    if (set != null && set.Count > 0)
                    {
                        if (transition.TransitionType == TransitionType.NOT_SET)
                        {
                            set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, _parser.Atn.maxTokenType));
                        }
                        if (set.Contains(input_token.Type))
                        {
                            matches = EnterState(q, new Edge()
                            {
                                _from = state,
                                _to = transition.target,
                                _label = transition.Label,
                                _type = transition.TransitionType,
                                _index = token_index + 1,
                                _index_at_transition = token_index
                            }, indent + 1);
                            if (matches != null && matches.Count == 0)
                            {
                                if (_log_parse)
                                {
                                    System.Console.Error.WriteLine(
                                        new String(' ', indent * 2) + "throwing exception.");
                                }
                                throw new Exception();
                            }
                        }
                    }
                }
                break;
        }
        if (matches != null)
        {
            // Append the entering edge to every discovered path and
            // sanity-check that consecutive edges in a path connect.
            foreach (List<Edge> match in matches)
            {
                List<Edge> x = match.ToList();
                if (t != null)
                {
                    x.Add(t);
                    Edge prev = null;
                    foreach (Edge z in x)
                    {
                        ATNState ff = z._to;
                        if (prev != null)
                        {
                            if (prev._from != ff)
                            {
                                System.Console.Error.WriteLine(
                                    new String(' ', indent * 2) + "Fail " + PrintSingle(x));
                                Debug.Assert(false);
                            }
                        }
                        prev = z;
                    }
                }
                result.Add(x);
            }
        }
    }
    if (result.Count == 0)
    {
        if (_log_parse)
        {
            System.Console.Error.WriteLine(
                new String(' ', indent * 2) + "result empty.");
        }
        return (null);
    }
    if (_log_parse)
    {
        var n = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
        var r = n.Any() ? n.FirstOrDefault().ruleIndex : -1;
        var name = (r >= 0) ? (" " + _parser.RuleNames[r]) : "";
        System.Console.Error.Write(
            new String(' ', indent * 2) + "Entry " + here
            + " State " + state + name + " tokenIndex " + token_index + " " + input_token.Text
            + " return ");
        string str = PrintResult(result);
        System.Console.Error.WriteLine(str);
    }
    return (result);
}
/// <summary>
/// Consumes tokens until EOF, the artificial caret token, or a token in the
/// resynchronization <paramref name="set"/> is reached.
/// </summary>
protected override void ConsumeUntil(Parser recognizer, IntervalSet set)
{
    for (int ttype = recognizer.InputStream.La(1);
         ttype != TokenConstants.Eof && ttype != CaretToken.CaretTokenType && !set.Contains(ttype);
         ttype = recognizer.InputStream.La(1))
    {
        // Consume directly from the stream (not recognizer.Consume()) so the
        // parser's own state is left untouched during recovery.
        recognizer.InputStream.Consume();
    }
}
/// <summary>
/// Attempts a parse with the decisions listed in
/// <paramref name="potentialAlternatives"/> fixed to the alternatives in
/// <paramref name="currentPath"/>, recording the outcome in
/// <paramref name="results"/>.
/// </summary>
/// <param name="parser">the configured parser instance</param>
/// <param name="potentialAlternatives">decision points discovered so far; when
/// the caret is reached via a prediction error, a new entry describing that
/// decision is appended</param>
/// <param name="currentPath">the fixed alternative for each entry of
/// <paramref name="potentialAlternatives"/>; <c>-1</c> is appended alongside
/// any newly discovered decision</param>
/// <param name="results">map from the root parse context to the
/// <see cref="CaretReachedException"/> describing how the caret was reached
/// (<see langword="null"/> for a clean parse)</param>
protected virtual void TryParse(T parser, List<MultipleDecisionData> potentialAlternatives, List<int> currentPath, IDictionary<RuleContext, CaretReachedException> results)
{
    RuleContext parseTree;
    try
    {
        parser.Interpreter.SetFixedDecisions(potentialAlternatives, currentPath);
        parseTree = ParseImpl(parser);
        results[parseTree] = null;
    }
    catch (CaretReachedException ex)
    {
        if (ex.Transitions == null)
        {
            return;
        }
        if (ex.InnerException is FailedPredicateException)
        {
            return;
        }
        // Walk up to the root context of the partial parse.
        for (parseTree = ex.FinalContext; parseTree.Parent != null; parseTree = parseTree.Parent)
        {
        }
        // intentionally blank
        if (ex.InnerException != null)
        {
            // The caret was reached through a prediction error: gather the
            // alternatives that remain viable after predicate evaluation.
            IntervalSet alts = new IntervalSet();
            IntervalSet semanticAlts = new IntervalSet();
            foreach (ATNConfig c in ex.Transitions.Keys)
            {
                if (semanticAlts.Contains(c.Alt))
                {
                    continue;
                }
                alts.Add(c.Alt);
                var recognizer = parser as Recognizer<IToken, ParserATNSimulator>;
                if (recognizer == null || c.SemanticContext.Eval(recognizer, ex.FinalContext))
                {
                    semanticAlts.Add(c.Alt);
                }
            }
            if (alts.Count != semanticAlts.Count)
            {
                Console.WriteLine("Forest decision {0} reduced to {1} by predicate evaluation.", alts, semanticAlts);
            }
            int inputIndex = parser.InputStream.Index;
            int decision = 0;
            int stateNumber = ex.InnerException.OffendingState;
            ATNState state = parser.Atn.states[stateNumber];
            // Normalize loop-related states to the state carrying the decision.
            if (state is StarLoopbackState)
            {
                Debug.Assert(state.NumberOfTransitions == 1 && state.OnlyHasEpsilonTransitions);
                Debug.Assert(state.Transition(0).target is StarLoopEntryState);
                state = state.Transition(0).target;
            }
            else
            {
                PlusBlockStartState plusBlockStartState = state as PlusBlockStartState;
                if (plusBlockStartState != null && plusBlockStartState.decision == -1)
                {
                    state = plusBlockStartState.loopBackState;
                    Debug.Assert(state != null);
                }
            }
            DecisionState decisionState = state as DecisionState;
            if (decisionState != null)
            {
                decision = decisionState.decision;
                if (decision < 0)
                {
                    Debug.WriteLine(string.Format("No decision number found for state {0}.", state.stateNumber));
                }
            }
            else
            {
                if (state != null)
                {
                    Debug.WriteLine(string.Format("No decision number found for state {0}.", state.stateNumber));
                }
                else
                {
                    Debug.WriteLine("No decision number found for state <null>.");
                }
                // continuing is likely to terminate
                return;
            }
            Debug.Assert(semanticAlts.MinElement >= 1);
            Debug.Assert(semanticAlts.MaxElement <= parser.Atn.decisionToState[decision].NumberOfTransitions);
            int[] alternatives = semanticAlts.ToArray();
            MultipleDecisionData decisionData = new MultipleDecisionData(inputIndex, decision, alternatives);
            potentialAlternatives.Add(decisionData);
            currentPath.Add(-1);
        }
        else
        {
            results[parseTree] = ex;
        }
    }
    catch (RecognitionException)
    {
        // not a viable path
        // (exception variable removed: it was unused and produced CS0168)
    }
}
// Determines how targetElement should be aligned relative to ancestor when
// laying out grammar source. Returns one of AlignmentRequirements:
// IgnoreTree, UseAncestor, PriorSibling, or None.
protected override AlignmentRequirements GetAlignmentRequirement(KeyValuePair<RuleContext, CaretReachedException> parseTree, IParseTree targetElement, IParseTree ancestor)
{
    if (ancestor == targetElement && !(targetElement is ITerminalNode))
    {
        // special handling for predicted tokens that don't actually exist yet
        CaretReachedException ex = parseTree.Value;
        if (ex != null && ex.Transitions != null)
        {
            bool validTransition = false;
            bool selfTransition = false;
            // examine transitions for predictions that don't actually exist yet
            foreach (KeyValuePair<ATNConfig, IList<Transition>> entry in ex.Transitions)
            {
                if (entry.Value == null)
                {
                    continue;
                }
                foreach (Transition transition in entry.Value)
                {
                    IntervalSet label = transition.Label;
                    if (label == null)
                    {
                        continue;
                    }
                    // Token types whose prediction invalidates this subtree.
                    bool containsInvalid = label.Contains(GrammarParser.OR)
                        || label.Contains(GrammarParser.RPAREN)
                        || label.Contains(GrammarParser.RBRACE)
                        || label.Contains(GrammarParser.END_ACTION)
                        || label.Contains(GrammarParser.END_ARG_ACTION)
                        || label.Contains(GrammarParser.OPTIONS)
                        || label.Contains(GrammarParser.AT)
                        || label.Contains(GrammarParser.ASSIGN)
                        || label.Contains(GrammarParser.SEMI)
                        || label.Contains(GrammarParser.COMMA)
                        || label.Contains(GrammarParser.MODE)
                        || label.Contains(GrammarParser.RARROW)
                        || label.Contains(GrammarParser.POUND);
                    // Token types that rule out self-alignment.
                    bool containsInvalidSelf = label.Contains(GrammarParser.LPAREN)
                        || label.Contains(GrammarParser.BEGIN_ACTION)
                        || label.Contains(GrammarParser.BEGIN_ARG_ACTION);
                    if (transition is NotSetTransition)
                    {
                        // A NotSetTransition matches the complement of its
                        // label, so invert both verdicts.
                        containsInvalid = !containsInvalid;
                        containsInvalidSelf = !containsInvalidSelf;
                    }
                    validTransition |= !containsInvalid;
                    selfTransition |= !containsInvalidSelf;
                }
            }
            if (!validTransition)
            {
                return (AlignmentRequirements.IgnoreTree);
            }
            else if (!selfTransition)
            {
                return (AlignmentRequirements.UseAncestor);
            }
        }
    }
    IRuleNode ruleNode = ancestor as IRuleNode;
    if (ruleNode == null)
    {
        return (AlignmentRequirements.UseAncestor);
    }
    RuleContext ruleContext = ruleNode.RuleContext;
    switch (ruleContext.RuleIndex)
    {
        case GrammarParser.RULE_parserRuleSpec:
            // Align with the prior sibling only once the rule name exists.
            if (((GrammarParser.ParserRuleSpecContext)ruleContext).RULE_REF() == null)
            {
                return (AlignmentRequirements.UseAncestor);
            }
            return (AlignmentRequirements.PriorSibling);

        case GrammarParser.RULE_lexerRule:
            // Same, keyed on the lexer rule's token name.
            if (((GrammarParser.LexerRuleContext)ruleContext).TOKEN_REF() == null)
            {
                return (AlignmentRequirements.UseAncestor);
            }
            return (AlignmentRequirements.PriorSibling);

        // List-like constructs: each item aligns with its prior sibling.
        case GrammarParser.RULE_ruleAltList:
        case GrammarParser.RULE_lexerAltList:
        case GrammarParser.RULE_altList:
        case GrammarParser.RULE_blockSet:
        case GrammarParser.RULE_lexerBlock:
        case GrammarParser.RULE_block:
        case GrammarParser.RULE_optionsSpec:
        case GrammarParser.RULE_tokensSpec:
        case GrammarParser.RULE_channelsSpec:
        case GrammarParser.RULE_modeSpec:
        case GrammarParser.RULE_delegateGrammars:
        case GrammarParser.RULE_actionBlock:
        case GrammarParser.RULE_elements:
        case GrammarParser.RULE_lexerElements:
        case GrammarParser.RULE_rules:
        //case GrammarParser.RULE_lexerCommands:
            return (AlignmentRequirements.PriorSibling);

        case GrammarParser.RULE_lexerAlt:
            // Align only elements inside the alternative's lexer commands.
            GrammarParser.LexerAltContext lexerAltContext = ParseTrees.GetTypedRuleContext<GrammarParser.LexerAltContext>(ancestor);
            if (lexerAltContext != null && lexerAltContext.lexerCommands() != null && ParseTrees.IsAncestorOf(lexerAltContext.lexerCommands(), targetElement))
            {
                return (AlignmentRequirements.PriorSibling);
            }
            else
            {
                return (AlignmentRequirements.UseAncestor);
            }

        case GrammarParser.RULE_labeledAlt:
            // Only the '#' label marker aligns with prior siblings.
            if (ParseTrees.GetTerminalNodeType(targetElement) == GrammarParser.POUND)
            {
                return (AlignmentRequirements.PriorSibling);
            }
            else
            {
                return (AlignmentRequirements.UseAncestor);
            }

        case GrammarParser.RULE_delegateGrammar:
            return (AlignmentRequirements.None);

        default:
            return (AlignmentRequirements.UseAncestor);
    }
}