public DebugTokenStream(ITokenStream input, IDebugEventListener dbg)
{
    // Wire up the wrapped stream and the listener that receives debug events.
    DebugListener = dbg;
    this.input = input;
    // Prime the stream with its first on-channel token so that any hidden
    // tokens sitting at the very beginning of the stream are observed now.
    input.LT(1);
}
public bool addOpenBrace(ITokenStream input, int braceCode)
{
    // A guard once skipped insertion when the next token was LCURLY/RCURLY
    // (and, for switch braces, BREAK/CASE/DEFAULT). That condition is
    // currently disabled, so the open brace is inserted unconditionally
    // for the upcoming token.
    IToken nextToken = input.LT(1);
    addOpenBrace(nextToken, braceCode);
    return true;
}
/// <summary>
/// Extends the current interpretation trace by one token in the backward
/// direction (toward the start of the input stream).
/// </summary>
/// <returns>
/// true if at least one candidate context survived the backward step;
/// false if the beginning of file was reached, a previous backward step
/// already failed, or no context could consume the symbol.
/// </returns>
public bool TryStepBackward()
{
    // Sticky failure/BOF flags: once set, no further backward stepping is attempted.
    if (_failedBackward || _beginningOfFile) { return(false); }
    // No more tokens remain before the current lookbehind window.
    if (_input.Index - _lookBehindPosition <= 0) { _beginningOfFile = true; return(false); }

    // Fetch the next token behind the window; a null token also means BOF.
    IToken token = _input.LT(-1 - _lookBehindPosition);
    if (token == null) { _beginningOfFile = true; return(false); }

    int symbol = token.Type;
    int symbolPosition = token.TokenIndex;

    /*
     * Update the non-deterministic trace
     */
    Stopwatch updateTimer = Stopwatch.StartNew();

    // First step ever (no lookahead, no lookbehind, no contexts yet):
    // seed the context set from the network transitions that can match 'symbol'.
    if (_lookAheadPosition == 0 && _lookBehindPosition == 0 && _contexts.Count == 0)
    {
        HashSet <InterpretTrace> initialContexts = new HashSet <InterpretTrace>(EqualityComparer <InterpretTrace> .Default);

        /* create our initial set of states as the ones at the target end of a match transition
         * that contains 'symbol' in the match set.
         */
        List <Transition> transitions = new List <Transition>(_network.Transitions.Where(i => i.MatchesSymbol(symbol)));
        foreach (var transition in transitions)
        {
            // Skip transitions whose source or target rule is excluded as a start rule.
            if (ExcludedStartRules.Contains(Network.StateRules[transition.SourceState.Id])) { continue; }
            if (ExcludedStartRules.Contains(Network.StateRules[transition.TargetState.Id])) { continue; }

            ContextFrame startContext = new ContextFrame(transition.TargetState, null, null, this);
            ContextFrame endContext = new ContextFrame(transition.TargetState, null, null, this);
            initialContexts.Add(new InterpretTrace(startContext, endContext));
        }

        _contexts.AddRange(initialContexts);
#if DFA
        DeterministicState deterministicState = new DeterministicState(_contexts.Select(i => i.StartContext));
        _deterministicTrace = new DeterministicTrace(deterministicState, deterministicState);
#endif
    }

    // Snapshot the current contexts; _contexts is rebuilt from the step results,
    // or restored from this snapshot if the step produces nothing.
    List <InterpretTrace> existing = new List <InterpretTrace>(_contexts);
    _contexts.Clear();
    SortedSet <int> states = new SortedSet <int>();
    HashSet <InterpretTrace> contexts = new HashSet <InterpretTrace>(EqualityComparer <InterpretTrace> .Default);
#if false
    HashSet <ContextFrame> existingUnique = new HashSet <ContextFrame>(existing.Select(i => i.StartContext), EqualityComparer <ContextFrame> .Default);
    Contract.Assert(existingUnique.Count == existing.Count);
#endif

    // Step each surviving context backward over 'symbol', collecting results.
    // 'states' carries the context's start state id into StepBackward and is
    // reset between contexts.
    foreach (var context in existing)
    {
        states.Add(context.StartContext.State.Id);
        StepBackward(contexts, states, context, symbol, symbolPosition, PreventContextType.None);
        states.Clear();
    }

    bool success = false;
    if (contexts.Count > 0)
    {
        _contexts.AddRange(contexts);
        if (TrackBoundedContexts)
        {
            _boundedStartContexts.UnionWith(_contexts.Where(i => i.BoundedStart));
        }
        success = true;
    }
    else
    {
        // Nothing matched: restore the previous contexts unchanged.
        _contexts.AddRange(existing);
    }

    long nfaUpdateTime = updateTimer.ElapsedMilliseconds;

#if DFA
    /*
     * Update the deterministic trace
     */
    updateTimer.Restart();

    // Find (or create) the deterministic transition into the current start
    // state that matches 'symbol', then prepend it to the deterministic trace.
    DeterministicTransition deterministicTransition = _deterministicTrace.StartState.IncomingTransitions.SingleOrDefault(i => i.MatchSet.Contains(symbol));
    if (deterministicTransition == null)
    {
        DeterministicState sourceState = new DeterministicState(contexts.Select(i => i.StartContext));
        DeterministicState targetState = _deterministicTrace.StartState;
        deterministicTransition = targetState.IncomingTransitions.SingleOrDefault(i => i.SourceState.Equals(sourceState));
        if (deterministicTransition == null)
        {
            deterministicTransition = new DeterministicTransition(targetState);
            sourceState.AddTransition(deterministicTransition);
        }

        deterministicTransition.MatchSet.Add(symbol);
    }

    IEnumerable <DeterministicTraceTransition> deterministicTransitions = Enumerable.Repeat(new DeterministicTraceTransition(deterministicTransition, symbol, symbolPosition, this), 1);
    deterministicTransitions = deterministicTransitions.Concat(_deterministicTrace.Transitions);
    _deterministicTrace = new DeterministicTrace(deterministicTransition.SourceState, _deterministicTrace.EndState, deterministicTransitions);
    long dfaUpdateTime = updateTimer.ElapsedMilliseconds;
#endif

    // Only advance the lookbehind window on success; a failed step is latched
    // so subsequent calls return immediately.
    if (success)
    {
        _lookBehindPosition++;
    }

    if (!success)
    {
        _failedBackward = true;
    }

    return(success);
}
/// <summary>
/// Performs adaptive prediction for the given decision: consults (and lazily
/// extends) the decision's DFA cache, falling back to ATN simulation via
/// <c>ExecATN</c> when no cached start state exists.
/// </summary>
/// <param name="input">The token stream to predict over; its position is
/// restored before this method returns.</param>
/// <param name="decision">The decision number, used to index <c>decisionToDFA</c>.</param>
/// <param name="outerContext">The invoking parser rule context; may be null,
/// in which case the empty context is substituted.</param>
/// <returns>The predicted alternative number.</returns>
public virtual int AdaptivePredict(ITokenStream input, int decision, ParserRuleContext outerContext)
{
    if (debug || debug_list_atn_decisions)
    {
        Console.WriteLine("adaptivePredict decision " + decision +
                          " exec LA(1)==" + GetLookaheadName(input) +
                          " line " + input.LT(1).Line + ":" + input.LT(1).Column);
    }

    this.input = input;
    startIndex = input.Index;
    context = outerContext;
    DFA dfa = decisionToDFA[decision];
    thisDfa = dfa;

    // Mark the stream so the position can be released/restored in 'finally'.
    int m = input.Mark();
    int index = startIndex;

    // Now we are certain to have a specific decision's DFA
    // But, do we still need an initial state?
    try
    {
        DFAState s0;
        if (dfa.IsPrecedenceDfa)
        {
            // the start state for a precedence DFA depends on the current
            // parser precedence, and is provided by a DFA method.
            s0 = dfa.GetPrecedenceStartState(parser.Precedence);
        }
        else
        {
            // the start state for a "regular" DFA is just s0
            s0 = dfa.s0;
        }

        if (s0 == null)
        {
            // No cached start state yet: compute the closure of the ATN
            // decision state and install it in the DFA.
            if (outerContext == null)
                outerContext = ParserRuleContext.EmptyContext;

            if (debug || debug_list_atn_decisions)
            {
                Console.WriteLine("predictATN decision " + dfa.decision +
                                  " exec LA(1)==" + GetLookaheadName(input) +
                                  ", outerContext=" + outerContext.ToString(parser));
            }

            bool fullCtx = false;
            ATNConfigSet s0_closure = ComputeStartState(dfa.atnStartState, ParserRuleContext.EmptyContext, fullCtx);

            if (dfa.IsPrecedenceDfa)
            {
                /* If this is a precedence DFA, we use applyPrecedenceFilter
                 * to convert the computed start state to a precedence start
                 * state. We then use DFA.setPrecedenceStartState to set the
                 * appropriate start state for the precedence level rather
                 * than simply setting DFA.s0.
                 */
                dfa.s0.configSet = s0_closure; // not used for prediction but useful to know start configs anyway
                s0_closure = ApplyPrecedenceFilter(s0_closure);
                s0 = AddDFAState(dfa, new DFAState(s0_closure));
                dfa.SetPrecedenceStartState(parser.Precedence, s0);
            }
            else
            {
                s0 = AddDFAState(dfa, new DFAState(s0_closure));
                dfa.s0 = s0;
            }
        }

        int alt = ExecATN(dfa, s0, input, index, outerContext);
        if (debug)
            Console.WriteLine("DFA after predictATN: " + dfa.ToString(parser.Vocabulary));
        return alt;
    }
    finally
    {
        mergeCache = null; // wack cache after each prediction
        thisDfa = null;
        // Restore the stream to where prediction started and release the mark.
        input.Seek(index);
        input.Release(m);
    }
}
/// <summary>
/// Builds a <see cref="NoViableAltException"/> spanning from the token at
/// <paramref name="startIndex"/> (where the failed decision began) to the
/// current lookahead token.
/// </summary>
protected NoViableAltException NoViableAlt(ITokenStream input, ParserRuleContext outerContext, ATNConfigSet configs, int startIndex)
{
    var startToken = input.Get(startIndex);
    var offendingToken = input.LT(1);
    return new NoViableAltException(parser, input, startToken, offendingToken, configs, outerContext);
}
/** Performs ATN simulation to compute a predicted alternative based
 *  upon the remaining input, but also updates the DFA cache to avoid
 *  having to traverse the ATN again for the same input sequence.
 *
 *  There are some key conditions we're looking for after computing a new
 *  set of ATN configs (proposed DFA state):
 *        * if the set is empty, there is no viable alternative for current symbol
 *        * does the state uniquely predict an alternative?
 *        * does the state have a conflict that would prevent us from
 *          putting it on the work list?
 *
 *  We also have some key operations to do:
 *        * add an edge from previous DFA state to potentially new DFA state, D,
 *          upon current symbol but only if adding to work list, which means in all
 *          cases except no viable alternative (and possibly non-greedy decisions?)
 *        * collecting predicates and adding semantic context to DFA accept states
 *        * adding rule context to context-sensitive DFA accept states
 *        * consuming an input symbol
 *        * reporting a conflict
 *        * reporting an ambiguity
 *        * reporting a context sensitivity
 *        * reporting insufficient predicates
 *
 *  cover these cases:
 *     dead end
 *     single alt
 *     single alt + preds
 *     conflict
 *     conflict + preds
 */
protected int ExecATN(DFA dfa, DFAState s0, ITokenStream input, int startIndex, ParserRuleContext outerContext)
{
    if (debug || debug_list_atn_decisions)
    {
        Console.WriteLine("execATN decision " + dfa.decision +
                          " exec LA(1)==" + GetLookaheadName(input) +
                          " line " + input.LT(1).Line + ":" + input.LT(1).Column);
    }

    DFAState previousD = s0;

    if (debug)
        Console.WriteLine("s0 = " + s0);

    // t is the current input symbol driving the DFA walk.
    int t = input.LA(1);

    while (true)
    { // while more work
        // Try the cached DFA edge first; compute (and cache) a new target
        // state only on a cache miss.
        DFAState D = GetExistingTargetState(previousD, t);
        if (D == null)
        {
            D = ComputeTargetState(dfa, previousD, t);
        }

        if (D == ERROR)
        {
            // if any configs in previous dipped into outer context, that
            // means that input up to t actually finished entry rule
            // at least for SLL decision. Full LL doesn't dip into outer
            // so don't need special case.
            // We will get an error no matter what so delay until after
            // decision; better error message. Also, no reachable target
            // ATN states in SLL implies LL will also get nowhere.
            // If conflict in states that dip out, choose min since we
            // will get error no matter what.
            NoViableAltException e = NoViableAlt(input, outerContext, previousD.configSet, startIndex);
            input.Seek(startIndex);
            int alt = GetSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configSet, outerContext);
            if (alt != ATN.INVALID_ALT_NUMBER)
            {
                return alt;
            }
            throw e;
        }

        if (D.requiresFullContext && mode != PredictionMode.SLL)
        {
            // SLL conflict detected: before paying for full-context
            // simulation, see if predicates alone resolve to a single alt.
            // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
            BitSet conflictingAlts = D.configSet.conflictingAlts;
            if (D.predicates != null)
            {
                if (debug)
                    Console.WriteLine("DFA state has preds in DFA sim LL failover");
                int conflictIndex = input.Index;
                if (conflictIndex != startIndex)
                {
                    // Predicates must be evaluated at the decision start position.
                    input.Seek(startIndex);
                }

                conflictingAlts = EvalSemanticContext(D.predicates, outerContext, true);
                if (conflictingAlts.Cardinality() == 1)
                {
                    if (debug)
                        Console.WriteLine("Full LL avoided");
                    return conflictingAlts.NextSetBit(0);
                }

                if (conflictIndex != startIndex)
                {
                    // restore the index so reporting the fallback to full
                    // context occurs with the index at the correct spot
                    input.Seek(conflictIndex);
                }
            }

            if (dfa_debug)
                Console.WriteLine("ctx sensitive state " + outerContext + " in " + D);
            bool fullCtx = true;
            ATNConfigSet s0_closure = ComputeStartState(dfa.atnStartState, outerContext, fullCtx);
            ReportAttemptingFullContext(dfa, conflictingAlts, D.configSet, startIndex, input.Index);
            int alt = ExecATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext);
            return alt;
        }

        if (D.isAcceptState)
        {
            if (D.predicates == null)
            {
                return D.prediction;
            }

            // Evaluate accept-state predicates at the decision start position,
            // remembering the stop index for ambiguity reporting.
            int stopIndex = input.Index;
            input.Seek(startIndex);
            BitSet alts = EvalSemanticContext(D.predicates, outerContext, true);
            switch (alts.Cardinality())
            {
            case 0:
                throw NoViableAlt(input, outerContext, D.configSet, startIndex);

            case 1:
                return alts.NextSetBit(0);

            default:
                // report ambiguity after predicate evaluation to make sure the correct
                // set of ambig alts is reported.
                ReportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configSet);
                return alts.NextSetBit(0);
            }
        }

        previousD = D;

        // Advance to the next symbol unless we're already at EOF.
        if (t != IntStreamConstants.EOF)
        {
            input.Consume();
            t = input.LA(1);
        }
    }
}
/// <summary>
/// Whether the previous token value equals to str
/// </summary>
/// <param name="str">The text to compare against the previous token.</param>
/// <returns>bool</returns>
protected bool prev(string str)
{
    var previousToken = _input.LT(-1);
    return previousToken.Text.Equals(str);
}
public virtual void TraceIn(string ruleName, int ruleIndex)
{
    // Delegate to the base tracer, supplying the current lookahead token.
    var lookahead = input.LT(1);
    base.TraceIn(ruleName, ruleIndex, lookahead);
}
/// <summary>
/// Convenience wrapper: returns the token at lookahead offset <paramref name="k"/>
/// from <paramref name="stream"/>.
/// </summary>
private IToken LT(ITokenStream stream, int k)
{
    return stream.LT(k);
}
/// <summary>
/// Classifies the current token of an SQL statement, updating the query type,
/// the current code context (string / column / table / ...), and the list of
/// used tables, then consumes the token(s) it recognized.
/// NOTE(review): the order of the if/return chains below is significant —
/// earlier rules take precedence over later ones.
/// </summary>
private void ProcessToken()
{
    var tok = m_input.LT(1);
    int la1 = m_input.LA(1), la2 = m_input.LA(2);

    // Record the statement kind when one of the leading verbs is seen.
    if (la1 == m_tokens.SELECT)
    {
        m_queryType = QueryType.SELECT;
    }
    if (la1 == m_tokens.UPDATE)
    {
        m_queryType = QueryType.UPDATE;
    }
    if (la1 == m_tokens.DELETE)
    {
        m_queryType = QueryType.DELETE;
    }
    if (la1 == m_tokens.INSERT)
    {
        m_queryType = QueryType.INSERT;
    }

    // String literal: mark its span with String context, then restore the
    // context that was active before the literal.
    if (la1 == m_tokens.T_STRING)
    {
        var oldctx = m_context;
        SetPositionBegin(tok);
        SetContext(CodeContext.String);
        SetPositionEnd(tok);
        SetContext(oldctx);
        m_input.Consume();
        return;
    }

    // SELECT / WHERE / ON introduce column territory.
    if (la1 == m_tokens.SELECT ||
        la1 == m_tokens.WHERE ||
        la1 == m_tokens.ON
        )
    {
        SetPositionEnd(tok);
        SetContext(CodeContext.Column);
        m_input.Consume();
        return;
    }

    // INSERT's column list "(" and UPDATE's SET list hold unqualified columns.
    if ((la1 == m_tokens.LPAREN && m_queryType == QueryType.INSERT) ||
        la1 == m_tokens.SET
        )
    {
        SetPositionEnd(tok);
        SetContext(CodeContext.ColumnWithoutQualifier);
        m_input.Consume();
        return;
    }

    // Two-token keywords ORDER BY / GROUP BY: consume both, then column context.
    if (la1 == m_tokens.ORDER && la2 == m_tokens.BY ||
        la1 == m_tokens.GROUP && la2 == m_tokens.BY)
    {
        SetPositionEnd(m_input.LT(2));
        m_input.Consume();
        m_input.Consume();
        SetContext(CodeContext.Column);
        return;
    }

    // Keywords that are followed by a table name.
    if (la1 == m_tokens.FROM ||
        la1 == m_tokens.JOIN ||
        la1 == m_tokens.UPDATE ||
        la1 == m_tokens.DELETE ||
        la1 == m_tokens.INSERT
        )
    {
        SetPositionEnd(tok);
        SetContext(CodeContext.Table);
        m_input.Consume();
        return;
    }

    // Identifier in table context: collect a (possibly dotted) table name,
    // an optional alias, and record it in UsedTables.
    if (m_context == CodeContext.Table && m_tokens.IsIdent(la1))
    {
        var name = new DepsName();
        name.Components.Add(m_dialect.UnquoteName(tok.Text));
        m_input.Consume();
        // Append each ".identifier" segment (e.g. schema.table).
        while (m_input.LA(1) == m_tokens.DOT && m_tokens.IsIdent(m_input.LA(2)))
        {
            name.Components.Add(m_dialect.UnquoteName(m_input.LT(2).Text));
            m_input.Consume();
            m_input.Consume();
        }
        var titem = new TableItem { Name = name };
        // A bare identifier right after the name is treated as an alias.
        if (m_tokens.IsIdent(m_input.LA(1)))
        {
            titem.Alias = m_dialect.UnquoteName(m_input.LT(1).Text);
            m_input.Consume();
        }
        UsedTables.Add(titem);
        return;
    }

    // default token handling
    m_input.Consume();
    SetPositionEnd(tok);
}