/// <summary>
/// Consumes one token and maps it to the corresponding atomic selector.
/// </summary>
/// <param name="tokens">Token stream positioned at the selector atom.</param>
/// <returns>The selector represented by the consumed token.</returns>
/// <exception cref="FormatException">The token does not start a selector.</exception>
public static ElementSelector ParseAtomSelector(ITokenStream tokens)
{
    var token = tokens.Consume();

    if (token is AsteriskToken)
    {
        return new AnySelector();
    }
    if (token is AmpersandToken)
    {
        return new ContextSelector();
    }
    if (token is StringToken str)
    {
        return new IdentifierSelector(str.Value);
    }
    if (token is IdentifierToken id)
    {
        return new IdentifierSelector(id.Value);
    }
    if (token is ScriptToken script)
    {
        return new ScriptSelector(script.Value);
    }
    if (token is CallbackToken callback)
    {
        return new CallbackSelector(callback.Value);
    }

    throw new FormatException($"Expected selector identifier at {tokens.FormatPosition(token)}.");
}
/// <summary>
/// Parses a conjunction: one or more "not" selectors, either joined by an
/// explicit '^' operator or written back-to-back (implicit AND).
/// </summary>
/// <param name="tokens">Token stream positioned at the first operand.</param>
/// <returns>An <c>AndSelector</c> over all parsed operands (possibly one).</returns>
public static AndSelector ParseAndSelector(ITokenStream tokens)
{
    var selectors = new List<ElementSelector> { ParseNotSelector(tokens) };
    while (true)
    {
        var zero = tokens.Peek();
        switch (zero)
        {
            case CaretToken _:
                // Explicit conjunction operator; skip it before the next operand.
                tokens.Consume();
                selectors.Add(ParseNotSelector(tokens));
                break;
            // Any token that can start an atom begins an implicit AND operand.
            case NotToken _:
            case AsteriskToken _:
            case AmpersandToken _:
            case StringToken _:
            case IdentifierToken _:
            case ScriptToken _:
            // Fix: CallbackToken is a valid atom start (ParseAtomSelector accepts
            // it, mirroring ScriptToken) but was missing from this start set,
            // prematurely ending the conjunction before a callback selector.
            case CallbackToken _:
                selectors.Add(ParseNotSelector(tokens));
                break;
            default:
                // Anything else ends the conjunction; caller handles it.
                return new AndSelector(selectors);
        }
    }
}
/// <summary>
/// Parses an atom, optionally preceded by a '!' that inverts it.
/// </summary>
/// <param name="tokens">Token stream positioned at the (possibly negated) atom.</param>
/// <returns>The atom, wrapped in a <c>NotSelector</c> when negated.</returns>
public static ElementSelector ParseNotSelector(ITokenStream tokens)
{
    if (tokens.Peek() is NotToken)
    {
        // Swallow the '!' and negate whatever atom follows.
        tokens.Consume();
        return new NotSelector(ParseAtomSelector(tokens));
    }
    return ParseAtomSelector(tokens);
}
/// <summary>
/// Error recovery for the REPL: discards tokens up to (but not including) the
/// next vertical-whitespace token or end of input, then runs default recovery.
/// </summary>
/// <param name="recognizer">The parser that raised the error.</param>
/// <param name="e">The recognition exception being recovered from.</param>
public override void Recover(Parser recognizer, RecognitionException e)
{
    for (; ;)
    {
        var p = stream.LA(1);
        if (p == -1)
        {
            break; // end of input
        }
        if (p == ReplParser.VWS)
        {
            // Fix: the original only consumed when p != VWS, so reaching a VWS
            // token neither consumed nor exited — an infinite loop. Stop at the
            // synchronization point instead.
            break;
        }
        stream.Consume();
    }
    base.Recover(recognizer, e);
}
/// <summary>
/// Builds the visualizer form: buffers every token from the stream (restoring
/// the stream's position afterwards), caches the token names, and initializes
/// the display.
/// </summary>
/// <param name="tokenStream">The token stream to visualize; must not be null.</param>
public TokenStreamVisualizerForm(ITokenStream tokenStream)
{
    if (tokenStream == null)
    {
        throw new ArgumentNullException("tokenStream");
    }

    InitializeComponent();

    var bufferedTokens = new List<IToken>();
    int marker = tokenStream.Mark();
    int currentPosition = tokenStream.Index;
    try
    {
        // Drive the stream to EOF so its buffer is fully populated,
        // then copy every buffered token out.
        tokenStream.Seek(0);
        while (tokenStream.LA(1) != CharStreamConstants.EndOfFile)
        {
            tokenStream.Consume();
        }
        for (int index = 0; index < tokenStream.Count; index++)
        {
            bufferedTokens.Add(tokenStream.Get(index));
        }
    }
    finally
    {
        // Always put the stream back where the caller left it.
        tokenStream.Rewind(marker);
    }

    this._tokenStream = tokenStream;
    this._tokens = bufferedTokens.ToArray();
    if (tokenStream.TokenSource != null)
    {
        this._tokenNames = tokenStream.TokenSource.TokenNames;
    }
    this._tokenNames = this._tokenNames ?? new string[0];

    UpdateTokenTypes();
    UpdateHighlighting();
    listBox1.BackColor = Color.Wheat;
}
/// <summary>
/// Peeks at the next significant token, consuming and discarding any leading
/// whitespace tokens first. The returned token is NOT consumed.
/// </summary>
/// <param name="tokens">The token stream to drain.</param>
/// <returns>The first non-whitespace token at the head of the stream.</returns>
internal static Token DrainPeek(this ITokenStream tokens)
{
    var next = tokens.Peek();
    while (next is WhitespaceToken)
    {
        tokens.Consume();
        next = tokens.Peek();
    }
    return next;
}
/// <summary>
/// Error recovery that moves the current position to the next 'END' token and
/// then resynchronizes on the follow set.
/// </summary>
/// <param name="recognizer">The parser that raised the error.</param>
/// <param name="e">The recognition exception being recovered from.</param>
public override void Recover(Parser recognizer, RecognitionException e)
{
    base.Recover(recognizer, e);

    var tokenStream = (ITokenStream)recognizer.InputStream;

    // Verify we are where we expect to be; otherwise leave default recovery alone.
    if (tokenStream.La(1) != MyGrammarParser.END)
    {
        return;
    }

    // Compute the set of tokens that may legally follow, step past 'END', and
    // skip forward to that set.
    // If the errant element is the last in the set, this lands on the 'END'
    // token in 'END MODULE'. If there are subsequent elements in the set, this
    // lands on the 'BEGIN' token in 'BEGIN module_element'.
    IntervalSet recoverySet = GetErrorRecoverySet(recognizer);
    tokenStream.Consume();
    ConsumeUntil(recognizer, recoverySet);
}
/// <summary>
/// Consumes one token from the wrapped input and reports it to the debug
/// listener, including any off-channel (hidden) tokens skipped over.
/// </summary>
public virtual void Consume()
{
    if (initialStreamState)
    {
        ConsumeInitialHiddenTokens();
    }

    int before = input.Index;
    IToken token = input.LT(1);
    input.Consume();
    int after = input.Index;

    dbg.ConsumeToken(token);

    // If the index jumped by more than one, the consume skipped off-channel
    // tokens; report each of those to the listener as well.
    if (after > before + 1)
    {
        for (int i = before + 1; i < after; i++)
        {
            dbg.ConsumeHiddenToken(input.Get(i));
        }
    }
}
/// <summary>
/// Creates the form, snapshotting every token in the stream into an array while
/// leaving the stream's position untouched, then prepares the UI.
/// </summary>
/// <param name="tokenStream">Stream whose tokens are displayed; must not be null.</param>
public TokenStreamVisualizerForm( ITokenStream tokenStream )
{
    if (tokenStream == null)
        throw new ArgumentNullException("tokenStream");

    InitializeComponent();

    List<IToken> snapshot = new List<IToken>();
    int marker = tokenStream.Mark();
    int currentPosition = tokenStream.Index;
    try
    {
        // Walk to EOF so the stream buffers everything, then collect the buffer.
        tokenStream.Seek(0);
        while (tokenStream.LA(1) != CharStreamConstants.EndOfFile)
            tokenStream.Consume();

        int total = tokenStream.Count;
        for (int i = 0; i < total; i++)
            snapshot.Add(tokenStream.Get(i));
    }
    finally
    {
        // Restore the caller's stream position even if buffering failed.
        tokenStream.Rewind(marker);
    }

    this._tokenStream = tokenStream;
    this._tokens = snapshot.ToArray();
    if (tokenStream.TokenSource != null)
        this._tokenNames = tokenStream.TokenSource.TokenNames;
    this._tokenNames = this._tokenNames ?? new string[0];

    UpdateTokenTypes();
    UpdateHighlighting();
    listBox1.BackColor = Color.Wheat;
}
/// <summary>
/// Consumes tokens for as long as <paramref name="predicate"/> evaluates to
/// <paramref name="desiredValue"/>, returning them in order. The first
/// non-matching token is left in the stream.
/// </summary>
/// <param name="stream">Source of tokens.</param>
/// <param name="predicate">Condition evaluated against each peeked token.</param>
/// <param name="desiredValue">The predicate result that keeps consumption going.</param>
/// <returns>The tokens consumed (possibly empty).</returns>
/// <exception cref="FormatException">The stream ran out of tokens (peek returned null).</exception>
private static List<Token> ConsumeWhile(ITokenStream stream, Func<Token, bool> predicate, bool desiredValue)
{
    var consumed = new List<Token>();
    for (var current = stream.Peek(); ; current = stream.Peek())
    {
        if (current == null)
        {
            throw new FormatException("Unexpected end of input.");
        }
        if (predicate(current) != desiredValue)
        {
            return consumed;
        }
        consumed.Add(stream.Consume());
    }
}
/// <summary>
/// Parses a comma-separated list of containment selectors into an OR selector.
/// Whitespace around commas is drained and ignored.
/// </summary>
/// <param name="tokens">Token stream positioned at the first alternative.</param>
/// <param name="isNested">Passed through to <c>ParseContainmentSelector</c>.</param>
/// <returns>An <c>OrSelector</c> over all parsed alternatives (at least one).</returns>
public static OrSelector ParseOrSelector(ITokenStream tokens, bool isNested)
{
    var alternatives = new List<ContainmentSelector>
    {
        ParseContainmentSelector(tokens, isNested)
    };

    // Keep collecting alternatives while the next significant token is a comma.
    while (tokens.DrainPeek() is CommaToken)
    {
        tokens.Consume();
        tokens.DrainPeek();
        alternatives.Add(ParseContainmentSelector(tokens, isNested));
    }

    return new OrSelector(alternatives);
}
/// <summary>Advances the underlying stream by one token.</summary>
public void Advance()
{
    _stream.Consume();
}
// Consumes one token (or a small token group) from m_input and updates the
// analyzer state: the detected query type, the current code context, and the
// list of referenced tables. Every path through this method consumes at least
// one token, so the caller's loop always makes progress.
private void ProcessToken() {
    var tok = m_input.LT(1);
    int la1 = m_input.LA(1), la2 = m_input.LA(2);

    // Track which kind of statement we are inside; the last such keyword wins.
    if (la1 == m_tokens.SELECT) { m_queryType = QueryType.SELECT; }
    if (la1 == m_tokens.UPDATE) { m_queryType = QueryType.UPDATE; }
    if (la1 == m_tokens.DELETE) { m_queryType = QueryType.DELETE; }
    if (la1 == m_tokens.INSERT) { m_queryType = QueryType.INSERT; }

    // String literal: mark just this token's span as String context, then
    // restore the surrounding context.
    if (la1 == m_tokens.T_STRING) {
        var oldctx = m_context;
        SetPositionBegin(tok);
        SetContext(CodeContext.String);
        SetPositionEnd(tok);
        SetContext(oldctx);
        m_input.Consume();
        return;
    }

    // SELECT / WHERE / ON start a column-reference region.
    if (la1 == m_tokens.SELECT || la1 == m_tokens.WHERE || la1 == m_tokens.ON ) {
        SetPositionEnd(tok);
        SetContext(CodeContext.Column);
        m_input.Consume();
        return;
    }

    // INSERT's column list '(' or an UPDATE-style SET introduces columns that
    // may not carry a table qualifier.
    if ((la1 == m_tokens.LPAREN && m_queryType == QueryType.INSERT) || la1 == m_tokens.SET ) {
        SetPositionEnd(tok);
        SetContext(CodeContext.ColumnWithoutQualifier);
        m_input.Consume();
        return;
    }

    // 'ORDER BY' / 'GROUP BY': consume both keywords, then columns follow.
    if (la1 == m_tokens.ORDER && la2 == m_tokens.BY || la1 == m_tokens.GROUP && la2 == m_tokens.BY) {
        SetPositionEnd(m_input.LT(2));
        m_input.Consume();
        m_input.Consume();
        SetContext(CodeContext.Column);
        return;
    }

    // Keywords after which table names appear.
    if (la1 == m_tokens.FROM || la1 == m_tokens.JOIN || la1 == m_tokens.UPDATE || la1 == m_tokens.DELETE || la1 == m_tokens.INSERT ) {
        SetPositionEnd(tok);
        SetContext(CodeContext.Table);
        m_input.Consume();
        return;
    }

    // In table context, an identifier starts a (possibly dotted, possibly
    // aliased) table reference: name(.name)* [alias].
    if (m_context == CodeContext.Table && m_tokens.IsIdent(la1)) {
        var name = new DepsName();
        name.Components.Add(m_dialect.UnquoteName(tok.Text));
        m_input.Consume();
        // Each '.' + identifier pair extends the qualified name.
        while (m_input.LA(1) == m_tokens.DOT && m_tokens.IsIdent(m_input.LA(2))) {
            name.Components.Add(m_dialect.UnquoteName(m_input.LT(2).Text));
            m_input.Consume();
            m_input.Consume();
        }
        var titem = new TableItem { Name = name };
        // A bare identifier directly after the name is treated as an alias.
        if (m_tokens.IsIdent(m_input.LA(1))) {
            titem.Alias = m_dialect.UnquoteName(m_input.LT(1).Text);
            m_input.Consume();
        }
        UsedTables.Add(titem);
        return;
    }

    // default token handling
    m_input.Consume();
    SetPositionEnd(tok);
}
// comes back with reach.UniqueAlt set to a valid alt
// Full-context (LL) ATN simulation, entered after the SLL DFA simulation
// failed over. Walks the input from startIndex, computing reach sets until a
// unique (or exactly-ambiguous) alternative is found, and reports context
// sensitivity or ambiguity as appropriate.
protected int ExecATNWithFullContext(DFA dfa, DFAState D, // how far we got in SLL DFA before failing over
                                     ATNConfigSet s0, ITokenStream input, int startIndex, ParserRuleContext outerContext)
{
    if (debug || debug_list_atn_decisions)
    {
        Console.WriteLine("execATNWithFullContext " + s0);
    }
    bool fullCtx = true;
    bool foundExactAmbig = false;
    ATNConfigSet reach = null;
    ATNConfigSet previous = s0;
    input.Seek(startIndex);
    int t = input.LA(1);
    int predictedAlt;
    while (true)
    {
        // while more work
        // Console.WriteLine("LL REACH "+GetLookaheadName(input)+
        //  " from configs.size="+previous.size()+
        //  " line "+input.LT(1)Line+":"+input.LT(1).Column);
        reach = ComputeReachSet(previous, t, fullCtx);
        if (reach == null)
        {
            // if any configs in previous dipped into outer context, that
            // means that input up to t actually finished entry rule
            // at least for LL decision. Full LL doesn't dip into outer
            // so don't need special case.
            // We will get an error no matter what so delay until after
            // decision; better error message. Also, no reachable target
            // ATN states in SLL implies LL will also get nowhere.
            // If conflict in states that dip out, choose min since we
            // will get error no matter what.
            NoViableAltException e = NoViableAlt(input, outerContext, previous, startIndex);
            input.Seek(startIndex);
            int alt = GetSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previous, outerContext);
            if (alt != ATN.INVALID_ALT_NUMBER)
            {
                return alt;
            }
            throw e;
        }
        ICollection<BitSet> altSubSets = PredictionMode.GetConflictingAltSubsets(reach.configs);
        if (debug)
        {
            Console.WriteLine("LL altSubSets=" + altSubSets + ", predict=" + PredictionMode.GetUniqueAlt(altSubSets) + ", ResolvesToJustOneViableAlt=" + PredictionMode.ResolvesToJustOneViableAlt(altSubSets));
        }
        // Console.WriteLine("altSubSets: "+altSubSets);
        // System.err.println("reach="+reach+", "+reach.conflictingAlts);
        reach.uniqueAlt = GetUniqueAlt(reach);
        // unique prediction?
        if (reach.uniqueAlt != ATN.INVALID_ALT_NUMBER)
        {
            predictedAlt = reach.uniqueAlt;
            break;
        }
        if (mode != PredictionMode.LL_EXACT_AMBIG_DETECTION)
        {
            predictedAlt = PredictionMode.ResolvesToJustOneViableAlt(altSubSets);
            if (predictedAlt != ATN.INVALID_ALT_NUMBER)
            {
                break;
            }
        }
        else
        {
            // In exact ambiguity mode, we never try to terminate early.
            // Just keeps scarfing until we know what the conflict is
            if (PredictionMode.AllSubsetsConflict(altSubSets) && PredictionMode.AllSubsetsEqual(altSubSets))
            {
                foundExactAmbig = true;
                predictedAlt = PredictionMode.GetSingleViableAlt(altSubSets);
                break;
            }
            // else there are multiple non-conflicting subsets or
            // we're not sure what the ambiguity is yet.
            // So, keep going.
        }
        previous = reach;
        if (t != IntStreamConstants.EOF)
        {
            input.Consume();
            t = input.LA(1);
        }
    }
    // If the configuration set uniquely predicts an alternative,
    // without conflict, then we know that it's a full LL decision
    // not SLL.
    if (reach.uniqueAlt != ATN.INVALID_ALT_NUMBER)
    {
        ReportContextSensitivity(dfa, predictedAlt, reach, startIndex, input.Index);
        return predictedAlt;
    }
    // We do not check predicates here because we have checked them
    // on-the-fly when doing full context prediction.
    /* In non-exact ambiguity detection mode, we might actually be able to
       detect an exact ambiguity, but I'm not going to spend the cycles
       needed to check. We only emit ambiguity warnings in exact ambiguity
       mode.

       For example, we might know that we have conflicting configurations.
       But, that does not mean that there is no way forward without a
       conflict. It's possible to have nonconflicting alt subsets as in:

       LL altSubSets=[{1, 2}, {1, 2}, {1}, {1, 2}]

       from

       [(17,1,[5 $]), (13,1,[5 10 $]), (21,1,[5 10 $]), (11,1,[$]),
       (13,2,[5 10 $]), (21,2,[5 10 $]), (11,2,[$])]

       In this case, (17,1,[5 $]) indicates there is some next sequence that
       would resolve this without conflict to alternative 1. Any other viable
       next sequence, however, is associated with a conflict.

       We stop looking for input because no amount of further lookahead will
       alter the fact that we should predict alternative 1. We just can't say
       for sure that there is an ambiguity without looking further. */
    ReportAmbiguity(dfa, D, startIndex, input.Index, foundExactAmbig, reach.GetAlts(), reach);
    return predictedAlt;
}
/** Performs ATN simulation to compute a predicted alternative based
 *  upon the remaining input, but also updates the DFA cache to avoid
 *  having to traverse the ATN again for the same input sequence.

 There are some key conditions we're looking for after computing a new
 set of ATN configs (proposed DFA state):
       * if the set is empty, there is no viable alternative for current symbol
       * does the state uniquely predict an alternative?
       * does the state have a conflict that would prevent us from
         putting it on the work list?

 We also have some key operations to do:
       * add an edge from previous DFA state to potentially new DFA state, D,
         upon current symbol but only if adding to work list, which means in all
         cases except no viable alternative (and possibly non-greedy decisions?)
       * collecting predicates and adding semantic context to DFA accept states
       * adding rule context to context-sensitive DFA accept states
       * consuming an input symbol
       * reporting a conflict
       * reporting an ambiguity
       * reporting a context sensitivity
       * reporting insufficient predicates

 cover these cases:
    dead end
    single alt
    single alt + preds
    conflict
    conflict + preds
 */
protected int ExecATN(DFA dfa, DFAState s0, ITokenStream input, int startIndex, ParserRuleContext outerContext)
{
    if (debug || debug_list_atn_decisions)
    {
        Console.WriteLine("execATN decision " + dfa.decision + " exec LA(1)==" + GetLookaheadName(input) + " line " + input.LT(1).Line + ":" + input.LT(1).Column);
    }
    DFAState previousD = s0;
    if (debug)
        Console.WriteLine("s0 = " + s0);
    int t = input.LA(1);
    while (true)
    {
        // while more work
        // Reuse a cached DFA edge when one exists; otherwise compute it.
        DFAState D = GetExistingTargetState(previousD, t);
        if (D == null)
        {
            D = ComputeTargetState(dfa, previousD, t);
        }
        if (D == ERROR)
        {
            // if any configs in previous dipped into outer context, that
            // means that input up to t actually finished entry rule
            // at least for SLL decision. Full LL doesn't dip into outer
            // so don't need special case.
            // We will get an error no matter what so delay until after
            // decision; better error message. Also, no reachable target
            // ATN states in SLL implies LL will also get nowhere.
            // If conflict in states that dip out, choose min since we
            // will get error no matter what.
            NoViableAltException e = NoViableAlt(input, outerContext, previousD.configSet, startIndex);
            input.Seek(startIndex);
            int alt = GetSynValidOrSemInvalidAltThatFinishedDecisionEntryRule(previousD.configSet, outerContext);
            if (alt != ATN.INVALID_ALT_NUMBER)
            {
                return alt;
            }
            throw e;
        }
        if (D.requiresFullContext && mode != PredictionMode.SLL)
        {
            // IF PREDS, MIGHT RESOLVE TO SINGLE ALT => SLL (or syntax error)
            BitSet conflictingAlts = D.configSet.conflictingAlts;
            if (D.predicates != null)
            {
                if (debug)
                    Console.WriteLine("DFA state has preds in DFA sim LL failover");
                int conflictIndex = input.Index;
                if (conflictIndex != startIndex)
                {
                    input.Seek(startIndex);
                }
                conflictingAlts = EvalSemanticContext(D.predicates, outerContext, true);
                if (conflictingAlts.Cardinality() == 1)
                {
                    if (debug)
                        Console.WriteLine("Full LL avoided");
                    return conflictingAlts.NextSetBit(0);
                }
                if (conflictIndex != startIndex)
                {
                    // restore the index so reporting the fallback to full
                    // context occurs with the index at the correct spot
                    input.Seek(conflictIndex);
                }
            }
            if (dfa_debug)
                Console.WriteLine("ctx sensitive state " + outerContext + " in " + D);
            bool fullCtx = true;
            ATNConfigSet s0_closure = ComputeStartState(dfa.atnStartState, outerContext, fullCtx);
            ReportAttemptingFullContext(dfa, conflictingAlts, D.configSet, startIndex, input.Index);
            int alt = ExecATNWithFullContext(dfa, D, s0_closure, input, startIndex, outerContext);
            return alt;
        }
        if (D.isAcceptState)
        {
            if (D.predicates == null)
            {
                return D.prediction;
            }
            // Accept state gated by predicates: evaluate them with the input
            // rewound to the decision start.
            int stopIndex = input.Index;
            input.Seek(startIndex);
            BitSet alts = EvalSemanticContext(D.predicates, outerContext, true);
            switch (alts.Cardinality())
            {
                case 0:
                    throw NoViableAlt(input, outerContext, D.configSet, startIndex);
                case 1:
                    return alts.NextSetBit(0);
                default:
                    // report ambiguity after predicate evaluation to make sure the correct
                    // set of ambig alts is reported.
                    ReportAmbiguity(dfa, D, startIndex, stopIndex, false, alts, D.configSet);
                    return alts.NextSetBit(0);
            }
        }
        previousD = D;
        if (t != IntStreamConstants.EOF)
        {
            input.Consume();
            t = input.LA(1);
        }
    }
}
/// <summary>
/// Discards any leading whitespace tokens, then consumes and returns the next
/// significant token.
/// </summary>
/// <param name="tokens">The token stream to drain and consume from.</param>
/// <returns>The consumed non-whitespace token.</returns>
private static Token DrainConsume(this ITokenStream tokens)
{
    tokens.DrainPeek();
    var significant = tokens.Consume();
    return significant;
}
/// <summary>Consumes exactly one token from the stream and returns it.</summary>
public static Token Consume(this ITokenStream stream)
{
    var consumed = stream.Consume(1);
    return consumed[0];
}
// Parses a single stylesheet statement. Dispatches on the first two significant
// tokens: a leading ScriptToken becomes a script declaration; "key : value ;"
// becomes an assignment; anything else is parsed as a style declaration.
public static StylesheetStatement ParseStatement(ITokenStream tokens) {
    var zero = tokens.DrainPeek();
    if (zero is ScriptToken scriptToken) {
        return(ParseScriptDeclaration(tokens));
    }
    // Look at the second significant token: a colon marks "key: value;".
    var one = tokens.PeekOnly(t => !(t is WhitespaceToken), 1);
    if (one is ColonToken) {
        tokens.Consume();       // consume the key token
        tokens.DrainConsume();  // consume the ':' (skipping whitespace)
        // The key may be given as an identifier or a quoted string.
        string key;
        switch (zero) {
            case IdentifierToken id: key = id.Value; break;
            case StringToken str: key = str.Value; break;
            default: throw new FormatException($"Expected an identifier or string at {tokens.FormatPosition(zero)}.");
        }
        // Accumulate the value up to the terminating ';'.
        // counter = number of word tokens seen; ws = a single separating space
        // has been appended and not yet followed by a word (collapses runs of
        // whitespace and suppresses a leading space).
        var stringBuilder = new StringBuilder();
        var ws = false;
        var counter = 0;
        foreach (var t in tokens .ConsumeUntil(t => t is SemiColonToken)) {
            switch (t) {
                case IdentifierToken id: stringBuilder.Append(id.Value); counter++; ws = false; continue;
                case StringToken str: stringBuilder.Append(str.Value); counter++; ws = false; continue;
                case WhitespaceToken _: if (counter > 0 && !ws) { stringBuilder.Append(" "); } ws = true; continue;
                default: throw new FormatException($"Invalid value at {tokens.FormatPosition(t)}.");
            }
        }
        if (counter == 0) {
            throw new FormatException($"Expected an assigned value to {tokens.FormatPosition(zero)}.");
        }
        tokens.Consume(); // consume the ';'
        // Drop the trailing separator space appended before the ';', if any.
        if (ws) {
            stringBuilder.Remove(stringBuilder.Length - 1, 1);
        }
        return(new AssignmentStatement(key, stringBuilder.ToString()));
    }
    return(ParseStyleDeclaration(tokens, true));
}