/// <summary>
/// Creates a discrete fuzzy set over the given dimension, using an explicit set of
/// membership-function intervals.
/// </summary>
/// <param name="dimension">Discrete dimension the set is defined on.</param>
/// <param name="intervals">Membership-function intervals describing the set.</param>
public DiscreteSet(IDiscreteDimension dimension, IntervalSet intervals)
    : base(dimension)
{
    _intervals = intervals;
}
/// <summary>
/// Creates a continuous fuzzy set over the given dimension with the given caption.
/// The membership intervals start out empty (a fresh <see cref="IntervalSet"/>).
/// </summary>
/// <param name="dimension">Continuous dimension the set is defined on.</param>
/// <param name="caption">Human-readable caption for the set.</param>
public ContinuousSet(IContinuousDimension dimension, string caption)
    : base(dimension, caption)
{
    _intervals = new IntervalSet(dimension);
}
/// <summary>
/// Creates a transition whose label is interpreted as the complement of
/// <paramref name="set"/> (matches any symbol NOT in the set).
/// </summary>
/// <param name="target">State this transition leads to.</param>
/// <param name="set">The excluded symbol set.</param>
public NotSetTransition(ATNState target, IntervalSet set)
    : base(target, set)
{
}
/// <summary>
/// Walks the ATN for a single rule only. It returns the token stream position for each path that could be matched in this rule.
/// The result can be empty in case we hit only non-epsilon transitions that didn't match the current input or if we
/// hit the caret position.
/// </summary>
/// <param name="startState">The rule's start state in the ATN.</param>
/// <param name="tokenIndex">Index into <c>this.tokens</c> where matching for this rule begins.</param>
/// <param name="callStack">Rule indices entered so far; this rule is pushed on entry and popped before every return.</param>
/// <param name="indentation">Debug indentation string carried through recursive calls.</param>
/// <returns>The set of token indices reachable at this rule's stop state.</returns>
private ISet<int> ProcessRule(ATNState startState, int tokenIndex, LinkedList<int> callStack, string indentation)
{
    // Start with rule specific handling before going into the ATN walk.
    // Check first if we've taken this path with the same input before.
    if (!this.shortcutMap.TryGetValue(startState.ruleIndex, out var positionMap))
    {
        positionMap = new Dictionary<int, ISet<int>>();
        this.shortcutMap[startState.ruleIndex] = positionMap;
    }
    else
    {
        if (positionMap.ContainsKey(tokenIndex))
        {
            // Same rule, same input position as a previous walk: reuse the cached result.
            return (positionMap[tokenIndex]);
        }
    }
    var result = new HashSet<int>();
    // For rule start states we determine and cache the follow set, which gives us 3 advantages:
    // 1) We can quickly check if a symbol would be matched when we follow that rule. We can so check in advance
    //    and can save us all the intermediate steps if there is no match.
    // 2) We'll have all symbols that are collectable already together when we are at the caret when entering a rule.
    // 3) We get this lookup for free with any 2nd or further visit of the same rule, which often happens
    //    in non trivial grammars, especially with (recursive) expressions and of course when invoking code completion
    //    multiple times.
    if (!this.followSetsByATN.TryGetValue(this.parser.GetType().Name, out var setsPerState))
    {
        setsPerState = new FollowSetsPerState();
        this.followSetsByATN[this.parser.GetType().Name] = setsPerState;
    }
    if (!setsPerState.TryGetValue(startState.stateNumber, out var followSets))
    {
        followSets = new FollowSetsHolder();
        setsPerState[startState.stateNumber] = followSets;
        var stop = this.atn.ruleToStopState[startState.ruleIndex];
        followSets.Sets = this.DetermineFollowSets(startState, stop).ToList();
        // Sets are split by path to allow translating them to preferred rules. But for quick hit tests
        // it is also useful to have a set with all symbols combined.
        var combined = new IntervalSet();
        foreach (var set in followSets.Sets)
        {
            combined.AddAll(set.Intervals);
        }
        followSets.Combined = combined;
    }
    callStack.AddLast(startState.ruleIndex);
    var currentSymbol = this.tokens[tokenIndex].Type;
    if (tokenIndex >= this.tokens.Count - 1)
    {
        // At caret?
        if (this.preferredRules.Contains(startState.ruleIndex))
        {
            // No need to go deeper when collecting entries and we reach a rule that we want to collect anyway.
            this.TranslateToRuleIndex(callStack.ToList());
        }
        else
        {
            // Convert all follow sets to either single symbols or their associated preferred rule and add
            // the result to our candidates list.
            foreach (var set in followSets.Sets)
            {
                var fullPath = new LinkedList<int>(callStack);
                foreach (var item in set.Path)
                {
                    fullPath.AddLast(item);
                }
                if (!this.TranslateToRuleIndex(fullPath.ToList()))
                {
                    foreach (var symbol in set.Intervals.ToList())
                    {
                        if (!this.ignoredTokens.Contains(symbol))
                        {
                            if (!this.candidates.Tokens.ContainsKey(symbol))
                            {
                                // Following is empty if there is more than one entry in the set.
                                this.candidates.Tokens[symbol] = set.Following;
                            }
                            else
                            {
                                // More than one following list for the same symbol.
                                if (!this.candidates.Tokens[symbol].SequenceEqual(set.Following))
                                {
                                    this.candidates.Tokens[symbol] = new List<int>();
                                }
                            }
                        }
                    }
                }
            }
        }
        callStack.RemoveLast();
        return (result);
    }
    else
    {
        // Process the rule if we either could pass it without consuming anything (epsilon transition)
        // or if the current input symbol will be matched somewhere after this entry point.
        // Otherwise stop here.
        if (!followSets.Combined.Contains(TokenConstants.Epsilon) && !followSets.Combined.Contains(currentSymbol))
        {
            callStack.RemoveLast();
            return (result);
        }
    }
    // The current state execution pipeline contains all yet-to-be-processed ATN states in this rule.
    // For each such state we store the token index + a list of rules that lead to it.
    var statePipeline = new LinkedList<PipelineEntry>();
    // Bootstrap the pipeline.
    statePipeline.AddLast(new PipelineEntry(startState, tokenIndex));
    PipelineEntry currentEntry;
    // Depth-first walk: entries are pushed and popped at the tail of the linked list.
    while (statePipeline.Count() > 0)
    {
        currentEntry = statePipeline.Last();
        statePipeline.RemoveLast();
        ++this.statesProcessed;
        currentSymbol = this.tokens[currentEntry.TokenIndex].Type;
        var atCaret = currentEntry.TokenIndex >= this.tokens.Count - 1;
        switch (currentEntry.State.StateType)
        {
            // Happens only for the first state in this rule, not subrules.
            case StateType.RuleStart:
                indentation += " ";
                break;
            // Record the token index we are at, to report it to the caller.
            case StateType.RuleStop:
                result.Add(currentEntry.TokenIndex);
                continue;
            default:
                break;
        }
        var transitions = currentEntry.State.Transitions;
        foreach (var transition in transitions)
        {
            switch (transition.TransitionType)
            {
                case TransitionType.Rule:
                {
                    // Recurse into the invoked rule; each returned end position continues at the follow state.
                    var endStatus = this.ProcessRule(transition.target, currentEntry.TokenIndex, callStack, indentation);
                    foreach (var position in endStatus)
                    {
                        statePipeline.AddLast(new PipelineEntry(((RuleTransition)transition).followState, position));
                    }
                    break;
                }
                case TransitionType.Predicate:
                {
                    if (this.CheckPredicate((PredicateTransition)transition))
                    {
                        statePipeline.AddLast(new PipelineEntry(transition.target, currentEntry.TokenIndex));
                    }
                    break;
                }
                case TransitionType.Wildcard:
                {
                    if (atCaret)
                    {
                        // A wildcard at the caret means every (non-ignored) token is a candidate.
                        if (!this.TranslateToRuleIndex(callStack.ToList()))
                        {
                            foreach (var token in IntervalSet.Of(TokenConstants.MinUserTokenType, this.atn.maxTokenType).ToList())
                            {
                                if (!this.ignoredTokens.Contains(token))
                                {
                                    this.candidates.Tokens[token] = new List<int>();
                                }
                            }
                        }
                    }
                    else
                    {
                        statePipeline.AddLast(new PipelineEntry(transition.target, currentEntry.TokenIndex + 1));
                    }
                    break;
                }
                default:
                {
                    if (transition.IsEpsilon)
                    {
                        // Jump over simple states with a single outgoing epsilon transition.
                        statePipeline.AddLast(new PipelineEntry(transition.target, currentEntry.TokenIndex));
                        continue;
                    }
                    var set = transition.Label;
                    if (set != null && set.Count > 0)
                    {
                        if (transition.TransitionType == TransitionType.NotSet)
                        {
                            // A ~set transition matches the complement over the full token range.
                            set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, this.atn.maxTokenType));
                        }
                        if (atCaret)
                        {
                            if (!this.TranslateToRuleIndex(callStack.ToList()))
                            {
                                var list = set.ToList();
                                var isAddFollowing = list.Count == 1;
                                foreach (var symbol in list)
                                {
                                    if (!this.ignoredTokens.Contains(symbol))
                                    {
                                        if (isAddFollowing)
                                        {
                                            this.candidates.Tokens[symbol] = this.GetFollowingTokens(transition);
                                        }
                                        else
                                        {
                                            this.candidates.Tokens[symbol] = new List<int>();
                                        }
                                    }
                                }
                            }
                        }
                        else
                        {
                            // Only follow the transition if it can actually consume the current symbol.
                            if (set.Contains(currentSymbol))
                            {
                                statePipeline.AddLast(new PipelineEntry(transition.target, currentEntry.TokenIndex + 1));
                            }
                        }
                    }
                }
                break;
            }
        }
    }
    callStack.RemoveLast();
    // Cache the result, for later lookup to avoid duplicate walks.
    positionMap[tokenIndex] = result;
    return (result);
}
/// <summary>
/// Compute the set of tokens that can follow <code>s</code> in the ATN in the specified <code>ctx</code>.
/// <p/>
/// If <code>ctx</code> is <see cref="PredictionContext.EmptyLocal">PredictionContext.EmptyLocal</see>
/// and <code>stopState</code> or the end of the rule containing <code>s</code> is reached,
/// <see cref="TokenConstants.Epsilon"/> is added to the result set. If <code>ctx</code> is not
/// <see cref="PredictionContext.EmptyLocal">PredictionContext.EmptyLocal</see> and <code>addEOF</code>
/// is <code>true</code> and <code>stopState</code> or the end of the outermost rule is reached,
/// <see cref="TokenConstants.Eof"/> is added to the result set.
/// </summary>
/// <param name="s">the ATN state.</param>
/// <param name="stopState">
/// the ATN state to stop at. This can be a
/// <see cref="BlockEndState">BlockEndState</see> to detect epsilon paths through a closure.
/// </param>
/// <param name="ctx">
/// The outer context, or <see cref="PredictionContext.EmptyLocal">PredictionContext.EmptyLocal</see>
/// if the outer context should not be used.
/// </param>
/// <param name="look">The result lookahead set.</param>
/// <param name="lookBusy">
/// A set used for preventing epsilon closures in the ATN from causing a stack overflow.
/// Outside code should pass <code>new HashSet&lt;ATNConfig&gt;</code> for this argument.
/// </param>
/// <param name="calledRuleStack">
/// A set used for preventing left recursion in the ATN from causing a stack overflow.
/// Outside code should pass <code>new BitSet()</code> for this argument.
/// </param>
/// <param name="seeThruPreds">
/// <code>true</code> to treat semantic predicates as implicitly <code>true</code> and "see through them",
/// otherwise <code>false</code> to treat semantic predicates as opaque and add
/// <see cref="HitPred">HitPred</see> to the result if one is encountered.
/// </param>
/// <param name="addEOF">
/// Add <see cref="TokenConstants.Eof"/> to the result if the end of the outermost context is reached.
/// This parameter has no effect if <code>ctx</code> is
/// <see cref="PredictionContext.EmptyLocal">PredictionContext.EmptyLocal</see>.
/// </param>
protected internal virtual void Look(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
{
    ATNConfig c = ATNConfig.Create(s, 0, ctx);
    // Abort if this configuration was already visited (prevents infinite epsilon loops).
    if (!lookBusy.Add(c))
    {
        return;
    }
    if (s == stopState)
    {
        if (PredictionContext.IsEmptyLocal(ctx))
        {
            look.Add(TokenConstants.Epsilon);
            return;
        }
        else if (ctx.IsEmpty && addEOF)
        {
            look.Add(TokenConstants.Eof);
            return;
        }
    }
    if (s is RuleStopState)
    {
        if (PredictionContext.IsEmptyLocal(ctx))
        {
            look.Add(TokenConstants.Epsilon);
            return;
        }
        else if (ctx.IsEmpty && addEOF)
        {
            look.Add(TokenConstants.Eof);
            return;
        }
        // Pop back to every caller recorded in the prediction context and continue from its return state.
        // BUG FIX: the previous version wrapped the recursive call in a second loop over the context
        // (pairing return state i with every parent j) and returned after the first non-empty return
        // state. Each return state i must be paired with its own parent i, and all context entries
        // must be processed before returning.
        for (int i = 0; i < ctx.Size; i++)
        {
            if (ctx.GetReturnState(i) != PredictionContext.EmptyFullStateKey)
            {
                ATNState returnState = atn.states[ctx.GetReturnState(i)];
                bool removed = calledRuleStack.Get(returnState.ruleIndex);
                try
                {
                    // Temporarily allow re-entering the rule we are returning to.
                    calledRuleStack.Clear(returnState.ruleIndex);
                    Look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
                }
                finally
                {
                    if (removed)
                    {
                        calledRuleStack.Set(returnState.ruleIndex);
                    }
                }
            }
        }
        return;
    }
    int n = s.NumberOfTransitions;
    for (int i_1 = 0; i_1 < n; i_1++)
    {
        Transition t = s.Transition(i_1);
        if (t.GetType() == typeof(RuleTransition))
        {
            // Skip rule invocations already on the call stack (left recursion guard).
            if (calledRuleStack.Get(((RuleTransition)t).target.ruleIndex))
            {
                continue;
            }
            PredictionContext newContext = ctx.GetChild(((RuleTransition)t).followState.stateNumber);
            try
            {
                calledRuleStack.Set(((RuleTransition)t).target.ruleIndex);
                Look(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
            }
            finally
            {
                calledRuleStack.Clear(((RuleTransition)t).target.ruleIndex);
            }
        }
        else if (t is AbstractPredicateTransition)
        {
            if (seeThruPreds)
            {
                Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
            }
            else
            {
                look.Add(HitPred);
            }
        }
        else if (t.IsEpsilon)
        {
            Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
        }
        else if (t.GetType() == typeof(WildcardTransition))
        {
            look.AddAll(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
        }
        else
        {
            IntervalSet set = t.Label;
            if (set != null)
            {
                if (t is NotSetTransition)
                {
                    // A ~set transition matches the complement over the full token range.
                    set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
                }
                look.AddAll(set);
            }
        }
    }
}
// Step to state and continue parsing input.
// Returns a list of transitions leading to a state that accepts input.
// Parameters:
//   p      - rule indices entered so far (copied; the current rule, if any, is appended).
//   t      - the edge that was taken to reach the current state; t._to is the state to process.
//   indent - recursion depth, used for log indentation and as part of the memoization key.
// Returns null when the walk dead-ends (no match / already visited); otherwise a list of
// edge chains, each leading to an accepting position.
private List<List<Edge>> EnterState(List<int> p, Edge t, int indent)
{
    int here = ++entry_value;
    // NOTE(review): index_on_transition is assigned but never used below.
    int index_on_transition = t._index_at_transition;
    int token_index = t._index;
    ATNState state = t._to;
    // Determine whether this state is a rule start state and, if so, which rule.
    var rule_match = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
    var start_rule = rule_match.Any() ? rule_match.FirstOrDefault().ruleIndex : -1;
    var q = p.ToList();
    if (start_rule >= 0)
    {
        q.Add(start_rule);
    }
    // Upon reaching the cursor, return match.
    bool at_match = token_index == _cursor;
    if (at_match)
    {
        if (_log_parse)
        {
            System.Console.Error.Write(
                new String(' ', indent * 2)
                + "Entry " + here
                + " return ");
        }
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    IToken input_token = _input[token_index];
    if (_log_parse)
    {
        var name = (start_rule >= 0) ?
            (" " + _parser.RuleNames[start_rule]) : "";
        System.Console.Error.WriteLine(
            new String(' ', indent * 2)
            + "Entry " + here
            + " State " + state
            + name
            + " tokenIndex " + token_index
            + " " + input_token.Text
            );
    }
    // Also stop when the current token is at or past the cursor position.
    bool at_match2 = input_token.TokenIndex >= _cursor;
    if (at_match2)
    {
        if (_log_parse)
        {
            System.Console.Error.Write(
                new String(' ', indent * 2)
                + "Entry " + here
                + " return ");
        }
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Memoization: skip states already visited with the same token index, depth, and rule path.
    if (_visited.ContainsKey(new Tuple<ATNState, int, int>(state, token_index, indent)))
    {
        if (_visited_extra.ContainsKey(new Tuple<ATNState, int, int, List<int>>(
            state, token_index, indent, p)))
        {
            if (_log_parse)
            {
                System.Console.Error.WriteLine(
                    new String(' ', indent * 2)
                    + "already visited.");
            }
            return (null);
        }
    }
    _visited_extra[new Tuple<ATNState, int, int, List<int>>(state, token_index, indent, q)] = true;
    _visited[new Tuple<ATNState, int, int>(state, token_index, indent)] = true;
    List<List<Edge>> result = new List<List<Edge>>();
    // A rule stop state terminates this walk successfully.
    if (_stop_states.Contains(state))
    {
        if (_log_parse)
        {
            var n = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
            var r = n.Any() ? n.FirstOrDefault().ruleIndex : -1;
            var name = (r >= 0) ? (" " + _parser.RuleNames[r]) : "";
            System.Console.Error.Write(
                new String(' ', indent * 2)
                + "Entry " + here
                + " State " + state
                + name
                + " tokenIndex " + token_index
                + " " + input_token.Text
                + " return ");
        }
        List<List<Edge>> res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            string str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Search all transitions from state.
    foreach (Transition transition in state.TransitionsArray)
    {
        List<List<Edge>> matches = null;
        switch (transition.TransitionType)
        {
            case TransitionType.RULE:
            {
                // Enter the submachine for the invoked rule.
                RuleTransition rule = (RuleTransition)transition;
                // NOTE(review): sub_state is assigned but never used below.
                ATNState sub_state = rule.target;
                matches = EnterState(q, new Edge()
                {
                    _from = state,
                    _to = rule.target,
                    _follow = rule.followState,
                    _label = rule.Label,
                    _type = rule.TransitionType,
                    _index = token_index,
                    _index_at_transition = token_index
                }, indent + 1);
                if (matches != null && matches.Count == 0)
                {
                    if (_log_parse)
                    {
                        System.Console.Error.WriteLine(
                            new String(' ', indent * 2)
                            + "throwing exception.");
                    }
                    throw new Exception();
                }
                if (matches != null)
                {
                    // For paths that reached the submachine's stop state, continue at the rule's
                    // follow state and splice the continuation in front of the submachine match.
                    List<List<Edge>> new_matches = new List<List<Edge>>();
                    foreach (List<Edge> match in matches)
                    {
                        Edge f = match.First(); // "to" is possibly final state of submachine.
                        Edge l = match.Last(); // "to" is start state of submachine.
                        bool is_final = _stop_states.Contains(f._to);
                        // NOTE(review): is_at_caret is computed but never used below.
                        bool is_at_caret = f._index >= _cursor;
                        if (!is_final)
                        {
                            new_matches.Add(match);
                        }
                        else
                        {
                            List<List<Edge>> xxx = EnterState(q, new Edge()
                            {
                                _from = f._to,
                                _to = rule.followState,
                                _label = null,
                                _type = TransitionType.EPSILON,
                                _index = f._index,
                                _index_at_transition = f._index
                            }, indent + 1);
                            if (xxx != null && xxx.Count == 0)
                            {
                                if (_log_parse)
                                {
                                    System.Console.Error.WriteLine(
                                        new String(' ', indent * 2)
                                        + "throwing exception.");
                                }
                                throw new Exception();
                            }
                            if (xxx != null)
                            {
                                foreach (List<Edge> y in xxx)
                                {
                                    List<Edge> copy = y.ToList();
                                    foreach (Edge g in match)
                                    {
                                        copy.Add(g);
                                    }
                                    new_matches.Add(copy);
                                }
                            }
                        }
                    }
                    matches = new_matches;
                }
            }
            break;
            case TransitionType.PREDICATE:
                // Only follow the transition if the predicate evaluates to true.
                if (CheckPredicate((PredicateTransition)transition))
                {
                    matches = EnterState(q, new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    }, indent + 1);
                    if (matches != null && matches.Count == 0)
                    {
                        if (_log_parse)
                        {
                            System.Console.Error.WriteLine(
                                new String(' ', indent * 2)
                                + "throwing exception.");
                        }
                        throw new Exception();
                    }
                }
                break;
            case TransitionType.WILDCARD:
                // Wildcard consumes the current token unconditionally.
                matches = EnterState(q, new Edge()
                {
                    _from = state,
                    _to = transition.target,
                    _label = transition.Label,
                    _type = transition.TransitionType,
                    _index = token_index + 1,
                    _index_at_transition = token_index
                }, indent + 1);
                if (matches != null && matches.Count == 0)
                {
                    if (_log_parse)
                    {
                        System.Console.Error.WriteLine(
                            new String(' ', indent * 2)
                            + "throwing exception.");
                    }
                    throw new Exception();
                }
                break;
            default:
                if (transition.IsEpsilon)
                {
                    // Epsilon transitions do not consume input.
                    matches = EnterState(q, new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    }, indent + 1);
                    if (matches != null && matches.Count == 0)
                    {
                        if (_log_parse)
                        {
                            System.Console.Error.WriteLine(
                                new String(' ', indent * 2)
                                + "throwing exception.");
                        }
                        throw new Exception();
                    }
                }
                else
                {
                    // Labeled (consuming) transition: follow only if the label matches the current token.
                    IntervalSet set = transition.Label;
                    if (set != null && set.Count > 0)
                    {
                        if (transition.TransitionType == TransitionType.NOT_SET)
                        {
                            // A ~set transition matches the complement over the full token range.
                            set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, _parser.Atn.maxTokenType));
                        }
                        if (set.Contains(input_token.Type))
                        {
                            matches = EnterState(q, new Edge()
                            {
                                _from = state,
                                _to = transition.target,
                                _label = transition.Label,
                                _type = transition.TransitionType,
                                _index = token_index + 1,
                                _index_at_transition = token_index
                            }, indent + 1);
                            if (matches != null && matches.Count == 0)
                            {
                                if (_log_parse)
                                {
                                    System.Console.Error.WriteLine(
                                        new String(' ', indent * 2)
                                        + "throwing exception.");
                                }
                                throw new Exception();
                            }
                        }
                    }
                }
                break;
        }
        if (matches != null)
        {
            // Append the incoming edge to each match and sanity-check edge chain continuity.
            foreach (List<Edge> match in matches)
            {
                List<Edge> x = match.ToList();
                if (t != null)
                {
                    x.Add(t);
                    Edge prev = null;
                    foreach (Edge z in x)
                    {
                        ATNState ff = z._to;
                        if (prev != null)
                        {
                            if (prev._from != ff)
                            {
                                System.Console.Error.WriteLine(
                                    new String(' ', indent * 2)
                                    + "Fail " + PrintSingle(x));
                                Debug.Assert(false);
                            }
                        }
                        prev = z;
                    }
                }
                result.Add(x);
            }
        }
    }
    if (result.Count == 0)
    {
        if (_log_parse)
        {
            System.Console.Error.WriteLine(
                new String(' ', indent * 2)
                + "result empty.");
        }
        return (null);
    }
    if (_log_parse)
    {
        var n = _parser.Atn.ruleToStartState.Where(v => v.stateNumber == state.stateNumber);
        var r = n.Any() ? n.FirstOrDefault().ruleIndex : -1;
        var name = (r >= 0) ? (" " + _parser.RuleNames[r]) : "";
        System.Console.Error.Write(
            new String(' ', indent * 2)
            + "Entry " + here
            + " State " + state
            + name
            + " tokenIndex " + token_index
            + " " + input_token.Text
            + " return ");
        string str = PrintResult(result);
        System.Console.Error.WriteLine(str);
    }
    return (result);
}
/// <summary>
/// Creates a fuzzy set for the specified dimension from an explicit set of membership
/// functions, each one defined for a distinct interval.
/// </summary>
/// <param name="dimension">Dimension of the fuzzy set.</param>
/// <param name="intervals">Explicit set of membership functions, each one for a distinct interval.</param>
public FuzzySet(IDimension dimension, IntervalSet intervals)
{
    _dimension = dimension;
    _intervals = intervals;
}
/** From a set of edgeset->list-of-alts mappings, create a DFA
 *  that uses syn preds for all |list-of-alts|>1.
 *  Each edge set becomes one transition out of the start state; an edge that predicts a
 *  single alternative goes straight to an accept state, while an edge that predicts
 *  several alternatives is resolved by adding one predicate-labeled transition per
 *  alternative.
 */
public LL1DFA(int decisionNumber, NFAState decisionStartState, MultiMap<IntervalSet, int> edgeMap)
    : base(decisionNumber, decisionStartState)
{
    DFAState s0 = NewState();
    StartState = s0;
    UnreachableAlts.Clear();
    foreach (var edgeVar in edgeMap)
    {
        IntervalSet edge = edgeVar.Key;
        IList<int> alts = edgeVar.Value;
        // make sure alts are attempted in order
        alts = alts.OrderBy(i => i).ToList();
        //Collections.sort( alts );
        //[email protected](edge+" -> "+alts);
        DFAState s = NewState();
        s.LookaheadDepth = 1;
        Label e = GetLabelForSet(edge);
        s0.AddTransition(s, e);
        if (alts.Count == 1)
        {
            // Unambiguous: this edge predicts exactly one alternative.
            s.IsAcceptState = true;
            int alt = alts[0];
            SetAcceptState(alt, s);
            s.CachedUniquelyPredicatedAlt = alt;
        }
        else
        {
            // resolve with syntactic predicates. Add edges from
            // state s that test predicates.
            s.IsResolvedWithPredicates = true;
            for (int i = 0; i < alts.Count; i++)
            {
                int alt = (int)alts[i];
                s.CachedUniquelyPredicatedAlt = NFA.INVALID_ALT_NUMBER;
                DFAState predDFATarget = GetAcceptState(alt);
                if (predDFATarget == null)
                {
                    // create if not there.
                    predDFATarget = NewState();
                    predDFATarget.IsAcceptState = true;
                    predDFATarget.CachedUniquelyPredicatedAlt = alt;
                    SetAcceptState(alt, predDFATarget);
                }
                // add a transition to pred target from d
                /*
                 * int walkAlt =
                 * decisionStartState.translateDisplayAltToWalkAlt(alt);
                 * NFAState altLeftEdge = nfa.grammar.getNFAStateForAltOfDecision(decisionStartState, walkAlt);
                 * NFAState altStartState = (NFAState)altLeftEdge.transition[0].target;
                 * SemanticContext ctx = nfa.grammar.ll1Analyzer.getPredicates(altStartState);
                 * [email protected]("sem ctx = "+ctx);
                 * if ( ctx == null ) {
                 * ctx = new SemanticContext.TruePredicate();
                 * }
                 * s.addTransition(predDFATarget, new Label(ctx));
                 */
                SemanticContext.Predicate synpred = GetSynPredForAlt(decisionStartState, alt);
                if (synpred == null)
                {
                    // No syn pred available: fall back to an always-true predicate.
                    synpred = new SemanticContext.TruePredicate();
                }
                s.AddTransition(predDFATarget, new PredicateLabel(synpred));
            }
        }
    }
    //[email protected]("dfa for preds=\n"+this);
}
/// <summary>
/// Builds a DFA edge label for an interval set: a single-atom label when the set
/// contains exactly one element, otherwise a label wrapping the whole set.
/// </summary>
/// <param name="edgeSet">The set of symbols on the edge.</param>
/// <returns>The label representing <paramref name="edgeSet"/>.</returns>
protected virtual Label GetLabelForSet( IntervalSet edgeSet )
{
    int atom = edgeSet.GetSingleElement();
    return atom != Label.INVALID ? new Label( atom ) : new Label( edgeSet );
}
/// <summary>
/// Applies the operator to a single interval: the membership polynomial is mapped to
/// 1 - p(x) over the same bounds and the resulting interval is added to
/// <paramref name="output"/>.
/// </summary>
/// <param name="operand1">The interval to transform.</param>
/// <param name="output">The interval set receiving the transformed interval.</param>
internal override void Operate(Interval operand1, ref IntervalSet output)
{
    Polynomial complemented = new Polynomial(1) - operand1.Polynomial;
    Interval mapped = new Interval(output, operand1.LowerBound, operand1.UpperBound, complemented);
    output.AddInterval(mapped);
}
/// <summary>
/// Binds the rule-derived token sets after the base class has bound its rules:
/// source sets (tokens entering the identifier start states), follow sets (tokens
/// after the identifier end states), and their definition-only / reference-only
/// differences.
/// </summary>
protected override void BindRulesImpl()
{
    base.BindRulesImpl();

    var definitionIdentifier = Bindings.SymbolDefinitionIdentifier;
    var referenceIdentifier = Bindings.SymbolReferenceIdentifier;

    // Source sets: tokens that can lead into each identifier's start state.
    _definitionSourceSet = definitionIdentifier.StartState.GetSourceSet();
    _referenceSourceSet = referenceIdentifier.StartState.GetSourceSet();
    _definitionOnlySourceSet = _definitionSourceSet.Except(_referenceSourceSet);
    _referenceOnlySourceSet = _referenceSourceSet.Except(_definitionSourceSet);

    // Follow sets: tokens that can follow each identifier's end state.
    _definitionFollowSet = definitionIdentifier.EndState.GetFollowSet();
    _referenceFollowSet = referenceIdentifier.EndState.GetFollowSet();
    _definitionOnlyFollowSet = _definitionFollowSet.Except(_referenceFollowSet);
    _referenceOnlyFollowSet = _referenceFollowSet.Except(_definitionFollowSet);
}
/// <summary>
/// Method to be invoked recursively to build the whole tree: creates a node for the
/// given relation (fuzzy set or multidimensional relation), then appends its dimensions
/// and, when all required inputs are available, its membership function text.
/// </summary>
/// <param name="subrelation">The relation (fuzzy set or node relation) to render.</param>
/// <param name="nodeCollection">Tree node collection the new node is added to.</param>
/// <param name="pictureBox">Picture box passed through to recursive calls.</param>
/// <param name="label">Label passed through to recursive calls.</param>
protected void buildSubTree(FuzzyRelation subrelation, TreeNodeCollection nodeCollection, PictureBox pictureBox, Label label)
{
    TreeNode tnThis;
    if (subrelation is FuzzySet)
    {
        // Leaf: a single fuzzy set.
        FuzzySet fs = (FuzzySet)subrelation;
        tnThis = new TreeNode();
        if (!String.IsNullOrEmpty(fs.Caption))
        {
            tnThis.Text = fs.Caption;
        }
        else
        {
            tnThis.Text = "Fuzzy Set";
        }
        tnThis.ImageKey = "fuzzySet";
        tnThis.SelectedImageKey = "fuzzySet";
        TreeNode tnDimType = new TreeNode("Type: " + (fs.Dimensions[0] is IContinuousDimension ? "Continuous" : "Discrete"));
        tnDimType.ImageKey = "dimensionType";
        tnDimType.SelectedImageKey = "dimensionType";
        tnThis.Nodes.Add(tnDimType);
    }
    else
    {
        // Inner node: a relation combining nested subrelations with an operator.
        NodeFuzzyRelation nfr = (NodeFuzzyRelation)subrelation;
        tnThis = new TreeNode("Multidimensional Relation");
        tnThis.ImageKey = "nodeFuzzyRelation";
        tnThis.SelectedImageKey = "nodeFuzzyRelation";
        TreeNode tnSubrelations = new TreeNode(nfr.Operator.Caption);
        tnSubrelations.ImageKey = "subrelations";
        tnSubrelations.SelectedImageKey = "subrelations";
        tnSubrelations.ForeColor = OperatorFontColor;
        tnThis.Nodes.Add(tnSubrelations);
        //Find all operands. Several commutative operands of same type from different nested levels will be displayed together
        List<FuzzyRelation> nestedSubrelations = new List<FuzzyRelation>();
        findNestedOperands(nfr, nestedSubrelations);
        foreach (FuzzyRelation nestedSubrelation in nestedSubrelations)
        {
            buildSubTree(nestedSubrelation, tnSubrelations.Nodes, pictureBox, label);
        }
    }
    #region Dimensions
    TreeNode tnDimensions = new TreeNode("Dimension" + ((subrelation.Dimensions.Count() > 1) ? "s" : ""));
    tnDimensions.ImageKey = "dimensions";
    tnDimensions.SelectedImageKey = "dimensions";
    tnThis.Nodes.Add(tnDimensions);
    foreach (IDimension dimension in subrelation.Dimensions)
    {
        bool blnKnown = _inputs.ContainsKey(dimension);
        bool blnContinuous = dimension is IContinuousDimension;
        Color fontColor;
        string strDimCaption = String.IsNullOrEmpty(dimension.Name) ? "Dimension" : dimension.Name;
        if (blnKnown)
        {
            // Append the known input value to the caption.
            if (blnContinuous)
            {
                strDimCaption += String.Format("={0:F5} {1}", _inputs[dimension], ((IContinuousDimension)dimension).Unit);
            }
            else
            {
                IDiscreteDimension discreteDim = (IDiscreteDimension)dimension;
                if (discreteDim.DefaultSet != null)
                {
                    strDimCaption += "=" + discreteDim.DefaultSet.GetMember(_inputs[dimension]).Caption;
                }
                else
                {
                    strDimCaption += String.Format("=#{0:F0}", _inputs[dimension]);
                }
            }
            fontColor = SpecifiedDimensionFontColor;
        }
        else
        {
            fontColor = UnspecifiedDimensionFontColor;
        }
        // The variable dimension overrides the known/unknown color.
        if (dimension == _variableDimension)
        {
            fontColor = VariableDimensionFontColor;
        }
        string imageKey = String.Format("dimension{0}{1}", blnContinuous ? "Continuous" : "Discrete", blnKnown ? "Known" : "Unknown");
        TreeNode tnDimension = new TreeNode(strDimCaption);
        tnDimension.ImageKey = imageKey;
        tnDimension.SelectedImageKey = imageKey;
        tnDimension.ForeColor = fontColor;
        addToolTip(tnDimension, dimension.Description);
        tnDimensions.Nodes.Add(tnDimension);
    }
    #endregion
    #region Function
    if (allInputDimensionsAvailable(subrelation))
    {
        IDimension realVariableDimension;
        if (subrelation.Dimensions.Count() == 1)
        {
            realVariableDimension = subrelation.Dimensions[0];
        }
        else
        {
            realVariableDimension = _variableDimension;
        }
        // Restrict the inputs to this subrelation's dimensions, excluding the variable one.
        Dictionary<IDimension, decimal> copyInputs = new Dictionary<IDimension, decimal>(_inputs);
        foreach (KeyValuePair<IDimension, decimal> item in _inputs)
        {
            if (!subrelation.Dimensions.Contains(item.Key))
            {
                copyInputs.Remove(item.Key);
            }
        }
        if (copyInputs.ContainsKey(realVariableDimension))
        {
            copyInputs.Remove(realVariableDimension);
        }
        if (subrelation.Dimensions.Count() > copyInputs.Count())
        {
            // Render the membership function's textual form as one child node per line.
            IntervalSet intervals = subrelation.GetFunction(copyInputs);
            string strIntervals = intervals.ToString();
            string[] arrLines = strIntervals.Split(new char[] { '\n' });
            TreeNode tnFunction = new TreeNode("Function");
            tnFunction.ImageKey = "function";
            tnFunction.SelectedImageKey = "function";
            foreach (string line in arrLines)
            {
                if (!String.IsNullOrWhiteSpace(line))
                {
                    TreeNode tnLine = new TreeNode(line);
                    tnLine.ImageKey = "spacer";
                    tnLine.SelectedImageKey = "spacer";
                    tnFunction.Nodes.Add(tnLine);
                }
            }
            tnThis.Nodes.Add(tnFunction);
        }
    }
    #endregion
    tnThis.ForeColor = MainNodeFontColor;
    tnThis.Tag = subrelation;
    nodeCollection.Add(tnThis);
}
/// <summary>
/// Creates a scale from a root note and a set of intervals.
/// NOTE(review): both parameters are currently ignored — the constructor body is empty,
/// so no state is initialized from <paramref name="root"/> or <paramref name="intervals"/>.
/// Confirm whether this is an intentional stub.
/// </summary>
/// <param name="root">Root note of the scale.</param>
/// <param name="intervals">Intervals defining the scale relative to the root.</param>
public Scale(Note root, IntervalSet intervals) { }
/// <summary>
/// This method is called to leave error recovery mode after recovering from
/// a recognition exception: it clears the recovery flag and resets the
/// last-error bookkeeping fields.
/// </summary>
/// <param name="recognizer">The parser leaving error recovery mode (not read here).</param>
protected internal virtual void EndErrorCondition([NotNull] Parser recognizer)
{
    // Reset all error-recovery tracking state.
    errorRecoveryMode = false;
    lastErrorStates = null;
    lastErrorIndex = -1;
}
/// <summary>
/// Deserializes an ATN from its char-array serialized form: verifies version and UUID,
/// then reads states, rules, modes, interval sets, edges, and decisions in the fixed
/// serialization order, optionally running the optimization passes before verification.
/// </summary>
/// <param name="data">The serialized ATN (each value offset by 2, except the version).</param>
/// <param name="optimize">Whether to run the inline/combine/set optimization passes.</param>
/// <returns>The reconstructed <see cref="ATN"/>.</returns>
/// <exception cref="NotSupportedException">Version or UUID does not match this deserializer.</exception>
public static ATN Deserialize(char[] data, bool optimize)
{
    data = (char[])data.Clone();
    // don't adjust the first value since that's the version number
    for (int i = 1; i < data.Length; i++)
    {
        data[i] = (char)(data[i] - 2);
    }
    int p = 0;
    int version = ToInt(data[p++]);
    if (version != SerializedVersion)
    {
        string reason = string.Format(CultureInfo.CurrentCulture, "Could not deserialize ATN with version {0} (expected {1})."
            , version, SerializedVersion);
        throw new NotSupportedException(reason);
    }
    Guid uuid = ToUUID(data, p);
    p += 8;
    if (!uuid.Equals(SerializedUuid))
    {
        string reason = string.Format(CultureInfo.CurrentCulture, "Could not deserialize ATN with UUID {0} (expected {1})."
            , uuid, SerializedUuid);
        throw new NotSupportedException(reason);
    }
    ATNType grammarType = (ATNType)ToInt(data[p++]);
    int maxTokenType = ToInt(data[p++]);
    ATN atn = new ATN(grammarType, maxTokenType);
    //
    // STATES
    //
    IList<Tuple<LoopEndState, int>> loopBackStateNumbers = new List<Tuple<LoopEndState, int>>();
    IList<Tuple<BlockStartState, int>> endStateNumbers = new List<Tuple<BlockStartState, int>>();
    int nstates = ToInt(data[p++]);
    for (int i_1 = 0; i_1 < nstates; i_1++)
    {
        StateType stype = (StateType)ToInt(data[p++]);
        // ignore bad type of states
        if (stype == StateType.InvalidType)
        {
            atn.AddState(null);
            continue;
        }
        int ruleIndex = ToInt(data[p++]);
        if (ruleIndex == char.MaxValue)
        {
            ruleIndex = -1;
        }
        ATNState s = StateFactory(stype, ruleIndex);
        if (stype == StateType.LoopEnd)
        {
            // special case
            int loopBackStateNumber = ToInt(data[p++]);
            loopBackStateNumbers.Add(Tuple.Create((LoopEndState)s, loopBackStateNumber));
        }
        else
        {
            if (s is BlockStartState)
            {
                int endStateNumber = ToInt(data[p++]);
                endStateNumbers.Add(Tuple.Create((BlockStartState)s, endStateNumber));
            }
        }
        atn.AddState(s);
    }
    // delay the assignment of loop back and end states until we know all the state instances have been initialized
    foreach (Tuple<LoopEndState, int> pair in loopBackStateNumbers)
    {
        pair.Item1.loopBackState = atn.states[pair.Item2];
    }
    foreach (Tuple<BlockStartState, int> pair_1 in endStateNumbers)
    {
        pair_1.Item1.endState = (BlockEndState)atn.states[pair_1.Item2];
    }
    // Per-state flags follow: non-greedy decisions, SLL decisions, precedence rules.
    int numNonGreedyStates = ToInt(data[p++]);
    for (int i_2 = 0; i_2 < numNonGreedyStates; i_2++)
    {
        int stateNumber = ToInt(data[p++]);
        ((DecisionState)atn.states[stateNumber]).nonGreedy = true;
    }
    int numSllDecisions = ToInt(data[p++]);
    for (int i_3 = 0; i_3 < numSllDecisions; i_3++)
    {
        int stateNumber = ToInt(data[p++]);
        ((DecisionState)atn.states[stateNumber]).sll = true;
    }
    int numPrecedenceStates = ToInt(data[p++]);
    for (int i_4 = 0; i_4 < numPrecedenceStates; i_4++)
    {
        int stateNumber = ToInt(data[p++]);
        ((RuleStartState)atn.states[stateNumber]).isPrecedenceRule = true;
    }
    //
    // RULES
    //
    int nrules = ToInt(data[p++]);
    if (atn.grammarType == ATNType.Lexer)
    {
        atn.ruleToTokenType = new int[nrules];
        atn.ruleToActionIndex = new int[nrules];
    }
    atn.ruleToStartState = new RuleStartState[nrules];
    for (int i_5 = 0; i_5 < nrules; i_5++)
    {
        int s = ToInt(data[p++]);
        RuleStartState startState = (RuleStartState)atn.states[s];
        startState.leftFactored = ToInt(data[p++]) != 0;
        atn.ruleToStartState[i_5] = startState;
        if (atn.grammarType == ATNType.Lexer)
        {
            // 0xFFFF is the serialized sentinel for EOF / "no action".
            int tokenType = ToInt(data[p++]);
            if (tokenType == unchecked((int)(0xFFFF)))
            {
                tokenType = TokenConstants.Eof;
            }
            atn.ruleToTokenType[i_5] = tokenType;
            int actionIndex = ToInt(data[p++]);
            if (actionIndex == unchecked((int)(0xFFFF)))
            {
                actionIndex = -1;
            }
            atn.ruleToActionIndex[i_5] = actionIndex;
        }
    }
    atn.ruleToStopState = new RuleStopState[nrules];
    foreach (ATNState state in atn.states)
    {
        if (!(state is RuleStopState))
        {
            continue;
        }
        RuleStopState stopState = (RuleStopState)state;
        atn.ruleToStopState[state.ruleIndex] = stopState;
        atn.ruleToStartState[state.ruleIndex].stopState = stopState;
    }
    //
    // MODES
    //
    int nmodes = ToInt(data[p++]);
    for (int i_6 = 0; i_6 < nmodes; i_6++)
    {
        int s = ToInt(data[p++]);
        atn.modeToStartState.Add((TokensStartState)atn.states[s]);
    }
    atn.modeToDFA = new DFA[nmodes];
    for (int i_7 = 0; i_7 < nmodes; i_7++)
    {
        atn.modeToDFA[i_7] = new DFA(atn.modeToStartState[i_7]);
    }
    //
    // SETS
    //
    IList<IntervalSet> sets = new List<IntervalSet>();
    int nsets = ToInt(data[p++]);
    for (int i_8 = 0; i_8 < nsets; i_8++)
    {
        int nintervals = ToInt(data[p]);
        p++;
        IntervalSet set = new IntervalSet();
        sets.Add(set);
        bool containsEof = ToInt(data[p++]) != 0;
        if (containsEof)
        {
            set.Add(-1);
        }
        for (int j = 0; j < nintervals; j++)
        {
            set.Add(ToInt(data[p]), ToInt(data[p + 1]));
            p += 2;
        }
    }
    //
    // EDGES
    //
    int nedges = ToInt(data[p++]);
    for (int i_9 = 0; i_9 < nedges; i_9++)
    {
        int src = ToInt(data[p]);
        int trg = ToInt(data[p + 1]);
        TransitionType ttype = (TransitionType)ToInt(data[p + 2]);
        int arg1 = ToInt(data[p + 3]);
        int arg2 = ToInt(data[p + 4]);
        int arg3 = ToInt(data[p + 5]);
        Transition trans = EdgeFactory(atn, ttype, src, trg, arg1, arg2, arg3, sets);
        // System.out.println("EDGE "+trans.getClass().getSimpleName()+" "+
        // src+"->"+trg+
        // " "+Transition.serializationNames[ttype]+
        // " "+arg1+","+arg2+","+arg3);
        ATNState srcState = atn.states[src];
        srcState.AddTransition(trans);
        p += 6;
    }
    // edges for rule stop states can be derived, so they aren't serialized
    foreach (ATNState state_1 in atn.states)
    {
        bool returningToLeftFactored = state_1.ruleIndex >= 0 && atn.ruleToStartState[state_1.ruleIndex].leftFactored;
        for (int i_10 = 0; i_10 < state_1.NumberOfTransitions; i_10++)
        {
            Transition t = state_1.Transition(i_10);
            if (!(t is RuleTransition))
            {
                continue;
            }
            RuleTransition ruleTransition = (RuleTransition)t;
            bool returningFromLeftFactored = atn.ruleToStartState[ruleTransition.target.ruleIndex].leftFactored;
            if (!returningFromLeftFactored && returningToLeftFactored)
            {
                continue;
            }
            atn.ruleToStopState[ruleTransition.target.ruleIndex].AddTransition(new EpsilonTransition(ruleTransition.followState));
        }
    }
    // Link block start/end states and loopback states now that all edges exist.
    foreach (ATNState state_2 in atn.states)
    {
        if (state_2 is BlockStartState)
        {
            // we need to know the end state to set its start state
            if (((BlockStartState)state_2).endState == null)
            {
                throw new InvalidOperationException();
            }
            // block end states can only be associated to a single block start state
            if (((BlockStartState)state_2).endState.startState != null)
            {
                throw new InvalidOperationException();
            }
            ((BlockStartState)state_2).endState.startState = (BlockStartState)state_2;
        }
        if (state_2 is PlusLoopbackState)
        {
            PlusLoopbackState loopbackState = (PlusLoopbackState)state_2;
            for (int i_10 = 0; i_10 < loopbackState.NumberOfTransitions; i_10++)
            {
                ATNState target = loopbackState.Transition(i_10).target;
                if (target is PlusBlockStartState)
                {
                    ((PlusBlockStartState)target).loopBackState = loopbackState;
                }
            }
        }
        else
        {
            if (state_2 is StarLoopbackState)
            {
                StarLoopbackState loopbackState = (StarLoopbackState)state_2;
                for (int i_10 = 0; i_10 < loopbackState.NumberOfTransitions; i_10++)
                {
                    ATNState target = loopbackState.Transition(i_10).target;
                    if (target is StarLoopEntryState)
                    {
                        ((StarLoopEntryState)target).loopBackState = loopbackState;
                    }
                }
            }
        }
    }
    //
    // DECISIONS
    //
    int ndecisions = ToInt(data[p++]);
    for (int i_11 = 1; i_11 <= ndecisions; i_11++)
    {
        int s = ToInt(data[p++]);
        DecisionState decState = (DecisionState)atn.states[s];
        atn.decisionToState.Add(decState);
        decState.decision = i_11 - 1;
    }
    atn.decisionToDFA = new DFA[ndecisions];
    for (int i_12 = 0; i_12 < ndecisions; i_12++)
    {
        atn.decisionToDFA[i_12] = new DFA(atn.decisionToState[i_12], i_12);
    }
    if (optimize)
    {
        // Run the optimization passes until a fixed point is reached.
        while (true)
        {
            int optimizationCount = 0;
            optimizationCount += InlineSetRules(atn);
            optimizationCount += CombineChainedEpsilons(atn);
            bool preserveOrder = atn.grammarType == ATNType.Lexer;
            optimizationCount += OptimizeSets(atn, preserveOrder);
            if (optimizationCount == 0)
            {
                break;
            }
        }
    }
    IdentifyTailCalls(atn);
    VerifyATN(atn);
    return (atn);
}
/// <summary>
/// Checks whether the transition path <paramref name="parse"/> (recorded with the
/// last edge first) is consistent with the token sequence <paramref name="i"/>:
/// every token-consuming edge (ATOM / SET / NOT_SET / WILDCARD) must accept the
/// type of the corresponding input token.
/// </summary>
/// <param name="parse">The edge path to validate; it is reversed locally.</param>
/// <param name="i">The token sequence the path must account for.</param>
/// <returns>true when the path matches the input; otherwise false.</returns>
private bool Validate(List<Edge> parse, List<IToken> i)
{
    List<Edge> q = parse.ToList();
    q.Reverse();
    // FIX: iterate the token list that was passed in; previously the parameter
    // was ignored and the _input field was always read (all visible callers
    // pass _input, so existing behavior is unchanged).
    List<IToken>.Enumerator ei = i.GetEnumerator();
    List<Edge>.Enumerator eq = q.GetEnumerator();
    bool fei = false;
    bool feq = false;
    for (; ; )
    {
        fei = ei.MoveNext();
        IToken v = ei.Current;
        if (!fei)
        {
            break;
        }
        // Advance over edges that consume no input (rule entries, predicates,
        // actions, epsilons...) until a consuming edge or exhaustion.
        bool empty = true;
        for (; empty; )
        {
            feq = eq.MoveNext();
            if (!feq)
            {
                break;
            }
            Edge x = eq.Current;
            switch (x._type)
            {
                case TransitionType.RULE:
                    empty = true;
                    break;
                case TransitionType.PREDICATE:
                    empty = true;
                    break;
                case TransitionType.ACTION:
                    empty = true;
                    break;
                case TransitionType.ATOM:
                    empty = false;
                    break;
                case TransitionType.EPSILON:
                    empty = true;
                    break;
                case TransitionType.INVALID:
                    empty = true;
                    break;
                case TransitionType.NOT_SET:
                    empty = false;
                    break;
                case TransitionType.PRECEDENCE:
                    empty = true;
                    break;
                case TransitionType.SET:
                    empty = false;
                    break;
                case TransitionType.WILDCARD:
                    empty = false;
                    break;
                default:
                    throw new Exception();
            }
        }
        // NOTE(review): when the edge enumerator is exhausted, Current yields the
        // default (null for the Edge reference type) — the checks below rely on it.
        Edge w = eq.Current;
        if (w == null && v == null)
        {
            return true;
        }
        else if (w == null)
        {
            return false;
        }
        else if (v == null)
        {
            return false;
        }
        switch (w._type)
        {
            case TransitionType.ATOM:
            {
                IntervalSet set = w._label;
                if (set != null && set.Count > 0)
                {
                    if (!set.Contains(v.Type))
                    {
                        return false;
                    }
                }
                break;
            }
            case TransitionType.NOT_SET:
            {
                // NOT_SET edges match the complement of their label over the
                // full user token range.
                IntervalSet set = w._label;
                set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, _parser.Atn.maxTokenType));
                if (set != null && set.Count > 0)
                {
                    if (!set.Contains(v.Type))
                    {
                        return false;
                    }
                }
                break;
            }
            case TransitionType.SET:
            {
                IntervalSet set = w._label;
                if (set != null && set.Count > 0)
                {
                    if (!set.Contains(v.Type))
                    {
                        return false;
                    }
                }
                break;
            }
            case TransitionType.WILDCARD:
                break;
            default:
                throw new Exception();
        }
    }
    return true;
}
/// <summary>
/// Applies this operator to a single interval operand, folding the result into
/// <paramref name="output"/>. The base implementation does not support unary
/// operands; subclasses that do must override it.
/// </summary>
/// <exception cref="NotImplementedException">Always thrown unless overridden.</exception>
internal virtual void Operate(Interval operand1, ref IntervalSet output) =>
    throw new NotImplementedException();
/// <summary>
/// Constructs a transition that matches any symbol NOT contained in
/// <paramref name="set"/>.
/// </summary>
/// <param name="target">The target ATN state; must not be null.</param>
/// <param name="set">The excluded symbol set; may be null.</param>
public NotSetTransition([NotNull] ATNState target, [Nullable] IntervalSet set) : base(target, set) { }
/// <summary>
/// Computes the set of token types that could appear at the caret (taken to be
/// the last non-EOF token of the stream) by buffering the whole token stream,
/// replaying it through the parser's ATN, and collecting the labels of all
/// non-epsilon transitions reachable from every resulting parse path.
/// </summary>
/// <param name="parser">The parser whose ATN is walked.</param>
/// <param name="token_stream">The token stream to analyze; its position is restored.</param>
/// <returns>The union of candidate token types.</returns>
public IntervalSet Compute(Parser parser, CommonTokenStream token_stream)
{
    _input = new List<IToken>();
    _parser = parser;
    _token_stream = token_stream;
    _stop_states = new HashSet<ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStopState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _stop_states.Add(s);
    }
    _start_states = new HashSet<ATNState>();
    foreach (ATNState s in parser.Atn.ruleToStartState.Select(t => parser.Atn.states[t.stateNumber]))
    {
        _start_states.Add(s);
    }
    int currentIndex = _token_stream.Index;
    _token_stream.Seek(0);
    int offset = 1;
    while (true)
    {
        IToken token = _token_stream.LT(offset++);
        _input.Add(token);
        if (token.Type == TokenConstants.EOF)
        {
            break;
        }
        // The caret ends up on the last non-EOF token.
        _cursor = token.TokenIndex;
    }
    // FIX: restore the stream position for the caller, as the
    // (parser, stream, line, col) overload already does; previously
    // currentIndex was saved but never used, leaving the stream at EOF.
    _token_stream.Seek(currentIndex);
    List<List<Edge>> all_parses = EnterState(null);
    IntervalSet result = new IntervalSet();
    // FIX: EnterState may return null (e.g. already-visited start configuration);
    // guard like the (line, col) overload instead of dereferencing.
    if (all_parses != null)
    {
        foreach (List<Edge> p in all_parses)
        {
            HashSet<ATNState> set = ComputeSingle(p);
            foreach (ATNState s in set)
            {
                foreach (Transition t in s.TransitionsArray)
                {
                    switch (t.TransitionType)
                    {
                        case TransitionType.RULE:
                            break;
                        case TransitionType.PREDICATE:
                            break;
                        case TransitionType.WILDCARD:
                            break;
                        default:
                            if (!t.IsEpsilon)
                            {
                                result.AddAll(t.Label);
                            }
                            break;
                    }
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Creates an interval vector for the 12-tone equal-temperament system.
/// </summary>
public IntervalVector(IntervalSet set)
{
    // 6 digits for 12 equal temperament
    // NOTE(review): the IntervalSet argument is not read here — confirm whether
    // the vector is meant to be populated from it elsewhere.
    intervals = new int[6];
}
/// <summary>
/// The default implementation of <see cref="IAntlrErrorStrategy.Sync(Parser)"/>:
/// makes sure the current lookahead symbol is consistent with what the ATN
/// expects at this point. Callable anytime, but ANTLR only generates checks
/// before subrules/loops and on each loop iteration.
/// <p>Implements Jim Idle's magic sync mechanism in closures and optional
/// subrules. At the start of a subrule upon error it performs single-token
/// deletion if possible; otherwise it bails on the current rule via the default
/// error recovery, which consumes until the rule's resynchronization set. If
/// the subrule is optional (<c>(...)?</c>, <c>(...)*</c>, or a block with an
/// empty alternative), the expected set includes what follows the subrule.</p>
/// <p>During loop iteration it consumes until it sees a token that can start a
/// subrule or follow the loop — deliberately aggressive, to stay in the loop as
/// long as possible. If the token-set comparison this requires costs you
/// measurable speed, disable it by overriding this method with an empty body.</p>
/// </summary>
/// <exception cref="Antlr4.Runtime.RecognitionException"/>
public virtual void Sync(Parser recognizer)
{
    ATNState atnState = recognizer.Interpreter.atn.states[recognizer.State];
    // Never attempt to sync while already recovering.
    if (InErrorRecoveryMode(recognizer))
    {
        return;
    }
    ITokenStream stream = (ITokenStream)recognizer.InputStream;
    int lookahead = stream.LA(1);
    // Try the cheaper subset first; a direct hit saves all further work.
    var viable = recognizer.Atn.NextTokens(atnState);
    if (viable.Contains(lookahead))
    {
        nextTokensContext = null;
        nextTokensState = ATNState.InvalidStateNumber;
        return;
    }
    if (viable.Contains(TokenConstants.EPSILON))
    {
        if (nextTokensContext == null)
        {
            // The next token may still mismatch; sync tracks limited
            // information for performance reasons.
            nextTokensContext = recognizer.Context;
            nextTokensState = recognizer.State;
        }
        return;
    }
    StateType kind = atnState.StateType;
    bool atSubruleStart =
        kind == StateType.BlockStart ||
        kind == StateType.StarBlockStart ||
        kind == StateType.PlusBlockStart ||
        kind == StateType.StarLoopEntry;
    if (atSubruleStart)
    {
        // Report the error and recover via single-token deletion if possible.
        if (SingleTokenDeletion(recognizer) != null)
        {
            return;
        }
        throw new InputMismatchException(recognizer);
    }
    if (kind == StateType.PlusLoopBack || kind == StateType.StarLoopBack)
    {
        // At a loop-back decision: report the stray token and consume until
        // something that can continue the iteration (or follow the rule).
        ReportUnwantedToken(recognizer);
        IntervalSet expected = recognizer.GetExpectedTokens();
        IntervalSet resyncSet = expected.Or(GetErrorRecoverySet(recognizer));
        ConsumeUntil(recognizer, resyncSet);
        return;
    }
    // Any other kind of ATN state: nothing useful we can do here.
}
/// <summary>
/// Applies this operator to a pair of interval operands by delegating to
/// <c>GetMinMax</c>, passing <see langword="false"/> as its final argument
/// (whose meaning is defined by that helper).
/// </summary>
internal override void Operate(BinaryInterval operands, ref IntervalSet output) =>
    GetMinMax(operands, ref output, false);
/// <summary>
/// Builds an NFA fragment that matches exactly one symbol lying outside every
/// one of <paramref name="excludedIntervals"/> (the complement taken over the
/// complete interval), via one range transition per allowed interval.
/// </summary>
public static Nfa MatchComplement(params Interval[] excludedIntervals)
{
    var entry = new State();
    var exit = new State();
    var excluded = new IntervalSet(excludedIntervals);
    var allowed = excluded.Complement(IntervalSet.CompleteInterval);
    foreach (var allowedInterval in allowed.Intervals)
    {
        entry.AddTransition(new MatchRangeTransition(exit, allowedInterval));
    }
    return new Nfa(entry, exit);
}
/// <summary>
/// Collects possible tokens which could be matched following the given ATN state.
/// This is essentially the same algorithm as used in the LL1Analyzer class, but
/// here we consider predicates also and use no parser rule context.
/// </summary>
/// <param name="startState">State to start walking from.</param>
/// <param name="stopState">State at which collection stops (along with rule stops).</param>
/// <param name="followSets">Receives one entry per collected follow set.</param>
/// <param name="seen">Guards against revisiting states (cycle protection).</param>
/// <param name="ruleStack">Rule indexes currently being entered (recursion guard).</param>
private void CollectFollowSets(ATNState startState, ATNState stopState,
    LinkedList<FollowSetWithPath> followSets, ISet<ATNState> seen, LinkedList<int> ruleStack)
{
    // FIX (idiom): ISet<T>.Add reports membership — avoids the previous
    // Contains-then-Add double lookup.
    if (!seen.Add(startState))
    {
        return;
    }
    if (startState.Equals(stopState) || startState.StateType == StateType.RuleStop)
    {
        // Reached the end of the walk: record an epsilon set for this path.
        var set = new FollowSetWithPath
        {
            Intervals = IntervalSet.Of(TokenConstants.Epsilon),
            Path = new List<int>(ruleStack)
        };
        followSets.AddLast(set);
        return;
    }
    foreach (var transition in startState.Transitions)
    {
        if (transition.TransitionType == TransitionType.Rule)
        {
            var ruleTransition = (RuleTransition)transition;
            // FIX (idiom): LinkedList<T>.Contains instead of Find(...) != null.
            if (ruleStack.Contains(ruleTransition.target.ruleIndex))
            {
                continue;   // already entering this rule — avoid recursion
            }
            ruleStack.AddLast(ruleTransition.target.ruleIndex);
            this.CollectFollowSets(transition.target, stopState, followSets, seen, ruleStack);
            ruleStack.RemoveLast();
        }
        else if (transition.TransitionType == TransitionType.Predicate)
        {
            // Only walk through predicates that currently evaluate to true.
            if (this.CheckPredicate((PredicateTransition)transition))
            {
                this.CollectFollowSets(transition.target, stopState, followSets, seen, ruleStack);
            }
        }
        else if (transition.IsEpsilon)
        {
            this.CollectFollowSets(transition.target, stopState, followSets, seen, ruleStack);
        }
        else if (transition.TransitionType == TransitionType.Wildcard)
        {
            // Wildcard matches the entire user token range.
            var set = new FollowSetWithPath
            {
                Intervals = IntervalSet.Of(TokenConstants.MinUserTokenType, this.atn.maxTokenType),
                Path = new List<int>(ruleStack)
            };
            followSets.AddLast(set);
        }
        else
        {
            var label = transition.Label;
            if (label != null && label.Count > 0)
            {
                if (transition.TransitionType == TransitionType.NotSet)
                {
                    // Not-set transitions match the complement over the user range.
                    label = label.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, this.atn.maxTokenType));
                }
                var set = new FollowSetWithPath
                {
                    Intervals = label,
                    Path = new List<int>(ruleStack),
                    Following = this.GetFollowingTokens(transition)
                };
                followSets.AddLast(set);
            }
        }
    }
}
/// <summary>
/// Attempts one parse with the decisions in <paramref name="potentialAlternatives"/>
/// pinned to <paramref name="currentPath"/>. A successful parse is recorded in
/// <paramref name="results"/> with a null exception. When the caret is reached
/// mid-decision, the offending decision and its semantically viable alternatives
/// are appended to <paramref name="potentialAlternatives"/>/<paramref name="currentPath"/>
/// so the caller can re-drive the parse down each alternative.
/// </summary>
protected virtual void TryParse(T parser, List<MultipleDecisionData> potentialAlternatives, List<int> currentPath, IDictionary<RuleContext, CaretReachedException> results)
{
    RuleContext parseTree;
    try
    {
        parser.Interpreter.SetFixedDecisions(potentialAlternatives, currentPath);
        parseTree = ParseImpl(parser);
        results[parseTree] = null;
    }
    catch (CaretReachedException ex)
    {
        if (ex.Transitions == null)
        {
            return;
        }
        if (ex.InnerException is FailedPredicateException)
        {
            // A failed predicate path cannot contribute completion candidates.
            return;
        }
        // Walk up to the root context of the partial parse.
        for (parseTree = ex.FinalContext; parseTree.Parent != null; parseTree = parseTree.Parent)
        {
            // intentionally blank
        }
        if (ex.InnerException != null)
        {
            // Caret hit inside a decision: gather the alternatives and keep only
            // those whose semantic predicates pass (when we can evaluate them).
            IntervalSet alts = new IntervalSet();
            IntervalSet semanticAlts = new IntervalSet();
            foreach (ATNConfig c in ex.Transitions.Keys)
            {
                if (semanticAlts.Contains(c.Alt))
                {
                    continue;
                }
                alts.Add(c.Alt);
                var recognizer = parser as Recognizer<IToken, ParserATNSimulator>;
                if (recognizer == null || c.SemanticContext.Eval(recognizer, ex.FinalContext))
                {
                    semanticAlts.Add(c.Alt);
                }
            }
            if (alts.Count != semanticAlts.Count)
            {
                Console.WriteLine("Forest decision {0} reduced to {1} by predicate evaluation.", alts, semanticAlts);
            }
            int inputIndex = parser.InputStream.Index;
            int decision = 0;
            int stateNumber = ex.InnerException.OffendingState;
            ATNState state = parser.Atn.states[stateNumber];
            // Normalize loop-back/plus-block states to the state that actually
            // carries the decision number.
            if (state is StarLoopbackState)
            {
                Debug.Assert(state.NumberOfTransitions == 1 && state.OnlyHasEpsilonTransitions);
                Debug.Assert(state.Transition(0).target is StarLoopEntryState);
                state = state.Transition(0).target;
            }
            else
            {
                PlusBlockStartState plusBlockStartState = state as PlusBlockStartState;
                if (plusBlockStartState != null && plusBlockStartState.decision == -1)
                {
                    state = plusBlockStartState.loopBackState;
                    Debug.Assert(state != null);
                }
            }
            DecisionState decisionState = state as DecisionState;
            if (decisionState != null)
            {
                decision = decisionState.decision;
                if (decision < 0)
                {
                    Debug.WriteLine(string.Format("No decision number found for state {0}.", state.stateNumber));
                }
            }
            else
            {
                if (state != null)
                {
                    Debug.WriteLine(string.Format("No decision number found for state {0}.", state.stateNumber));
                }
                else
                {
                    Debug.WriteLine("No decision number found for state <null>.");
                }
                // continuing is likely to terminate
                return;
            }
            Debug.Assert(semanticAlts.MinElement >= 1);
            Debug.Assert(semanticAlts.MaxElement <= parser.Atn.decisionToState[decision].NumberOfTransitions);
            int[] alternatives = semanticAlts.ToArray();
            MultipleDecisionData decisionData = new MultipleDecisionData(inputIndex, decision, alternatives);
            potentialAlternatives.Add(decisionData);
            // -1 marks "alternative not yet chosen" for this new decision point.
            currentPath.Add(-1);
        }
        else
        {
            // Caret reached outside a decision: keep the partial tree + exception.
            results[parseTree] = ex;
        }
    }
    catch (RecognitionException ex)
    {
        // not a viable path
    }
}
/// <summary>
/// Renders the membership chart onto <paramref name="graphics"/>: background,
/// horizontal 0..1 membership grid, vertical grid at the dimension's significant
/// values (optionally clipped to the support range), and either discrete
/// membership bars or a continuous membership curve, plus the marker lines for
/// a fully specified value.
/// </summary>
/// <param name="graphics">Target surface; its VisibleClipBounds define the canvas.</param>
public void DrawImage(System.Drawing.Graphics graphics)
{
    float width = graphics.VisibleClipBounds.Width;
    float height = graphics.VisibleClipBounds.Height;
    // FIX: Pen/SolidBrush wrap GDI+ handles; create them once and dispose them.
    // Previously the Pen was never disposed and a new SolidBrush(ColorGrid)
    // leaked on every DrawString call.
    using (Pen penGrid = new Pen(ColorGrid, 1))
    using (SolidBrush brushGrid = new SolidBrush(ColorGrid))
    {
        graphics.FillRectangle(BrushBackground, 0, 0, width, height);
        #region Horizontal Grid
        for (int i = 0; i < 5; i++)
        {
            float y = (height - MarginTop - MarginBottom) / 4 * i + MarginTop;
            graphics.DrawLine(penGrid, MarginLeft, y, width - MarginRight, y);
            string t = "";
            switch (4 - i)
            {
                case 0: t = "0"; break;
                case 1: t = "¼"; break;
                case 2: t = "½"; break;
                case 3: t = "¾"; break;
                case 4: t = "1"; break;
            }
            graphics.DrawString(t, FontGrid, brushGrid, MarginLeft / 2, y - 4);
        }
        graphics.DrawString("µ", FontGrid, brushGrid, MarginLeft / 8, MarginTop - 4);
        #endregion
        #region Vertical grid
        if (_variableDimension is IContinuousDimension)
        {
            IContinuousDimension dim = (IContinuousDimension)_variableDimension;
            if (dim.Unit != "")
            {
                SizeF size = graphics.MeasureString(dim.Unit, FontGrid);
                graphics.DrawString(dim.Unit, FontGrid, brushGrid, width - MarginRight - size.Width, height - MarginBottom + (float)(size.Height * 1.25));
            }
        }
        decimal minValue;
        decimal maxValue;
        List<decimal> significantValues = _variableDimension.SignificantValues.ToList<decimal>();
        if (this.SupportOnly)
        {
            if (_variableDimension is IContinuousDimension)
            {
                // Clip the grid to the support range, slightly widened.
                IContinuousDimension dim = (IContinuousDimension)_variableDimension;
                decimal ls = _relation.GetLowerSupportBound(inputsWithoutVariableInput);
                decimal us = _relation.GetUpperSupportBound(inputsWithoutVariableInput);
                decimal distance = us - ls;
                ls = ls - (distance * (decimal)(SupportSurroundings / 100));
                us = us + (distance * (decimal)(SupportSurroundings / 100));
                if (ls < dim.MinValue)
                {
                    ls = dim.MinValue;
                }
                if (us > dim.MaxValue)
                {
                    us = dim.MaxValue;
                }
                //If there is too little left, though, use at least two.
                while (significantValues.Count > 2)
                {
                    if (significantValues[1] < ls)
                    {
                        significantValues.RemoveAt(0);
                    }
                    else if (significantValues[significantValues.Count - 2] > us)
                    {
                        significantValues.RemoveAt(significantValues.Count - 1);
                    }
                    else
                    {
                        break;
                    }
                }
            }
            else
            {
                // Discrete case: drop non-member values, keeping at least three.
                int valuesLeft = significantValues.Count;
                for (int i = significantValues.Count - 1; i >= 0; i--)
                {
                    if (isMember(significantValues[i]) == 0)
                    {
                        significantValues.Remove(significantValues[i]);
                        valuesLeft--;
                        if (valuesLeft <= 3)
                        {
                            break;
                        }
                    }
                }
            }
        }
        if (_variableDimension is IContinuousDimension)
        {
            minValue = significantValues[0];
            maxValue = significantValues[significantValues.Count - 1];
        }
        else
        {
            // Discrete values are laid out by index rather than magnitude.
            minValue = 0;
            maxValue = significantValues.Count - 1;
        }
        // (An unused "toggledMode" flag derived from MaxValueCountInSingleRow was
        // removed here; it was never read.)
        bool oddCount = ((significantValues.Count % 2) == 1);
        uint c = 0;
        foreach (decimal gridValue in significantValues)
        {
            decimal value;
            c++;
            // Alternate labels between two rows so they don't overlap.
            bool sub = (((c % 2) == 0) && oddCount) || (((c % 2) == 1) && !oddCount);
            string label;
            if (_variableDimension is IContinuousDimension)
            {
                label = gridValue.ToString();
                value = gridValue;
            }
            else
            {
                IDiscreteDimension dim = (IDiscreteDimension)_variableDimension;
                if (dim.DefaultSet != null)
                {
                    label = dim.DefaultSet.GetMember(gridValue).Caption;
                }
                else
                {
                    label = "#" + gridValue.ToString("F0");
                }
                value = c - 1;
            }
            float x = ((width - MarginLeft - MarginRight) / ((float)(maxValue - minValue))) * ((float)(value - minValue)) + MarginLeft;
            graphics.DrawLine(penGrid, x, MarginTop, x, height - MarginBottom);
            SizeF size = graphics.MeasureString(label, FontGrid);
            graphics.DrawString(label, FontGrid, brushGrid, x - size.Width / 2, height - MarginBottom + size.Height / 4 + (sub ? size.Height : 0));
            #region Memberships for discrete set
            if (_variableDimension is IDiscreteDimension)
            {
                graphics.DrawLine(PenLine, x, MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - isMember(gridValue)), x, height - MarginBottom);
                if (FullySpecified && gridValue == SpecifiedValue.Value)
                {
                    graphics.DrawLine(PenValue, x, MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - isMember(gridValue)), x, height - MarginBottom);
                    graphics.DrawLine(PenValue, MarginLeft, MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - isMember(gridValue)), x, MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - isMember(gridValue)));
                }
            }
            #endregion
        }
        #endregion
        #region Line for continuous dimension
        if (_variableDimension is IContinuousDimension)
        {
            IntervalSet intervals = _relation.GetFunction(inputsWithoutVariableInput);
            foreach (Interval interval in intervals.Intervals)
            {
                // Draw only intervals overlapping the visible [minValue, maxValue].
                if ((interval.LowerBound >= minValue && interval.UpperBound <= maxValue) ||
                    (interval.LowerBound <= minValue && interval.UpperBound >= minValue) ||
                    (interval.LowerBound <= maxValue && interval.UpperBound >= maxValue))
                {
                    decimal intervalMinValue = (interval.LowerBound < minValue ? minValue : interval.LowerBound);
                    decimal intervalMaxValue = (interval.UpperBound > maxValue ? maxValue : interval.UpperBound);
                    float intervalMinX = MarginLeft + ((width - MarginLeft - MarginRight) / (float)(maxValue - minValue)) * ((float)(intervalMinValue - minValue));
                    float intervalMaxX = MarginLeft + ((width - MarginLeft - MarginRight) / (float)(maxValue - minValue)) * ((float)(intervalMaxValue - minValue));
                    for (float x = intervalMinX; x <= intervalMaxX; x++)
                    {
                        decimal percentage;
                        if ((intervalMaxX - intervalMinX) == 0)
                        {
                            percentage = 0;
                        }
                        else
                        {
                            percentage = (decimal)((x - intervalMinX) / (intervalMaxX - intervalMinX));
                        }
                        decimal value = ((intervalMaxValue - intervalMinValue) * percentage) + intervalMinValue;
                        double membership = isMember(value);
                        //note that if y1 == y2 && x1 == x2, no point is plotted.
                        graphics.DrawLine(PenLine, x, MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - membership), x, height - MarginBottom);
                    }
                }
            }
            if (FullySpecified)
            {
                decimal percentage;
                if (SpecifiedValue.Value - minValue == 0)
                {
                    percentage = 0;
                }
                else
                {
                    percentage = (SpecifiedValue.Value - minValue) / (maxValue - minValue);
                }
                float x = (width - MarginLeft - MarginRight) * (float)percentage + MarginLeft;
                float y = MarginTop + (height - MarginBottom - MarginTop) * (float)(1 - SpecifiedMembership.Value);
                graphics.DrawLine(PenValue, x, y, x, height - MarginBottom);
                graphics.DrawLine(PenValue, MarginLeft, y, x, y);
            }
        }
        #endregion
    }
}
/// <summary>
/// Compute set of tokens that can follow <paramref name="s"/> in the ATN in the
/// specified <paramref name="ctx"/>.
/// <p/>
/// If <paramref name="ctx"/> is <see cref="PredictionContext.EmptyLocal"/> and
/// <paramref name="stopState"/> or the end of the rule containing
/// <paramref name="s"/> is reached, <see cref="TokenConstants.EPSILON"/> is added
/// to the result set. If <paramref name="ctx"/> is not
/// <see cref="PredictionContext.EmptyLocal"/> and <paramref name="addEOF"/> is
/// <see langword="true"/> and <paramref name="stopState"/> or the end of the
/// outermost rule is reached, <see cref="TokenConstants.EOF"/> is added to the
/// result set.
/// </summary>
/// <param name="s">the ATN state.</param>
/// <param name="stopState">
/// the ATN state to stop at. This can be a <see cref="BlockEndState"/> to detect
/// epsilon paths through a closure.
/// </param>
/// <param name="ctx">
/// The outer context, or <see cref="PredictionContext.EmptyLocal"/> if the outer
/// context should not be used.
/// </param>
/// <param name="look">The result lookahead set.</param>
/// <param name="lookBusy">
/// A set used for preventing epsilon closures in the ATN from causing a stack
/// overflow. Outside code should pass <c>new HashSet&lt;ATNConfig&gt;</c> for this argument.
/// </param>
/// <param name="calledRuleStack">
/// A set used for preventing left recursion in the ATN from causing a stack
/// overflow. Outside code should pass <c>new BitSet()</c> for this argument.
/// </param>
/// <param name="seeThruPreds">
/// <see langword="true"/> to treat semantic predicates as implicitly
/// <see langword="true"/> and "see through them", otherwise
/// <see langword="false"/> to treat semantic predicates as opaque and add
/// <see cref="HitPred"/> to the result if one is encountered.
/// </param>
/// <param name="addEOF">
/// Add <see cref="TokenConstants.EOF"/> to the result if the end of the outermost
/// context is reached. This parameter has no effect if <paramref name="ctx"/> is
/// <see cref="PredictionContext.EmptyLocal"/>.
/// </param>
protected internal virtual void Look(ATNState s, ATNState stopState, PredictionContext ctx, IntervalSet look, HashSet<ATNConfig> lookBusy, BitSet calledRuleStack, bool seeThruPreds, bool addEOF)
{
    // Closure guard: each (state, alt 0, ctx) configuration is explored once.
    ATNConfig c = new ATNConfig(s, 0, ctx);
    if (!lookBusy.Add(c))
    {
        return;
    }
    if (s == stopState)
    {
        if (ctx == null)
        {
            look.Add(TokenConstants.EPSILON);
            return;
        }
        else if (ctx.IsEmpty && addEOF)
        {
            look.Add(TokenConstants.EOF);
            return;
        }
    }
    if (s is RuleStopState)
    {
        if (ctx == null)
        {
            look.Add(TokenConstants.EPSILON);
            return;
        }
        else if (ctx.IsEmpty && addEOF)
        {
            look.Add(TokenConstants.EOF);
            return;
        }
        if (ctx != PredictionContext.EMPTY)
        {
            // Pop back to each invoking state recorded in the context chain.
            for (int i = 0; i < ctx.Size; i++)
            {
                ATNState returnState = atn.states[ctx.GetReturnState(i)];
                // Temporarily clear the rule's recursion bit so the return path
                // can re-enter it; restore on the way out.
                bool removed = calledRuleStack.Get(returnState.ruleIndex);
                try
                {
                    calledRuleStack.Clear(returnState.ruleIndex);
                    Look(returnState, stopState, ctx.GetParent(i), look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
                }
                finally
                {
                    if (removed)
                    {
                        calledRuleStack.Set(returnState.ruleIndex);
                    }
                }
            }
            return;
        }
    }
    int n = s.NumberOfTransitions;
    for (int i_1 = 0; i_1 < n; i_1++)
    {
        Transition t = s.Transition(i_1);
        if (t is RuleTransition)
        {
            RuleTransition ruleTransition = (RuleTransition)t;
            // Skip rules we are already inside (left-recursion guard).
            if (calledRuleStack.Get(ruleTransition.ruleIndex))
            {
                continue;
            }
            PredictionContext newContext = SingletonPredictionContext.Create(ctx, ruleTransition.followState.stateNumber);
            try
            {
                calledRuleStack.Set(ruleTransition.target.ruleIndex);
                Look(t.target, stopState, newContext, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
            }
            finally
            {
                calledRuleStack.Clear(ruleTransition.target.ruleIndex);
            }
        }
        else
        {
            if (t is AbstractPredicateTransition)
            {
                if (seeThruPreds)
                {
                    Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
                }
                else
                {
                    look.Add(HitPred);
                }
            }
            else
            {
                if (t.IsEpsilon)
                {
                    Look(t.target, stopState, ctx, look, lookBusy, calledRuleStack, seeThruPreds, addEOF);
                }
                else
                {
                    if (t is WildcardTransition)
                    {
                        look.AddAll(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
                    }
                    else
                    {
                        IntervalSet set = t.Label;
                        if (set != null)
                        {
                            if (t is NotSetTransition)
                            {
                                set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, atn.maxTokenType));
                            }
                            look.AddAll(set);
                        }
                    }
                }
            }
        }
    }
}
/// <summary>
/// Creates a named discrete set on <paramref name="dimension"/>, backed by a
/// fresh <see cref="IntervalSet"/> bound to that dimension.
/// </summary>
public DiscreteSet(IDiscreteDimension dimension, string caption) : base(dimension, caption) { _intervals = new IntervalSet(dimension); }
/// <summary>
/// Produces a copy of <paramref name="t"/> with the open/close delimiter
/// placeholder replaced by the concrete delimiter character. Returns null for
/// rule transitions, for transition kinds that are not handled, and for
/// transitions that reference neither placeholder.
/// </summary>
private static Transition updateTransition(Transition t, char openDelimiter, char closeDelimiter)
{
    if (t is RuleTransition)
    {
        return null;
    }
    if (t is AtomTransition)
    {
        AtomTransition atomTransition = (AtomTransition)t;
        int newLabel;
        if (atomTransition.label == OpenDelimiterPlaceholder)
        {
            newLabel = openDelimiter;
        }
        else if (atomTransition.label == CloseDelimiterPlaceholder)
        {
            newLabel = closeDelimiter;
        }
        else
        {
            return null;
        }
        return new AtomTransition(t.target, newLabel);
    }
    if (t is NotSetTransition)
    {
        NotSetTransition notSetTransition = (NotSetTransition)t;
        if (notSetTransition.set.Contains(OpenDelimiterPlaceholder))
        {
            return new NotSetTransition(t.target, ReplacePlaceholder(notSetTransition.set, OpenDelimiterPlaceholder, openDelimiter));
        }
        if (notSetTransition.set.Contains(CloseDelimiterPlaceholder))
        {
            return new NotSetTransition(t.target, ReplacePlaceholder(notSetTransition.set, CloseDelimiterPlaceholder, closeDelimiter));
        }
        return null;
    }
    if (t is SetTransition)
    {
        SetTransition setTransition = (SetTransition)t;
        if (setTransition.set.Contains(OpenDelimiterPlaceholder))
        {
            return createSetTransition(t.target, ReplacePlaceholder(setTransition.set, OpenDelimiterPlaceholder, openDelimiter));
        }
        if (setTransition.set.Contains(CloseDelimiterPlaceholder))
        {
            return createSetTransition(t.target, ReplacePlaceholder(setTransition.set, CloseDelimiterPlaceholder, closeDelimiter));
        }
        return null;
    }
    if (t is RangeTransition)
    {
        RangeTransition rangeTransition = (RangeTransition)t;
        // Ranges are rewritten as sets with the placeholder swapped out.
        IntervalSet rangeSet = IntervalSet.Of(rangeTransition.from, rangeTransition.to);
        if (rangeTransition.from <= OpenDelimiterPlaceholder && rangeTransition.to >= OpenDelimiterPlaceholder)
        {
            return createSetTransition(t.target, ReplacePlaceholder(rangeSet, OpenDelimiterPlaceholder, openDelimiter));
        }
        if (rangeTransition.from <= CloseDelimiterPlaceholder && rangeTransition.to >= CloseDelimiterPlaceholder)
        {
            return createSetTransition(t.target, ReplacePlaceholder(rangeSet, CloseDelimiterPlaceholder, closeDelimiter));
        }
        return null;
    }
    // Unhandled transition kinds are left unchanged.
    return null;
}

/// <summary>
/// Returns a read-only copy of <paramref name="source"/> with
/// <paramref name="removeLabel"/> removed and <paramref name="addLabel"/> added.
/// (Shared by the NotSet/Set/Range branches above, which previously each
/// duplicated this clone-remove-add-freeze sequence.)
/// </summary>
private static IntervalSet ReplacePlaceholder(IntervalSet source, int removeLabel, int addLabel)
{
    IntervalSet set = new IntervalSet(source);
    set.Remove(removeLabel);
    set.Add(addLabel);
    set.SetReadonly(true);
    return set;
}
/// <summary>
/// Creates a continuous set on <paramref name="dimension"/> backed by the given
/// interval collection; delegates entirely to the base constructor.
/// </summary>
public ContinuousSet(IContinuousDimension dimension, IntervalSet intervals) : base(dimension, intervals) { }
/// <summary>
/// Computes the set of token types that could appear at the position given by
/// <paramref name="line"/>/<paramref name="col"/>: tokens are buffered up to
/// that position (or EOF), replayed through the parser's ATN, paths that do not
/// consume the whole input are discarded, and the labels of all reachable
/// non-epsilon transitions are unioned into the result. The token stream's
/// position is restored before returning.
/// </summary>
public IntervalSet Compute(Parser parser, CommonTokenStream token_stream, int line, int col)
{
    _input = new List<IToken>();
    _parser = parser;
    _token_stream = token_stream;
    _stop_states = new HashSet<ATNState>(
        parser.Atn.ruleToStopState.Select(t => parser.Atn.states[t.stateNumber]));
    _start_states = new HashSet<ATNState>(
        parser.Atn.ruleToStartState.Select(t => parser.Atn.states[t.stateNumber]));
    // Buffer tokens up to the caret position, remembering where the caller was.
    int savedIndex = _token_stream.Index;
    _token_stream.Seek(0);
    for (int lookahead = 1; ; )
    {
        IToken token = _token_stream.LT(lookahead++);
        _input.Add(token);
        _cursor = token.TokenIndex;
        if (token.Type == TokenConstants.EOF)
        {
            break;
        }
        if (token.Line >= line && token.Column >= col)
        {
            break;
        }
    }
    _token_stream.Seek(savedIndex);
    var allParses = EnterState(new Edge()
    {
        _index = 0,
        _index_at_transition = 0,
        _to = _parser.Atn.states[0],
        _type = TransitionType.EPSILON
    });
    // Remove last token on input.
    _input.RemoveAt(_input.Count - 1);
    // Eliminate all paths that don't consume all input.
    var survivors = new List<List<Edge>>();
    if (allParses != null)
    {
        foreach (var path in allParses)
        {
            //System.Console.Error.WriteLine(PrintSingle(p));
            if (Validate(path, _input))
            {
                survivors.Add(path);
            }
        }
    }
    allParses = survivors;
    if (allParses != null && _log_closure)
    {
        foreach (var path in allParses)
        {
            System.Console.Error.WriteLine("Path " + PrintSingle(path));
        }
    }
    var result = new IntervalSet();
    if (allParses != null)
    {
        foreach (var path in allParses)
        {
            HashSet<ATNState> closure = ComputeSingle(path);
            if (_log_closure)
            {
                System.Console.Error.WriteLine("All states for path " + string.Join(" ", closure.ToList()));
            }
            foreach (ATNState state in closure)
            {
                foreach (Transition transition in state.TransitionsArray)
                {
                    // Rule, predicate and wildcard transitions contribute
                    // nothing; every other non-epsilon transition contributes
                    // its label to the candidate set.
                    if (transition.TransitionType == TransitionType.RULE)
                    {
                        continue;
                    }
                    if (transition.TransitionType == TransitionType.PREDICATE)
                    {
                        continue;
                    }
                    if (transition.TransitionType == TransitionType.WILDCARD)
                    {
                        continue;
                    }
                    if (!transition.IsEpsilon)
                    {
                        result.AddAll(transition.Label);
                    }
                }
            }
        }
    }
    return result;
}
/// <summary>
/// Collapses sibling decision alternatives of the shape
/// epsilon -&gt; (atom | range | set) -&gt; block-end into a single epsilon edge to a
/// new state carrying one merged set/range/atom transition.
/// </summary>
/// <param name="atn">The ATN whose decision states are rewritten in place.</param>
/// <param name="preserveOrder">
/// When true the optimization is skipped entirely, because it does not preserve
/// edge order (which matters e.g. for lexers).
/// </param>
/// <returns>The total number of alternative paths removed.</returns>
private static int OptimizeSets(ATN atn, bool preserveOrder)
{
    if (preserveOrder)
    {
        // this optimization currently doesn't preserve edge order.
        return(0);
    }
    int removedPaths = 0;
    IList<DecisionState> decisions = atn.decisionToState;
    foreach (DecisionState decision in decisions)
    {
        // Indexes (into the decision's optimized transitions) of alternatives
        // eligible for merging, kept as an IntervalSet of ints.
        IntervalSet setTransitions = new IntervalSet();
        for (int i = 0; i < decision.NumberOfOptimizedTransitions; i++)
        {
            Transition epsTransition = decision.GetOptimizedTransition(i);
            if (!(epsTransition is EpsilonTransition))
            {
                continue;
            }
            if (epsTransition.target.NumberOfOptimizedTransitions != 1)
            {
                continue;
            }
            Transition transition = epsTransition.target.GetOptimizedTransition(0);
            if (!(transition.target is BlockEndState))
            {
                continue;
            }
            if (transition is NotSetTransition)
            {
                // TODO: not yet implemented
                continue;
            }
            if (transition is AtomTransition || transition is RangeTransition || transition is SetTransition)
            {
                setTransitions.Add(i);
            }
        }
        // Merging only pays off when at least two alternatives qualify.
        if (setTransitions.Count <= 1)
        {
            continue;
        }
        // Keep every non-mergeable alternative as-is.
        IList<Transition> optimizedTransitions = new List<Transition>();
        for (int i_1 = 0; i_1 < decision.NumberOfOptimizedTransitions; i_1++)
        {
            if (!setTransitions.Contains(i_1))
            {
                optimizedTransitions.Add(decision.GetOptimizedTransition(i_1));
            }
        }
        ATNState blockEndState = decision.GetOptimizedTransition(setTransitions.MinElement).target.GetOptimizedTransition(0).target;
        // Union the labels of all mergeable alternatives.
        IntervalSet matchSet = new IntervalSet();
        for (int i_2 = 0; i_2 < setTransitions.GetIntervals().Count; i_2++)
        {
            Interval interval = setTransitions.GetIntervals()[i_2];
            for (int j = interval.a; j <= interval.b; j++)
            {
                Transition matchTransition = decision.GetOptimizedTransition(j).target.GetOptimizedTransition(0);
                if (matchTransition is NotSetTransition)
                {
                    throw new NotSupportedException("Not yet implemented.");
                }
                else
                {
                    matchSet.AddAll(matchTransition.Label);
                }
            }
        }
        // Pick the cheapest transition kind that represents the merged set.
        Transition newTransition;
        if (matchSet.GetIntervals().Count == 1)
        {
            if (matchSet.Count == 1)
            {
                newTransition = new AtomTransition(blockEndState, matchSet.MinElement);
            }
            else
            {
                Interval matchInterval = matchSet.GetIntervals()[0];
                newTransition = new RangeTransition(blockEndState, matchInterval.a, matchInterval.b);
            }
        }
        else
        {
            newTransition = new SetTransition(blockEndState, matchSet);
        }
        ATNState setOptimizedState = new BasicState();
        setOptimizedState.SetRuleIndex(decision.ruleIndex);
        atn.AddState(setOptimizedState);
        setOptimizedState.AddTransition(newTransition);
        optimizedTransitions.Add(new EpsilonTransition(setOptimizedState));
        removedPaths += decision.NumberOfOptimizedTransitions - optimizedTransitions.Count;
        // Swap the decision's optimized transition list for the reduced one.
        if (decision.IsOptimized)
        {
            while (decision.NumberOfOptimizedTransitions > 0)
            {
                decision.RemoveOptimizedTransition(decision.NumberOfOptimizedTransitions - 1);
            }
        }
        foreach (Transition transition_1 in optimizedTransitions)
        {
            decision.AddOptimizedTransition(transition_1);
        }
    }
    return(removedPaths);
}
// Step to state and continue parsing input.
// Returns a list of transitions leading to a state that accepts input.
//
// Walks the ATN depth-first from the target of edge `t` against the token stream.
// Each returned path is a list of edges with the deepest edge first (the incoming
// edge is appended after the recursive results; see the linkage check below).
// Returns null when this (state, token) pair was already visited or nothing matched.
// Pass null for `t` to start from ATN state 0 at token index 0.
List<List<Edge>> EnterState(Edge t)
{
    int here = ++entry_value; // per-call id from an incrementing counter; used only in log output
    int index_on_transition; // NOTE(review): assigned but never read in this method
    int token_index;
    ATNState state;
    if (t == null)
    {
        // Initial call: begin at the ATN's first state with the first input token.
        token_index = 0;
        index_on_transition = 0;
        state = _parser.Atn.states[0];
    }
    else
    {
        token_index = t._index;
        index_on_transition = t._index_at_transition;
        state = t._to;
    }
    var input_token = _input[token_index];
    if (_log_parse)
    {
        System.Console.Error.WriteLine("Entry " + here
            + " State " + state
            + " tokenIndex " + token_index
            + " " + input_token.Text
        );
    }
    // Upon reaching the cursor, return match.
    var at_match = input_token.TokenIndex >= _cursor;
    if (at_match)
    {
        if (_log_parse)
        {
            System.Console.Error.Write("Entry " + here + " return ");
        }
        // The path so far (just the incoming edge) is itself a match.
        var res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            var str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Memoize on (state, token index) to cut off cycles and repeated work.
    if (_visited.ContainsKey(new Pair<ATNState, int>(state, token_index)))
    {
        return (null);
    }
    _visited[new Pair<ATNState, int>(state, token_index)] = true;
    var result = new List<List<Edge>>();
    if (this._stop_states.Contains(state))
    {
        // Reached a stop state: the incoming edge alone is a completed path.
        if (_log_parse)
        {
            System.Console.Error.Write("Entry " + here + " return ");
        }
        var res = new List<List<Edge>>() { new List<Edge>() { t } };
        if (_log_parse)
        {
            var str = PrintResult(res);
            System.Console.Error.WriteLine(str);
        }
        return (res);
    }
    // Search all transitions from state.
    foreach (Transition transition in state.TransitionsArray)
    {
        List<List<Edge>> matches = null;
        switch (transition.TransitionType)
        {
            case TransitionType.RULE:
            {
                // Rule invocation: recurse into the invoked rule's submachine, then for each
                // path that completed the submachine, continue from the rule's follow state.
                var rule = (RuleTransition)transition;
                var sub_state = rule.target; // NOTE(review): assigned but never read
                matches = this.EnterState(new Edge()
                {
                    _from = state,
                    _to = rule.target,
                    _follow = rule.followState,
                    _label = rule.Label,
                    _type = rule.TransitionType,
                    _index = token_index,
                    _index_at_transition = token_index
                });
                if (matches != null && matches.Count == 0)
                {
                    throw new Exception();
                }
                if (matches != null)
                {
                    List<List<Edge>> new_matches = new List<List<Edge>>();
                    foreach (var match in matches)
                    {
                        var f = match.First(); // "to" is possibly final state of submachine.
                        var l = match.Last(); // "to" is start state of submachine. NOTE(review): never read
                        var is_final = this._stop_states.Contains(f._to);
                        var is_at_caret = f._index >= _cursor; // NOTE(review): computed but never read
                        if (!is_final)
                        {
                            new_matches.Add(match);
                        }
                        else
                        {
                            // Submachine completed: continue from the follow state via a
                            // synthetic epsilon edge and splice the two paths together.
                            var xxx = this.EnterState(new Edge()
                            {
                                _from = f._to,
                                _to = rule.followState,
                                _label = null,
                                _type = TransitionType.EPSILON,
                                _index = f._index,
                                _index_at_transition = f._index
                            });
                            if (xxx != null && xxx.Count == 0)
                            {
                                throw new Exception();
                            }
                            if (xxx != null)
                            {
                                foreach (var y in xxx)
                                {
                                    var copy = y.ToList();
                                    foreach (var q in match)
                                    {
                                        copy.Add(q);
                                    }
                                    new_matches.Add(copy);
                                }
                            }
                        }
                    }
                    matches = new_matches;
                }
            }
            break;
            case TransitionType.PREDICATE:
                // Only follow the edge when the semantic predicate passes.
                if (this.CheckPredicate((PredicateTransition)transition))
                {
                    matches = this.EnterState(new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    });
                    if (matches != null && matches.Count == 0)
                    {
                        throw new Exception();
                    }
                }
                break;
            case TransitionType.WILDCARD:
                // Wildcard consumes the current token unconditionally (note token_index + 1).
                matches = this.EnterState(new Edge()
                {
                    _from = state,
                    _to = transition.target,
                    _label = transition.Label,
                    _type = transition.TransitionType,
                    _index = token_index + 1,
                    _index_at_transition = token_index
                });
                if (matches != null && matches.Count == 0)
                {
                    throw new Exception();
                }
                break;
            default:
                if (transition.IsEpsilon)
                {
                    // Epsilon edges advance the state without consuming input.
                    matches = this.EnterState(new Edge()
                    {
                        _from = state,
                        _to = transition.target,
                        _label = transition.Label,
                        _type = transition.TransitionType,
                        _index = token_index,
                        _index_at_transition = token_index
                    });
                    if (matches != null && matches.Count == 0)
                    {
                        throw new Exception();
                    }
                }
                else
                {
                    var set = transition.Label;
                    if (set != null && set.Count > 0)
                    {
                        if (transition.TransitionType == TransitionType.NOT_SET)
                        {
                            // NOT_SET edges match the complement over the full user token range.
                            set = set.Complement(IntervalSet.Of(TokenConstants.MinUserTokenType, _parser.Atn.maxTokenType));
                        }
                        if (set.Contains(input_token.Type))
                        {
                            // Labelled edge matched the current token: consume it and recurse.
                            matches = this.EnterState(new Edge()
                            {
                                _from = state,
                                _to = transition.target,
                                _label = transition.Label,
                                _type = transition.TransitionType,
                                _index = token_index + 1,
                                _index_at_transition = token_index
                            });
                            if (matches != null && matches.Count == 0)
                            {
                                throw new Exception();
                            }
                        }
                    }
                }
                break;
        }
        if (matches != null)
        {
            // Append the incoming edge to each returned path and sanity-check that
            // consecutive edges in the (deepest-first) path actually link up:
            // an earlier (deeper) edge's _from must equal the next edge's _to.
            foreach (List<Edge> match in matches)
            {
                var x = match.ToList();
                if (t != null)
                {
                    x.Add(t);
                    Edge prev = null;
                    foreach (var z in x)
                    {
                        var ff = z._to;
                        if (prev != null)
                        {
                            if (prev._from != ff)
                            {
                                System.Console.Error.WriteLine("Fail " + PrintSingle(x));
                                Debug.Assert(false);
                            }
                        }
                        prev = z;
                    }
                }
                result.Add(x);
            }
        }
    }
    if (result.Count == 0)
    {
        return (null);
    }
    if (_log_parse)
    {
        System.Console.Error.Write("Entry " + here + " return ");
        var str = PrintResult(result);
        System.Console.Error.WriteLine(str);
    }
    return (result);
}
public override IntervalSet GetExpectedTokens() { LastExpectedTokens = base.GetExpectedTokens(); return(LastExpectedTokens); }