// Re-detects conflicts in an inadequate state and tries to resolve them
// via grammar hints and operator precedence. Lookaheads already resolved
// or designated as jump lookaheads are excluded from the conflict set.
private void RecomputeAndResolveConflicts(ParserState state) {
  if (!state.BuilderData.IsInadequate)
    return;
  var stateData = state.BuilderData;
  stateData.Conflicts.Clear();
  var seenLookaheads = new BnfTermSet();
  // Reduce/reduce: a lookahead shared by two or more reduce items is a conflict.
  foreach (var reduceItem in stateData.ReduceItems) {
    foreach (var lookahead in reduceItem.Lookaheads) {
      if (seenLookaheads.Contains(lookahead))
        stateData.Conflicts.Add(lookahead);
      seenLookaheads.Add(lookahead);
    } //foreach lookahead
  } //foreach reduceItem
  // Shift/reduce: a shiftable term that is also a reduce lookahead conflicts,
  // unless it is already handled as a jump lookahead.
  foreach (var shiftTerm in stateData.ShiftTerms) {
    if (seenLookaheads.Contains(shiftTerm) && !stateData.JumpLookaheads.Contains(shiftTerm))
      stateData.Conflicts.Add(shiftTerm);
  }
  // Drop lookaheads that are already resolved or routed to non-canonical states.
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
  stateData.Conflicts.ExceptWith(stateData.JumpLookaheads);
  // Resolve what we can: explicit hints first, then operator precedence.
  ResolveConflictsByHints(state);
  ResolveConflictsByPrecedence(state);
}//method
} //method

#endregion

#region Analyzing and resolving conflicts

// Detects shift-reduce and reduce-reduce conflicts in every inadequate state and
// records them in state.BuilderData.Conflicts. Detection only - no resolution here.
private void ComputeConflicts() {
  foreach (var state in _data.States) {
    if (!state.BuilderData.IsInadequate) {
      continue;
    }
    //first detect conflicts
    var stateData = state.BuilderData;
    stateData.Conflicts.Clear();
    var allLkhds = new BnfTermSet();
    //reduce/reduce: a lookahead seen in more than one reduce item is a conflict ----------------------
    foreach (var item in stateData.ReduceItems) {
      foreach (var lkh in item.Lookaheads) {
        if (allLkhds.Contains(lkh)) {
          state.BuilderData.Conflicts.Add(lkh);
        }
        allLkhds.Add(lkh);
      } //foreach lkh
    } //foreach item
    //shift/reduce: a shiftable terminal that is also some reduce item's lookahead --------------------
    foreach (var term in stateData.ShiftTerminals) {
      if (allLkhds.Contains(term)) {
        stateData.Conflicts.Add(term);
      }
    }
  }
}//method
/// <summary>
/// Verifies AST configuration for all grammar terms: each content term that should
/// produce an AST node must have either a node creator or a node type. Terms with
/// no resolvable node type are reported in a single error message.
/// </summary>
public virtual void VerifyLanguageData() {
  var grammarData = Context.Language.GrammarData;
  //Collect all terminals and non-terminals
  //SL does not understand co/contravariance, so doing merge one-by-one
  var allTerms = new BnfTermSet();
  foreach (var terminal in grammarData.Terminals)
    allTerms.Add(terminal);
  foreach (var nonTerminal in grammarData.NonTerminals)
    allTerms.Add(nonTerminal);
  var missing = new BnfTermList();
  foreach (var term in allTerms) {
    //only content terminals need AST nodes
    var asTerminal = term as Terminal;
    if (asTerminal != null && asTerminal.Category != TokenCategory.Content)
      continue;
    if (term.Flags.IsSet(TermFlags.NoAstNode))
      continue;
    var config = term.AstConfig;
    //a creator is already assigned - nothing to verify
    if (config.NodeCreator != null || config.DefaultNodeCreator != null)
      continue;
    //We must check NodeType; try to fill it in from defaults first
    if (config.NodeType == null)
      config.NodeType = GetDefaultNodeType(term);
    if (config.NodeType != null)
      config.DefaultNodeCreator = CompileDefaultNodeCreator(config.NodeType);
    else
      missing.Add(term);
  }
  if (missing.Count > 0)
    // AST node type is not specified for term {0}. Either assign Term.AstConfig.NodeType, or specify default type(s) in AstBuilder.
    Context.AddMessage(ErrorLevel.Error, SourceLocation.Empty, Resources.ErrNodeTypeNotSetOn, string.Join(", ", missing));
  Context.Language.AstDataVerified = true;
}
// Returns the subset of conflicts that are shift-reduce:
// conflict terms that are also shiftable in this state.
public BnfTermSet GetShiftReduceConflicts() {
  var shiftReduce = new BnfTermSet();
  shiftReduce.UnionWith(Conflicts);
  shiftReduce.IntersectWith(ShiftTerms);
  return shiftReduce;
}
// Returns the subset of conflicts that are reduce-reduce:
// conflict terms that are NOT shiftable in this state.
public BnfTermSet GetReduceReduceConflicts() {
  var reduceReduce = new BnfTermSet();
  reduceReduce.UnionWith(Conflicts);
  reduceReduce.ExceptWith(ShiftTerms);
  return reduceReduce;
}
} //method

#endregion

#region Analyzing and resolving conflicts

// Detects conflicts in every inadequate state, then resolves them in two passes:
// first by grammar hints, then by operator precedence. Whatever remains is
// reported and assigned a default action.
private void ComputeAndResolveConflicts() {
  foreach (var state in Data.States) {
    if (!state.BuilderData.IsInadequate) {
      continue;
    }
    //first detect conflicts
    var stateData = state.BuilderData;
    stateData.Conflicts.Clear();
    var allLkhds = new BnfTermSet();
    //reduce/reduce: lookahead shared by two or more reduce items --------------------------------------
    foreach (var item in stateData.ReduceItems) {
      foreach (var lkh in item.Lookaheads) {
        if (allLkhds.Contains(lkh)) {
          state.BuilderData.Conflicts.Add(lkh);
        }
        allLkhds.Add(lkh);
      } //foreach lkh
    } //foreach item
    //shift/reduce: shiftable terminal that is also a reduce lookahead ---------------------------------
    foreach (var term in stateData.ShiftTerminals) {
      if (allLkhds.Contains(term)) {
        stateData.Conflicts.Add(term);
      }
    }
    //Now resolve conflicts by hints and precedence -------------------------------------------------------
    if (stateData.Conflicts.Count > 0) {
      //Hints
      foreach (var conflict in stateData.Conflicts) {
        ResolveConflictByHints(state, conflict);
      }
      stateData.Conflicts.ExceptWith(state.BuilderData.ResolvedConflicts);
      //Precedence
      foreach (var conflict in stateData.Conflicts) {
        ResolveConflictByPrecedence(state, conflict);
      }
      stateData.Conflicts.ExceptWith(state.BuilderData.ResolvedConflicts);
      //if we still have conflicts, report and assign default action
      if (stateData.Conflicts.Count > 0) {
        ReportAndCreateDefaultActionsForConflicts(state);
      }
    } //if Conflicts.Count > 0
  }
} //method
/// <summary>
/// Verifies AST configuration for all grammar terms: each content term that should
/// produce an AST node must have either a node creator or a node type assigned;
/// terms with no resolvable node type are reported in a single error message.
/// </summary>
public virtual void VerifyLanguageData() {
  var gd = Context.Language.GrammarData;
  //Collect all terminals and non-terminals
  var terms = new BnfTermSet();
  //SL does not understand co/contravariance, so doing merge one-by-one
  foreach (var t in gd.Terminals) { terms.Add(t); }
  foreach (var t in gd.NonTerminals) { terms.Add(t); }
  var missingList = new BnfTermList();
  foreach (var term in terms) {
    var terminal = term as Terminal;
    if (terminal != null && terminal.Category != TokenCategory.Content) {
      continue; //only content terminals
    }
    if (term.Flags.IsSet(TermFlags.NoAstNode)) { continue; }
    var config = term.AstConfig;
    if (config.NodeCreator != null || config.DefaultNodeCreator != null) { continue; }
    //We must check NodeType; try to fill it in from defaults first
    if (config.NodeType == null) { config.NodeType = GetDefaultNodeType(term); }
    if (config.NodeType == null) {
      missingList.Add(term);
    } else {
      config.DefaultNodeCreator = CompileDefaultNodeCreator(config.NodeType);
    }
  }
  if (missingList.Count > 0) {
    // AST node type is not specified for term {0}. Either assign Term.AstConfig.NodeType, or specify default type(s) in AstBuilder.
    // FIX: was 'missingList.ToString()', which renders the list's type name rather than
    // its contents; join the term names explicitly so the error actually lists them
    // (consistent with the other VerifyLanguageData implementation in this file).
    Context.AddMessage(ErrorLevel.Error, SourceLocation.Empty, Resources.ErrNodeTypeNotSetOn, string.Join(", ", missingList));
  }
  Context.Language.AstDataVerified = true;
}
// Reports the state's shift-reduce and reduce-reduce conflict sets,
// emitting one message per non-empty category.
private void ReportParseConflicts(ParserState state, BnfTermSet shiftReduceConflicts, BnfTermSet reduceReduceConflicts) {
  if (shiftReduceConflicts.Count > 0)
    ReportParseConflicts("Shift-reduce conflict.", state, shiftReduceConflicts);
  if (reduceReduceConflicts.Count > 0)
    ReportParseConflicts("Reduce-reduce conflict.", state, reduceReduceConflicts);
}
// Formats the item as its core; for final items also appends the lookahead set
// in brackets, minus the lookaheads the caller asked to suppress.
public string ToString(BnfTermSet exceptLookaheads) {
  var result = Core.ToString();
  if (!Core.IsFinal)
    return result;
  var shownLookaheads = new BnfTermSet();
  shownLookaheads.UnionWith(Lookaheads);
  shownLookaheads.ExceptWith(exceptLookaheads);
  return result + " [" + shownLookaheads.ToString() + "]";
}
}//method

/* Detecting conflicts that cannot be resolved by tail wrapping
 * 1. Shift-reduce conflicts. If inadequate state S has shift item based on the same core as source
 *    of one of reduced lookaheads of reduce item, then the conflict is unresolvable -
 *    no wrapping of lookahead would resolve ambiguity
 * 2. Reduce-reduce conflict. If reduce items in inadequate state have reduced lookaheads
 *    with sources having the same core (LR0 item) then we have unresolvable conflict. Wrapping of the item tail would produce
 *    the same new non-terminal as lookahead in both conflicting reduce items.
 */
private void DetectConflictsUnresolvableByRestructuring(ParserState state) {
  //compute R-R and S-R conflicting lookaheads
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  var conflictSources = new LR0ItemSet();
  foreach (var conflict in state.BuilderData.Conflicts) {
    var nonConflictingSourceCores = new LR0ItemSet();
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      foreach (var source in reduceItem.ReducedLookaheadSources) {
        //only sources whose next term is the conflicting lookahead matter here
        if (source.Core.Current != conflict) {
          continue;
        }
        if (state.BuilderData.Cores.Contains(source.Core)) //we have unresolvable shift-reduce
        {
          srConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else if (nonConflictingSourceCores.Contains(source.Core)) //unresolvable reduce-reduce
        {
          rrConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else {
          nonConflictingSourceCores.Add(source.Core);
        }
      } //foreach source
    } //foreach item
  } //foreach conflict
  if (srConflicts.Count > 0) {
    ReportParseConflicts("Ambiguous grammar, unresolvable shift-reduce conflicts.", state, srConflicts);
  }
  if (rrConflicts.Count > 0) {
    ReportParseConflicts("Ambiguous grammar, unresolvable reduce-reduce conflicts.", state, rrConflicts);
  }
  //create default actions and remove them from conflict list, so we don't deal with them anymore
  CreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
} // method
}//method

// Installs default parser actions for conflicts that could not be resolved otherwise,
// then marks them as resolved so they are no longer treated as conflicts.
private void CreateDefaultActionsForConflicts(ParserState state, BnfTermSet shiftReduceConflicts, BnfTermSet reduceReduceConflicts) {
  var stateData = state.BuilderData;
  //Create default actions for these conflicts. For shift-reduce, default action is shift, and shift action already
  // exist for all shifts from the state, so we don't need to do anything. For reduce-reduce create reduce actions
  // for the first reduce item (whatever comes first in the set).
  foreach (var conflict in reduceReduceConflicts) {
    var reduceItems = stateData.ReduceItems.SelectByLookahead(conflict);
    var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
    state.Actions[conflict] = action;
  }
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(shiftReduceConflicts);
  stateData.ResolvedConflicts.UnionWith(reduceReduceConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
// Warns when AST node info (NodeType/NodeCreator) is assigned on non-terminals while
// the CreateAst language flag is off - the info would be silently ignored at parse time.
private void ValidateGrammar() {
  //Check CreateAst flag and give a warning if this flag is not set, but node types or NodeCreator methods are assigned
  // in any of non-terminals
  if (_grammar.FlagIsSet(LanguageFlags.CreateAst))
    return;
  var configured = new BnfTermSet();
  foreach (var nonTerminal in _grammarData.NonTerminals) {
    if (nonTerminal.NodeCreator != null || nonTerminal.NodeType != null)
      configured.Add(nonTerminal);
  }
  if (configured.Count == 0)
    return;
  this._language.Errors.Add("Warning: LanguageFlags.CreateAst flag is not set in grammar's Flags, but there are" +
    " non-terminals that have NodeType or NodeCreator property set. If you want Irony to construct AST tree during parsing," +
    " set CreateAst flag in Grammar. Non-terminals: " + configured.ToString());
}//method
// Computes the reduced "expected terms" set used in syntax-error messages.
private void ComputeReportedExpectedSet(ParserState state) {
  //2.1. Non-terminals with a display name represent whole groups of terminals:
  //     put them in the reduced set and remember all of their Firsts as covered.
  var reducedSet = state.ReportedExpectedSet = new BnfTermSet();
  var coveredFirsts = new BnfTermSet();
  foreach (var term in state.ExpectedTerms) {
    var nonTerminal = term as NonTerminal;
    if (nonTerminal == null)
      continue;
    if (reducedSet.Contains(nonTerminal) || string.IsNullOrEmpty(nonTerminal.DisplayName) || coveredFirsts.Contains(nonTerminal))
      continue;
    reducedSet.Add(nonTerminal);
    coveredFirsts.UnionWith(nonTerminal.Firsts);
  }
  //2.2. Add remaining expected terms that are not already covered by a group's Firsts.
  foreach (var term in state.ExpectedTerms) {
    if (reducedSet.Contains(term) || coveredFirsts.Contains(term))
      continue;
    if (term is Terminal || !string.IsNullOrEmpty(term.DisplayName))
      reducedSet.Add(term);
  }
  //Clean-up reduced set: pseudo-terminals should never be reported to the user.
  //(Set.Remove is a no-op when the element is absent.)
  reducedSet.Remove(_grammar.Eof);
  reducedSet.Remove(_grammar.SyntaxError);
}
// Detects shift-reduce and reduce-reduce conflicts in every inadequate state,
// then resolves them in two passes (grammar hints, then operator precedence);
// anything left over is reported and receives a default action.
private void ComputeAndResolveConflicts() {
  foreach (var state in Data.States) {
    if (!state.BuilderData.IsInadequate)
      continue;
    //first detect conflicts
    var stateData = state.BuilderData;
    stateData.Conflicts.Clear();
    var seenLookaheads = new BnfTermSet();
    //reduce/reduce: the same lookahead appearing in two reduce items ---------------------------------
    foreach (var reduceItem in stateData.ReduceItems) {
      foreach (var lookahead in reduceItem.Lookaheads) {
        if (seenLookaheads.Contains(lookahead))
          stateData.Conflicts.Add(lookahead);
        seenLookaheads.Add(lookahead);
      }//foreach lookahead
    }//foreach reduceItem
    //shift/reduce: a shiftable terminal that is also a reduce lookahead ------------------------------
    foreach (var shiftTerm in stateData.ShiftTerminals) {
      if (seenLookaheads.Contains(shiftTerm))
        stateData.Conflicts.Add(shiftTerm);
    }
    if (stateData.Conflicts.Count == 0)
      continue;
    //Resolution pass 1: explicit grammar hints
    foreach (var conflict in stateData.Conflicts)
      ResolveConflictByHints(state, conflict);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
    //Resolution pass 2: operator precedence
    foreach (var conflict in stateData.Conflicts)
      ResolveConflictByPrecedence(state, conflict);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
    //if we still have conflicts, report and assign default actions
    if (stateData.Conflicts.Count > 0)
      ReportAndCreateDefaultActionsForConflicts(state);
  }
}
// Formats and records a single conflict error message for the given state
// and its conflicting lookahead set.
private void ReportParseConflicts(string description, ParserState state, BnfTermSet lookaheads) {
  var message = string.Format("{0} State {1}, lookaheads: {2}", description, state.Name, lookaheads);
  AddError(message);
}
// Detects conflicts that cannot be handled by the non-canonical NLALR method directly,
// but may be fixed by grammar transformation: the relevant LR0 cores are collected so
// the user can be advised to add WrapTail hints. Detected conflicts are reported,
// given default actions, and marked as resolved.
private void DetectNlalrFixableConflicts(ParserState state) {
  var stateData = state.BuilderData;
  //compute R-R and S-R conflicting lookaheads
  var reduceLkhds = new BnfTermSet();
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  foreach (var reduceItem in state.BuilderData.ReduceItems) {
    foreach (var lkh in reduceItem.ReducedLookaheads) {
      if (stateData.ShiftTerms.Contains(lkh)) {
        //precedence-enabled terms are resolved elsewhere by precedence; skip them here
        if (!lkh.IsSet(TermOptions.UsePrecedence))
          srConflicts.Add(lkh); //S-R conflict
      }
      else if (reduceLkhds.Contains(lkh))
        rrConflicts.Add(lkh); //R-R conflict
      reduceLkhds.Add(lkh);
    }//foreach lkh
  }//foreach item
  if (srConflicts.Count == 0 && rrConflicts.Count == 0)
    return;
  //Collect all cores to recommend for adding WrapTail hint.
  var allConflicts = new BnfTermSet();
  allConflicts.UnionWith(srConflicts);
  allConflicts.UnionWith(rrConflicts);
  foreach (var conflict in allConflicts) {
    var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
    foreach (var item in conflictingShiftItems)
      if (!item.Core.IsInitial) //only non-initial
        _coresToAddWrapTailHint.Add(item.Core);
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
      foreach (var source in conflictingSources)
        _coresToAddWrapTailHint.Add(source.Core);
    }
  }
  //Report and install default actions (shift for S-R; first production for R-R).
  ReportAndCreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(srConflicts);
  stateData.ResolvedConflicts.UnionWith(rrConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
}//method

/* Detecting conflicts that cannot be resolved by tail wrapping
 1. Shift-reduce conflicts. If inadequate state S has shift item based on the same core as source
 of one of reduced lookaheads of reduce item, then the conflict is unresolvable -
 no wrapping of lookahead would resolve ambiguity
 2. Reduce-reduce conflict. If reduce items in inadequate state have reduced lookaheads
 with sources having the same core (LR0 item) then we have unresolvable conflict. Wrapping of the item tail would produce
 the same new non-terminal as lookahead in both conflicting reduce items.
*/
private void DetectConflictsUnresolvableByRestructuring(ParserState state) {
  //compute R-R and S-R conflicting lookaheads
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  var conflictSources = new LR0ItemSet();
  foreach (var conflict in state.BuilderData.Conflicts) {
    var nonConflictingSourceCores = new LR0ItemSet();
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      foreach (var source in reduceItem.ReducedLookaheadSources) {
        //only sources whose next term is the conflicting lookahead are relevant
        if (source.Core.Current != conflict) continue;
        if (state.BuilderData.Cores.Contains(source.Core)) { //we have unresolvable shift-reduce
          srConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else if (nonConflictingSourceCores.Contains(source.Core)) { //unresolvable reduce-reduce
          rrConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else
          nonConflictingSourceCores.Add(source.Core);
      }//foreach source
    }//foreach item
  }//foreach conflict
  if (srConflicts.Count > 0)
    ReportParseConflicts("Ambiguous grammar, unresolvable shift-reduce conflicts.", state, srConflicts);
  if (rrConflicts.Count > 0)
    ReportParseConflicts("Ambiguous grammar, unresolvable reduce-reduce conflicts.", state, rrConflicts);
  //create default actions and remove them from conflict list, so we don't deal with them anymore
  CreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
} // method
}//method

#region some explanations
//Computes non-canonical lookaheads and jump lookaheads - those that cause jump
// to non-canonical state
// We are doing it top-down way, starting from most reduced lookaheads - they are not conflicting.
// (If there were conflicting reduced lookaheads in a state initially, the grammar transformation algorithm
// should have already wrapped them into non-conflicting "tail" non-terminals.)
// We want to eliminate reduced lookaheads as much as possible, and replace them with expanded "child"
// terms, to have only those non-canonical lookaheads that are absolutely necessary.
// So for each reduced lookahead we check if we can replace it with its expanded, "child" terms
// (from DirectFirsts set). We do it only if lookaheads child terms are all non-conflicting as lookaheads in
// the state. If however, at least one child is conflicting, the reduced parent should stay.
// What if we have some children conflicting and some not? We leave the parent reduced lookahead in state,
// to cover (hide) the conflicting children, but we also add non-conflicting children as well, to allow
// the parser automaton to use them (in canonical state) as soon as they are recognized, without need
// to reduce the parent and switch back to canonical state.
#endregion
private void ComputeStateNonCanonicalLookaheads(ParserState state) {
  var stateData = state.BuilderData; //rename for shorter code
  var jumps = stateData.JumpLookaheads; // conflicting lookaheads, that must result in jump to non-canonical state
  var valids = stateData.NonCanonicalLookaheads; // valid non-canonical lookaheads, non-terminals only
  jumps.Clear();
  valids.Clear();
  var alreadyChecked = new BnfTermSet();
  var toCheck = new BnfTermSet(); //terms to check for expansion
  //1. precompute initial set to check
  foreach (var reduceItem in stateData.ReduceItems)
    toCheck.UnionWith(reduceItem.ReducedLookaheads);
  toCheck.RemoveWhere(t => t is Terminal); //we are interested in non-terminals only
  //2. Try to expand all initial (reduced) lookaheads, and replace original lookaheads with expanded versions
  while (toCheck.Count > 0) { // do until no terms to check left
    var lkhInCheck = toCheck.First() as NonTerminal;
    toCheck.Remove(lkhInCheck);
    //to prevent repeated checking of mutually recursive terms
    if (alreadyChecked.Contains(lkhInCheck)) continue;
    alreadyChecked.Add(lkhInCheck);
    //Now check children for conflicts; go through all direct firsts of lkhInCheck and check them for conflicts
    bool hasJumpChild = false;
    foreach (var lkhChild in lkhInCheck.DirectFirsts) {
      if (lkhChild == lkhInCheck) continue; //skip self-recursive firsts
      if (jumps.Contains(lkhChild)) { hasJumpChild = true; continue; }
      var ntChild = lkhChild as NonTerminal;
      if (ntChild != null && valids.Contains(ntChild)) continue;
      //the child has not been tested yet; check if it is a conflict in current state
      var occurCount = GetLookaheadOccurenceCount(state, lkhChild);
      if (occurCount > 1) { //possible conflict, check precedence
        if (lkhChild.IsSet(TermOptions.UsePrecedence)) {
          //precedence will disambiguate, so the child is a valid lookahead
          if (ntChild != null) {
            valids.Add(ntChild); //if it is terminal, it is valid;
            if (!alreadyChecked.Contains(lkhChild)) toCheck.Add(ntChild);
          } //if ntChild
        } else { //conflict!
          hasJumpChild = true;
          jumps.Add(lkhChild);
          //if it is non-terminal, add its Firsts to conflict as well
          if (ntChild != null) {
            jumps.UnionWith(ntChild.Firsts);
            //valids.ExceptWith(ntChild.Firsts);
          }
        }//if IsSet... else...
      } else { //occurCount == 1
        //no conflict: if it is non-terminal, add it to toCheck set to check in the future
        if (ntChild != null && !alreadyChecked.Contains(ntChild)) toCheck.Add(ntChild); //if nonterminal and not checked yet, add it to toCheck for further checking
      }//if ...else...
    }//foreach lkhChild
    //Ok, we finished checking all direct children; if at least one of them has conflict,
    // then lkhInCheck (parent) must stay as a lookahead - we cannot fully expand it replacing by all children
    if (hasJumpChild) valids.Add(lkhInCheck);
  }//while toCheck.Count > 0
  //remove conflicts
  stateData.Conflicts.Clear();
}//method
// Detects conflicts that cannot be handled by the non-canonical NLALR method directly,
// but may be fixed by grammar transformation: the relevant LR0 cores are collected so
// the user can be advised to add WrapTail hints. Detected conflicts are reported,
// given default actions, and marked as resolved.
private void DetectNlalrFixableConflicts(ParserState state) {
  var stateData = state.BuilderData;
  //compute R-R and S-R conflicting lookaheads
  var reduceLkhds = new BnfTermSet();
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  foreach (var reduceItem in state.BuilderData.ReduceItems) {
    foreach (var lkh in reduceItem.ReducedLookaheads) {
      if (stateData.ShiftTerms.Contains(lkh)) {
        //precedence-enabled terms are resolved elsewhere by precedence; skip them here
        if (!lkh.IsSet(TermOptions.UsePrecedence)) {
          srConflicts.Add(lkh); //S-R conflict
        }
      } else if (reduceLkhds.Contains(lkh)) {
        rrConflicts.Add(lkh); //R-R conflict
      }
      reduceLkhds.Add(lkh);
    } //foreach lkh
  } //foreach item
  if (srConflicts.Count == 0 && rrConflicts.Count == 0) {
    return;
  }
  //Collect all cores to recommend for adding WrapTail hint.
  var allConflicts = new BnfTermSet();
  allConflicts.UnionWith(srConflicts);
  allConflicts.UnionWith(rrConflicts);
  foreach (var conflict in allConflicts) {
    var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
    foreach (var item in conflictingShiftItems) {
      if (!item.Core.IsInitial) //only non-initial
      {
        _coresToAddWrapTailHint.Add(item.Core);
      }
    }
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
      foreach (var source in conflictingSources) {
        _coresToAddWrapTailHint.Add(source.Core);
      }
    }
  }
  //still report them as conflicts
  ReportParseConflicts(state, srConflicts, rrConflicts);
  //create default actions and remove conflicts from list so we don't deal with them anymore
  //(R-R default: reduce the first matching production; S-R default: shift actions already exist)
  foreach (var conflict in rrConflicts) {
    var reduceItems = stateData.ReduceItems.SelectByReducedLookahead(conflict);
    var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
    state.Actions[conflict] = action;
  }
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(srConflicts);
  stateData.ResolvedConflicts.UnionWith(rrConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}//method
}//method

#region some explanations
//Computes non-canonical lookaheads and jump lookaheads - those that cause jump
// to non-canonical state
// We are doing it top-down way, starting from most reduced lookaheads - they are not conflicting.
// (If there were conflicting reduced lookaheads in a state initially, the grammar transformation algorithm
// should have already wrapped them into non-conflicting "tail" non-terminals.)
// We want to eliminate reduced lookaheads as much as possible, and replace them with expanded "child"
// terms, to have only those non-canonical lookaheads that are absolutely necessary.
// So for each reduced lookahead we check if we can replace it with its expanded, "child" terms
// (from DirectFirsts set). We do it only if lookaheads child terms are all non-conflicting as lookaheads in
// the state. If however, at least one child is conflicting, the reduced parent should stay.
// What if we have some children conflicting and some not? We leave the parent reduced lookahead in state,
// to cover (hide) the conflicting children, but we also add non-conflicting children as well, to allow
// the parser automaton to use them (in canonical state) as soon as they are recognized, without need
// to reduce the parent and switch back to canonical state.
#endregion
private void ComputeStateNonCanonicalLookaheads(ParserState state) {
  var stateData = state.BuilderData; //rename for shorter code
  var jumps = stateData.JumpLookaheads; // conflicting lookaheads, that must result in jump to non-canonical state
  var valids = stateData.NonCanonicalLookaheads; // valid non-canonical lookaheads, non-terminals only
  jumps.Clear();
  valids.Clear();
  var alreadyChecked = new BnfTermSet();
  var toCheck = new BnfTermSet(); //terms to check for expansion
  //1. precompute initial set to check
  foreach (var reduceItem in stateData.ReduceItems) {
    toCheck.UnionWith(reduceItem.ReducedLookaheads);
  }
  toCheck.RemoveWhere(t => t is Terminal); //we are interested in non-terminals only
  //2. Try to expand all initial (reduced) lookaheads, and replace original lookaheads with expanded versions
  while (toCheck.Count > 0) // do until no terms to check left
  {
    var lkhInCheck = toCheck.First() as NonTerminal;
    toCheck.Remove(lkhInCheck);
    //to prevent repeated checking of mutually recursive terms
    if (alreadyChecked.Contains(lkhInCheck)) {
      continue;
    }
    alreadyChecked.Add(lkhInCheck);
    //Now check children for conflicts; go through all direct firsts of lkhInCheck and check them for conflicts
    bool hasJumpChild = false;
    foreach (var lkhChild in lkhInCheck.DirectFirsts) {
      if (lkhChild == lkhInCheck) { //skip self-recursive firsts
        continue;
      }
      if (jumps.Contains(lkhChild)) {
        hasJumpChild = true;
        continue;
      }
      var ntChild = lkhChild as NonTerminal;
      if (ntChild != null && valids.Contains(ntChild)) {
        continue;
      }
      //the child has not been tested yet; check if it is a conflict in current state
      var occurCount = GetLookaheadOccurenceCount(state, lkhChild);
      if (occurCount > 1) { //possible conflict, check precedence
        if (lkhChild.IsSet(TermOptions.UsePrecedence)) {
          //precedence will disambiguate, so the child is a valid lookahead
          if (ntChild != null) {
            valids.Add(ntChild); //if it is terminal, it is valid;
            if (!alreadyChecked.Contains(lkhChild)) {
              toCheck.Add(ntChild);
            }
          } //if ntChild
        } else { //conflict!
          hasJumpChild = true;
          jumps.Add(lkhChild);
          //if it is non-terminal, add its Firsts to conflict as well
          if (ntChild != null) {
            jumps.UnionWith(ntChild.Firsts);
            //valids.ExceptWith(ntChild.Firsts);
          }
        } //if IsSet... else...
      } else //occurCount == 1
      //no conflict: if it is non-terminal, add it to toCheck set to check in the future
      {
        if (ntChild != null && !alreadyChecked.Contains(ntChild)) {
          toCheck.Add(ntChild); //if nonterminal and not checked yet, add it to toCheck for further checking
        }
      }//if ...else...
    }//foreach lkhChild
    //Ok, we finished checking all direct children; if at least one of them has conflict,
    // then lkhInCheck (parent) must stay as a lookahead - we cannot fully expand it replacing by all children
    if (hasJumpChild) {
      valids.Add(lkhInCheck);
    }
  }//while toCheck.Count > 0
  //remove conflicts
  stateData.Conflicts.Clear();
}//method
// Reports the remaining (unresolved) conflicts and installs default actions:
// shift-reduce defaults to shift (shift actions already exist), reduce-reduce
// defaults to reducing the first production in the conflict set. All handled
// conflicts are then moved to ResolvedConflicts.
private void ReportAndCreateDefaultActionsForConflicts(ParserState state, BnfTermSet shiftReduceConflicts, BnfTermSet reduceReduceConflicts) {
  var stateData = state.BuilderData;
  if (shiftReduceConflicts.Count > 0)
    _language.Errors.Add(GrammarErrorLevel.Conflict, state, "Shift-reduce conflict. State {0}, lookaheads [{1}]. Set to shift as preferred action.", state, shiftReduceConflicts.ToString());
  if (reduceReduceConflicts.Count > 0)
    _language.Errors.Add(GrammarErrorLevel.Conflict, state, "Reduce-reduce conflict. State {0}, lookaheads: {1}. Set to reduce on first production in conflict set.", state, reduceReduceConflicts.ToString());
  // Shift-reduce: nothing to install - the shift actions are already in place;
  // reporting above is all that is needed.
  // Reduce-reduce: install a reduce action on the first matching reduce item's production.
  foreach (var conflict in reduceReduceConflicts) {
    var candidates = stateData.ReduceItems.SelectByLookahead(conflict);
    state.Actions[conflict] = ParserAction.CreateReduce(candidates.First().Core.Production);
  }
  // Mark everything handled here as resolved.
  stateData.ResolvedConflicts.UnionWith(shiftReduceConflicts);
  stateData.ResolvedConflicts.UnionWith(reduceReduceConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
// Reports the state's conflict sets, one message per non-empty category.
private void ReportParseConflicts(ParserState state, BnfTermSet shiftReduceConflicts, BnfTermSet reduceReduceConflicts) {
  if (shiftReduceConflicts.Count > 0) {
    ReportParseConflicts("Shift-reduce conflict.", state, shiftReduceConflicts);
  }
  if (reduceReduceConflicts.Count > 0) {
    ReportParseConflicts("Reduce-reduce conflict.", state, reduceReduceConflicts);
  }
}
// Emits a warning when AST node settings (AstNodeType/AstNodeCreator) exist on
// non-terminals while LanguageFlags.CreateAst is off - the settings would be ignored.
private void ValidateGrammar() {
  //Check CreateAst flag and give a warning if this flag is not set, but node types or NodeCreator methods are assigned
  // in any of non-terminals
  if (_grammar.FlagIsSet(LanguageFlags.CreateAst))
    return;
  var configured = new BnfTermSet();
  foreach (var nonTerminal in _grammarData.NonTerminals) {
    if (nonTerminal.AstNodeCreator != null || nonTerminal.AstNodeType != null)
      configured.Add(nonTerminal);
  }
  if (configured.Count > 0)
    this._language.Errors.Add(GrammarErrorLevel.Warning, null, "Warning: AstNodeType or AstNodeCreator is set in some non-terminals, but LanguageFlags.CreateAst flag is not set.");
}
//computes DirectFirsts, Firsts for non-terminals and productions
private static void ComputeFirsts(GrammarData data) {
  //compute prod direct firsts and initialize NT.Firsts
  foreach (var nt in data.NonTerminals) {
    foreach (var prod in nt.Productions) {
      foreach (var term in prod.RValues) {
        prod.DirectFirsts.Add(term);
        nt.DirectFirsts.Add(term);
        nt.Firsts.Add(term);
        //stop at the first non-nullable term: later terms cannot start the production
        if (!term.IsSet(TermOptions.IsNullable)) {
          break; //foreach term
        }
      }
    }
  }//foreach nt
  //propagate NT.Firsts: iterate to a fixpoint; the _lastChanged/_lastChecked
  // timestamps skip re-merging Firsts of non-terminals that have not changed
  // since this non-terminal was last examined
  int time = 0;
  var done = false;
  var newSet = new BnfTermSet();
  while (!done) {
    done = true;
    foreach (var nt in data.NonTerminals) {
      newSet.Clear();
      foreach (var first in nt.Firsts) {
        var ntFirst = first as NonTerminal;
        if (ntFirst != null && ntFirst._lastChanged >= nt._lastChecked) {
          newSet.UnionWith(ntFirst.Firsts);
        }
      }
      nt._lastChecked = time++;
      var oldCount = nt.Firsts.Count;
      nt.Firsts.UnionWith(newSet);
      if (nt.Firsts.Count > oldCount) {
        //set grew: another sweep is needed
        done = false;
        nt._lastChanged = time;
      }
    } //foreach nt
  } //while
  //compute prod.Firsts: direct firsts plus the Firsts of each non-terminal direct first
  foreach (var nt in data.NonTerminals) {
    foreach (var prod in nt.Productions) {
      prod.Firsts.UnionWith(prod.DirectFirsts);
      foreach (var directFirst in prod.DirectFirsts) {
        var ntDirectFirst = directFirst as NonTerminal;
        if (ntDirectFirst != null) {
          prod.Firsts.UnionWith(ntDirectFirst.Firsts);
        }
      } //foreach directFirst
    } //foreach prod
  } //foreach nt
} //method
// Detects conflicts that cannot be handled by the non-canonical NLALR method
// directly, but may be fixed by grammar transformation (adding WrapTail hints).
// Side effects: fills _coresToAddWrapTailHint, reports the conflicts, installs
// default reduce actions for R-R conflicts, and marks them resolved.
private void DetectNlalrFixableConflicts(ParserState state) {
  var stateData = state.BuilderData;
  //compute R-R and S-R conflicting lookaheads
  var reduceLkhds = new BnfTermSet();
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  foreach (var reduceItem in state.BuilderData.ReduceItems) {
    foreach (var lkh in reduceItem.ReducedLookaheads) {
      if (stateData.ShiftTerms.Contains(lkh)) {
        // Shiftable term that is also a reduced lookahead; terms that use
        // precedence are resolved elsewhere and not counted here.
        if (!lkh.IsSet(TermOptions.UsePrecedence))
          srConflicts.Add(lkh); //S-R conflict
      } else if (reduceLkhds.Contains(lkh))
        rrConflicts.Add(lkh); //R-R conflict: lookahead claimed by a second reduce item
      reduceLkhds.Add(lkh);
    } //foreach lkh
  } //foreach item
  if (srConflicts.Count == 0 && rrConflicts.Count == 0) return;
  //Collect all cores to recommend for adding WrapTail hint.
  var allConflicts = new BnfTermSet();
  allConflicts.UnionWith(srConflicts);
  allConflicts.UnionWith(rrConflicts);
  foreach (var conflict in allConflicts) {
    var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
    foreach (var item in conflictingShiftItems)
      if (!item.Core.IsInitial) //only non-initial
        _coresToAddWrapTailHint.Add(item.Core);
    // Also collect the cores of the lookahead sources on the reduce side.
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
      foreach (var source in conflictingSources)
        _coresToAddWrapTailHint.Add(source.Core);
    }
  }
  //still report them as conflicts
  ReportParseConflicts(state, srConflicts, rrConflicts);
  //create default actions and remove conflicts from list so we don't deal with them anymore
  foreach (var conflict in rrConflicts) {
    // Default R-R resolution: reduce by the first conflicting production.
    var reduceItems = stateData.ReduceItems.SelectByReducedLookahead(conflict);
    var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
    state.Actions[conflict] = action;
  }
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(srConflicts);
  stateData.ResolvedConflicts.UnionWith(rrConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
} //method
// Computes DirectFirsts and Firsts for non-terminals and productions:
// seed direct firsts from leading RValues, propagate through non-terminals
// to a fixed point, then derive each production's full Firsts set.
private static void ComputeFirsts(GrammarData data) {
  // Step 1: seed direct firsts from the nullable prefix of each production.
  foreach (var nonTerminal in data.NonTerminals) {
    foreach (var production in nonTerminal.Productions) {
      foreach (var rvalue in production.RValues) {
        production.DirectFirsts.Add(rvalue);
        nonTerminal.DirectFirsts.Add(rvalue);
        nonTerminal.Firsts.Add(rvalue);
        if (!rvalue.IsSet(TermOptions.IsNullable))
          break; // first non-nullable term ends the "first" prefix
      }
    }
  }
  // Step 2: fixed-point propagation of Firsts through non-terminal firsts.
  // Timestamps let us skip sources unchanged since our last check.
  int clock = 0;
  var stable = false;
  var gathered = new BnfTermSet();
  while (!stable) {
    stable = true;
    foreach (var nonTerminal in data.NonTerminals) {
      gathered.Clear();
      foreach (var first in nonTerminal.Firsts) {
        var firstAsNt = first as NonTerminal;
        if (firstAsNt != null && firstAsNt._lastChanged >= nonTerminal._lastChecked)
          gathered.UnionWith(firstAsNt.Firsts);
      }
      nonTerminal._lastChecked = clock++;
      var sizeBefore = nonTerminal.Firsts.Count;
      nonTerminal.Firsts.UnionWith(gathered);
      if (nonTerminal.Firsts.Count > sizeBefore) {
        stable = false;
        nonTerminal._lastChanged = clock;
      }
    }
  }
  // Step 3: production Firsts = direct firsts plus the Firsts of every
  // non-terminal among them.
  foreach (var nonTerminal in data.NonTerminals) {
    foreach (var production in nonTerminal.Productions) {
      production.Firsts.UnionWith(production.DirectFirsts);
      foreach (var directFirst in production.DirectFirsts) {
        var directFirstNt = directFirst as NonTerminal;
        if (directFirstNt != null)
          production.Firsts.UnionWith(directFirstNt.Firsts);
      }
    }
  }
}
// Rebuilds the Conflicts set for an inadequate state, then attempts to
// resolve the conflicts through grammar hints and operator precedence.
private void RecomputeAndResolveConflicts(ParserState state) {
  var stateData = state.BuilderData;
  if (!stateData.IsInadequate)
    return;
  stateData.Conflicts.Clear();
  // Reduce/reduce: a lookahead claimed by more than one reduce item.
  var seenLookaheads = new BnfTermSet();
  foreach (var reduceItem in stateData.ReduceItems) {
    foreach (var lookahead in reduceItem.Lookaheads) {
      if (seenLookaheads.Contains(lookahead))
        stateData.Conflicts.Add(lookahead);
      seenLookaheads.Add(lookahead);
    }
  }
  // Shift/reduce: a shiftable term that is also a reduce lookahead,
  // unless it is already handled as a jump lookahead.
  foreach (var shiftTerm in stateData.ShiftTerms) {
    if (seenLookaheads.Contains(shiftTerm) && !stateData.JumpLookaheads.Contains(shiftTerm))
      stateData.Conflicts.Add(shiftTerm);
  }
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
  stateData.Conflicts.ExceptWith(stateData.JumpLookaheads);
  //Now resolve conflicts by hints and precedence
  ResolveConflictsByHints(state);
  ResolveConflictsByPrecedence(state);
}
// Detects shift/reduce and reduce/reduce conflicts in every inadequate state
// and records them in the state's Conflicts set.
private void ComputeConflicts() {
  foreach (var state in _data.States) {
    var stateData = state.BuilderData;
    if (!stateData.IsInadequate)
      continue; // adequate states have no conflicts to record
    stateData.Conflicts.Clear();
    // Reduce/reduce: a lookahead claimed by more than one reduce item.
    var seenLookaheads = new BnfTermSet();
    foreach (var reduceItem in stateData.ReduceItems) {
      foreach (var lookahead in reduceItem.Lookaheads) {
        if (seenLookaheads.Contains(lookahead))
          stateData.Conflicts.Add(lookahead);
        seenLookaheads.Add(lookahead);
      }
    }
    // Shift/reduce: a shiftable terminal that is also a reduce lookahead.
    foreach (var shiftTerminal in stateData.ShiftTerminals) {
      if (seenLookaheads.Contains(shiftTerminal))
        stateData.Conflicts.Add(shiftTerminal);
    }
  }
}
// Adds a conflict-level grammar error for the given state listing the
// conflicting inputs; does nothing when the conflict set is empty.
private void __ReportParseConflicts(String message, ParserState state, BnfTermSet conflicts) {
  if (conflicts.Count == 0)
    return;
  _language.Errors.Add(GrammarErrorLevel.Conflict, state, message + " State {0} on inputs: {1}", state, conflicts);
}
//Check CreateAst flag and give a warning if this flag is not set, but node types or NodeCreator methods are assigned
// in any of non-terminals
private void ValidateGrammar() {
  if (_grammar.FlagIsSet(LanguageFlags.CreateAst))
    return; // flag is set; AST configuration on non-terminals is expected
  var astConfiguredNts = new BnfTermSet();
  foreach (var nonTerminal in _grammarData.NonTerminals) {
    if (nonTerminal.NodeCreator != null || nonTerminal.NodeType != null)
      astConfiguredNts.Add(nonTerminal);
  }
  if (astConfiguredNts.Count == 0)
    return;
  this._language.Errors.Add("Warning: LanguageFlags.CreateAst flag is not set in grammar's Flags, but there are" +
    " non-terminals that have NodeType or NodeCreator property set. If you want Irony to construct AST tree during parsing," +
    " set CreateAst flag in Grammar. Non-terminals: " + astConfiguredNts.ToString());
}//method