/// <summary>
/// Verifies AST-related data for the language: every content term that should produce
/// an AST node must end up with either a node creator or a node type. Terms with neither
/// are reported as a single error listing all offenders.
/// </summary>
public virtual void VerifyLanguageData() {
  var grammarData = Context.Language.GrammarData;
  // Merge terminals and non-terminals into one set; SL lacks co/contravariance
  // support, so the merge is done element by element.
  var allTerms = new BnfTermSet();
  foreach (var terminal in grammarData.Terminals)
    allTerms.Add(terminal);
  foreach (var nonTerminal in grammarData.NonTerminals)
    allTerms.Add(nonTerminal);
  var termsWithoutNodeType = new BnfTermList();
  foreach (var bnfTerm in allTerms) {
    // Only content terminals participate in AST construction.
    var asTerminal = bnfTerm as Terminal;
    if (asTerminal != null && asTerminal.Category != TokenCategory.Content)
      continue;
    if (bnfTerm.Flags.IsSet(TermFlags.NoAstNode))
      continue;
    var astConfig = bnfTerm.AstConfig;
    // A creator is already assigned - nothing to verify for this term.
    if (astConfig.NodeCreator != null || astConfig.DefaultNodeCreator != null)
      continue;
    // No creator: fall back to NodeType, filling it from defaults when missing.
    if (astConfig.NodeType == null)
      astConfig.NodeType = GetDefaultNodeType(bnfTerm);
    if (astConfig.NodeType == null)
      termsWithoutNodeType.Add(bnfTerm);
    else
      astConfig.DefaultNodeCreator = CompileDefaultNodeCreator(astConfig.NodeType);
  }
  if (termsWithoutNodeType.Count > 0)
    // AST node type is not specified for term {0}. Either assign Term.AstConfig.NodeType,
    // or specify default type(s) in AstBuilder.
    Context.AddMessage(ErrorLevel.Error, SourceLocation.Empty, Resources.ErrNodeTypeNotSetOn,
      string.Join(", ", termsWithoutNodeType));
  Context.Language.AstDataVerified = true;
}
/// <summary>
/// Verifies AST-related data: each content term (terminal or non-terminal) that should
/// produce an AST node must have a node creator or a node type assigned; node types are
/// filled from defaults where possible, and terms still lacking one are reported.
/// Sets Context.Language.AstDataVerified when finished.
/// </summary>
public virtual void VerifyLanguageData()
{
    var gd = Context.Language.GrammarData;
    //Collect all terminals and non-terminals
    var terms = new BnfTermSet();
    //SL does not understand co/contravariance, so doing merge one-by-one
    foreach (var t in gd.Terminals)
    {
        terms.Add(t);
    }
    foreach (var t in gd.NonTerminals)
    {
        terms.Add(t);
    }
    var missingList = new BnfTermList();
    foreach (var term in terms)
    {
        var terminal = term as Terminal;
        if (terminal != null && terminal.Category != TokenCategory.Content)
        {
            continue; //only content terminals
        }
        if (term.Flags.IsSet(TermFlags.NoAstNode))
        {
            continue;
        }
        var config = term.AstConfig;
        if (config.NodeCreator != null || config.DefaultNodeCreator != null)
        {
            continue;
        }
        //We must check NodeType
        if (config.NodeType == null)
        {
            config.NodeType = GetDefaultNodeType(term);
        }
        if (config.NodeType == null)
        {
            missingList.Add(term);
        }
        else
        {
            config.DefaultNodeCreator = CompileDefaultNodeCreator(config.NodeType);
        }
    }
    if (missingList.Count > 0)
    {
        // AST node type is not specified for term {0}. Either assign Term.AstConfig.NodeType, or specify default type(s) in AstBuilder.
        // Fix: join the term names explicitly; calling ToString() on a plain list would print
        // the type name rather than the offending terms the {0} placeholder expects.
        Context.AddMessage(ErrorLevel.Error, SourceLocation.Empty, Resources.ErrNodeTypeNotSetOn, string.Join(", ", missingList));
    }
    Context.Language.AstDataVerified = true;
}
// Recomputes the conflict set of a single inadequate state and resolves what it can
// through grammar hints and operator precedence.
private void RecomputeAndResolveConflicts(ParserState state) {
  var data = state.BuilderData;
  if (!data.IsInadequate) return;
  data.Conflicts.Clear();
  var seenLookaheads = new BnfTermSet();
  // Reduce/reduce: a lookahead reachable from two reduce items is a conflict.
  foreach (var reduceItem in data.ReduceItems)
    foreach (var lookahead in reduceItem.Lookaheads) {
      if (seenLookaheads.Contains(lookahead))
        data.Conflicts.Add(lookahead);
      seenLookaheads.Add(lookahead);
    } //foreach lookahead
  // Shift/reduce: a shiftable term that is also a reduce lookahead conflicts,
  // unless it is already handled as a jump lookahead.
  foreach (var shiftTerm in data.ShiftTerms)
    if (seenLookaheads.Contains(shiftTerm) && !data.JumpLookaheads.Contains(shiftTerm))
      data.Conflicts.Add(shiftTerm);
  data.Conflicts.ExceptWith(data.ResolvedConflicts);
  data.Conflicts.ExceptWith(data.JumpLookaheads);
  // Resolve remaining conflicts: hints first, then precedence.
  ResolveConflictsByHints(state);
  ResolveConflictsByPrecedence(state);
}//method
} //method

#endregion

#region Analyzing and resolving conflicts
// Scans every inadequate state and records its reduce/reduce and shift/reduce
// conflicts into state.BuilderData.Conflicts. Detection only - no resolution here.
private void ComputeConflicts()
{
    foreach (var state in _data.States)
    {
        if (!state.BuilderData.IsInadequate)
        {
            continue;
        }
        var stateData = state.BuilderData;
        stateData.Conflicts.Clear();
        var knownLookaheads = new BnfTermSet();
        // reduce/reduce: the same lookahead reachable from two different reduce items
        foreach (var reduceItem in stateData.ReduceItems)
        {
            foreach (var lookahead in reduceItem.Lookaheads)
            {
                if (knownLookaheads.Contains(lookahead))
                {
                    stateData.Conflicts.Add(lookahead);
                }
                knownLookaheads.Add(lookahead);
            }
        }
        // shift/reduce: a shiftable terminal that is also a reduce lookahead
        foreach (var shiftTerminal in stateData.ShiftTerminals)
        {
            if (knownLookaheads.Contains(shiftTerminal))
            {
                stateData.Conflicts.Add(shiftTerminal);
            }
        }
    }
}//method
} //method

#endregion

#region Analyzing and resolving conflicts
// Detects reduce/reduce and shift/reduce conflicts in every inadequate state, then
// resolves them in two passes (grammar hints, then operator precedence); whatever
// remains afterwards is reported and given a default action.
private void ComputeAndResolveConflicts()
{
    foreach (var state in Data.States)
    {
        if (!state.BuilderData.IsInadequate)
        {
            continue; // only inadequate states can carry conflicts
        }
        //first detect conflicts
        var stateData = state.BuilderData;
        stateData.Conflicts.Clear();
        var allLkhds = new BnfTermSet(); // lookaheads seen so far across all reduce items
        //reduce/reduce: the same lookahead appearing for two reduce items ----------------------------------
        foreach (var item in stateData.ReduceItems)
        {
            foreach (var lkh in item.Lookaheads)
            {
                if (allLkhds.Contains(lkh))
                {
                    state.BuilderData.Conflicts.Add(lkh);
                }
                allLkhds.Add(lkh);
            } //foreach lkh
        } //foreach item
        //shift/reduce: a shiftable terminal that is also a reduce lookahead --------------------------------
        foreach (var term in stateData.ShiftTerminals)
        {
            if (allLkhds.Contains(term))
            {
                stateData.Conflicts.Add(term);
            }
        }
        //Now resolve conflicts by hints and precedence -------------------------------------------------------
        if (stateData.Conflicts.Count > 0)
        {
            //Hints pass. NOTE(review): resolution presumably records results into
            // ResolvedConflicts rather than mutating Conflicts during enumeration -
            // confirm in ResolveConflictByHints.
            foreach (var conflict in stateData.Conflicts)
            {
                ResolveConflictByHints(state, conflict);
            }
            stateData.Conflicts.ExceptWith(state.BuilderData.ResolvedConflicts);
            //Precedence pass, over whatever the hints did not resolve.
            foreach (var conflict in stateData.Conflicts)
            {
                ResolveConflictByPrecedence(state, conflict);
            }
            stateData.Conflicts.ExceptWith(state.BuilderData.ResolvedConflicts);
            //if we still have conflicts, report and assign default action
            if (stateData.Conflicts.Count > 0)
            {
                ReportAndCreateDefaultActionsForConflicts(state);
            }
        } //if Conflicts.Count > 0
    }
} //method
// Adds an LR0 core to the state, classifying the resulting LR item and
// recursively expanding a newly seen non-terminal after the dot.
public void AddItem(LR0Item core) {
  // A core already present means this item was processed before - nothing to do.
  if (!AllCores.Add(core)) return;
  var newItem = new LRItem(State, core);
  AllItems.Add(newItem);
  // Final cores (dot at the end) produce reduce items; all others are shift items.
  if (newItem.Core.IsFinal)
    ReduceItems.Add(newItem);
  else
    ShiftItems.Add(newItem);
  if (newItem.Core.IsInitial)
    InitialItems.Add(newItem);
  if (core.IsFinal) return;
  // Register the term after the dot; stop if it was registered already.
  if (!ShiftTerms.Add(core.Current)) return;
  var asTerminal = core.Current as Terminal;
  if (asTerminal != null)
    ShiftTerminals.Add(asTerminal);
  // A newly seen non-terminal is expanded: add the initial LR0 item
  // of each of its productions.
  var asNonTerminal = core.Current as NonTerminal;
  if (asNonTerminal == null) return;
  foreach (var production in asNonTerminal.Productions)
    AddItem(production.LR0Items[0]);
}//method
}//method

/* Detecting conflicts that cannot be resolved by tail wrapping
 * 1. Shift-reduce conflicts. If inadequate state S has shift item based on the same core as source
 *    of one of reduced lookaheads of reduce item, then the conflict is unresolvable -
 *    no wrapping of lookahead would resolve ambiguity
 * 2. Reduce-reduce conflict. If reduce items in inadequate state have reduced lookaheads
 *    with sources having the same core (LR0 item) then we have unresolvable conflict. Wrapping of the item tail would produce
 *    the same new non-terminal as lookahead in both conflicting reduce items.
 */
// Classifies the state's conflicts into unresolvable shift-reduce and reduce-reduce sets,
// reports each non-empty set as a grammar error, and installs default actions for them.
private void DetectConflictsUnresolvableByRestructuring(ParserState state)
{
    //compute R-R and S-R conflicting lookaheads
    var rrConflicts = new BnfTermSet();
    var srConflicts = new BnfTermSet();
    var conflictSources = new LR0ItemSet(); // cores participating in any unresolvable conflict
    foreach (var conflict in state.BuilderData.Conflicts)
    {
        // source cores seen for this conflict that did NOT clash with a shift item
        var nonConflictingSourceCores = new LR0ItemSet();
        foreach (var reduceItem in state.BuilderData.ReduceItems)
        {
            foreach (var source in reduceItem.ReducedLookaheadSources)
            {
                if (source.Core.Current != conflict)
                {
                    continue; // this source does not produce the conflicting lookahead
                }
                if (state.BuilderData.Cores.Contains(source.Core)) //we have unresolvable shift-reduce
                {
                    srConflicts.Add(source.Core.Current);
                    conflictSources.Add(source.Core);
                }
                else if (nonConflictingSourceCores.Contains(source.Core)) //unresolvable reduce-reduce
                {
                    // a second reduce item reached the same source core - wrapping would not help
                    rrConflicts.Add(source.Core.Current);
                    conflictSources.Add(source.Core);
                }
                else
                {
                    nonConflictingSourceCores.Add(source.Core);
                }
            } //foreach source
        } //foreach item
    } //foreach conflict
    if (srConflicts.Count > 0)
    {
        ReportParseConflicts("Ambiguous grammar, unresolvable shift-reduce conflicts.", state, srConflicts);
    }
    if (rrConflicts.Count > 0)
    {
        ReportParseConflicts("Ambiguous grammar, unresolvable reduce-reduce conflicts.", state, rrConflicts);
    }
    //create default actions and remove them from conflict list, so we don't deal with them anymore
    CreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
} // method
}//method

// Adds an LR item to the state data, classifying it as a reduce item
// (dot at the end) or a shift item (dot before a term).
public void AddItem(LRItem item)
{
    // Skip duplicates.
    if (AllItems.Contains(item))
    {
        return;
    }
    AllItems.Add(item);
    Cores.Add(item.Core);
    if (item.Core.IsFinal)
    {
        ReduceItems.Add(item);
    }
    else
    {
        // Shift items also register the term after the dot.
        ShiftItems.Add(item);
        ShiftTerms.Add(item.Core.Current);
    }
}
// Warns when AST-related properties (NodeType/NodeCreator) are assigned on
// non-terminals while the grammar does not request AST construction via
// LanguageFlags.CreateAst - the assignments would silently have no effect.
private void ValidateGrammar()
{
    if (_grammar.FlagIsSet(LanguageFlags.CreateAst))
        return; // flag is set - nothing to warn about
    var affected = new BnfTermSet();
    foreach (var nonTerminal in _grammarData.NonTerminals)
        if (nonTerminal.NodeCreator != null || nonTerminal.NodeType != null)
            affected.Add(nonTerminal);
    if (affected.Count == 0)
        return;
    _language.Errors.Add("Warning: LanguageFlags.CreateAst flag is not set in grammar's Flags, but there are" +
        " non-terminals that have NodeType or NodeCreator property set. If you want Irony to construct AST tree during parsing," +
        " set CreateAst flag in Grammar. Non-terminals: " + affected.ToString());
}//method
}//method

#region some explanations
//Computes non-canonical lookaheads and jump lookaheads - those that cause jump
// to non-canonical state
// We are doing it top-down way, starting from most reduced lookaheads - they are not conflicting.
// (If there were conflicting reduced lookaheads in a state initially, the grammar transformation algorithm
// should have already wrapped them into non-conflicting "tail" non-terminals.)
// We want to eliminate reduced lookaheads as much as possible, and replace them with expanded "child"
// terms, to have only those non-canonical lookaheads that are absolutely necessary.
// So for each reduced lookahead we check if we can replace it with its expanded, "child" terms
// (from DirectFirsts set). We do it only if lookaheads child terms are all non-conflicting as lookaheads in
// the state. If however, at least one child is conflicting, the reduced parent should stay.
// What if we have some children conflicting and some not? We leave the parent reduced lookahead in state,
// to cover (hide) the conflicting children, but we also add non-conflicting children as well, to allow
// the parser automaton to use them (in canonical state) as soon as they are recognized, without need
// to reduce the parent and switch back to canonical state.
#endregion
// Worklist algorithm over reduced lookaheads: fills stateData.JumpLookaheads (jumps)
// and stateData.NonCanonicalLookaheads (valids), then clears the state's Conflicts set.
private void ComputeStateNonCanonicalLookaheads(ParserState state) {
  var stateData = state.BuilderData; //rename for shorter code
  var jumps = stateData.JumpLookaheads; // conflicting lookaheads, that must result in jump to non-canonical state
  var valids = stateData.NonCanonicalLookaheads; // valid non-canonical lookaheads, non-terminals only
  jumps.Clear();
  valids.Clear();
  var alreadyChecked = new BnfTermSet();
  var toCheck = new BnfTermSet(); //terms to check for expansion
  //1. precompute initial set to check
  foreach (var reduceItem in stateData.ReduceItems)
    toCheck.UnionWith(reduceItem.ReducedLookaheads);
  toCheck.RemoveWhere(t => t is Terminal); //we are interested in non-terminals only
  //2. Try to expand all initial (reduced) lookaheads, and replace original lookaheads with expanded versions
  while (toCheck.Count > 0) { // do until no terms to check left
    var lkhInCheck = toCheck.First() as NonTerminal;
    toCheck.Remove(lkhInCheck);
    //to prevent repeated checking of mutually recursive terms
    if (alreadyChecked.Contains(lkhInCheck)) continue;
    alreadyChecked.Add(lkhInCheck);
    //Now check children for conflicts; go through all direct firsts of lkhInCheck and check them for conflicts
    bool hasJumpChild = false;
    foreach (var lkhChild in lkhInCheck.DirectFirsts) {
      if (lkhChild == lkhInCheck) continue; // skip direct self-recursion
      if (jumps.Contains(lkhChild)) { hasJumpChild = true; continue; } // child already known to conflict
      var ntChild = lkhChild as NonTerminal;
      if (ntChild != null && valids.Contains(ntChild)) continue; // child already accepted as valid
      //the child has not been tested yet; check if it is a conflict in current state
      var occurCount = GetLookaheadOccurenceCount(state, lkhChild);
      if (occurCount > 1) {
        //possible conflict, check precedence
        if (lkhChild.IsSet(TermOptions.UsePrecedence)) {
          if (ntChild != null) {
            valids.Add(ntChild); //if it is terminal, it is valid;
            // NOTE(review): the condition tests lkhChild but adds ntChild - they reference
            // the same object here (ntChild is lkhChild cast to NonTerminal), so this is consistent.
            if (!alreadyChecked.Contains(lkhChild)) toCheck.Add(ntChild);
          } //if ntChild
        } else {
          //conflict!
          hasJumpChild = true;
          jumps.Add(lkhChild);
          //if it is non-terminal, add its Firsts to conflict as well
          if (ntChild != null) {
            jumps.UnionWith(ntChild.Firsts);
            //valids.ExceptWith(ntChild.Firsts);
          }
        }//if IsSet... else...
      } else { //occurCount == 1
        //no conflict: if it is non-terminal, add it to toCheck set to check in the future
        if (ntChild != null && !alreadyChecked.Contains(ntChild))
          toCheck.Add(ntChild); //if nonterminal and not checked yet, add it to toCheck for further checking
      }//if ...else...
    }//foreach lkhChild
    //Ok, we finished checking all direct children; if at least one of them has conflict,
    // then lkhInCheck (parent) must stay as a lookahead - we cannot fully expand it replacing by all children
    if (hasJumpChild) valids.Add(lkhInCheck);
  }//while toCheck.Count > 0
  //remove conflicts - from here on they are represented by jumps/valids
  stateData.Conflicts.Clear();
}//method
// Detects reduce/reduce and shift/reduce conflicts in every inadequate state and
// resolves them: first by grammar hints, then by precedence, and finally - for
// whatever remains - by reporting and assigning default actions.
private void ComputeAndResolveConflicts()
{
    foreach (var parserState in Data.States)
    {
        var builder = parserState.BuilderData;
        if (!builder.IsInadequate)
        {
            continue;
        }
        //first detect conflicts
        builder.Conflicts.Clear();
        var seenLookaheads = new BnfTermSet();
        //reduce/reduce: same lookahead reachable from two reduce items
        foreach (var reduceItem in builder.ReduceItems)
        {
            foreach (var lookahead in reduceItem.Lookaheads)
            {
                if (seenLookaheads.Contains(lookahead))
                {
                    builder.Conflicts.Add(lookahead);
                }
                seenLookaheads.Add(lookahead);
            }
        }
        //shift/reduce: shiftable terminal that is also a reduce lookahead
        foreach (var shiftTerminal in builder.ShiftTerminals)
        {
            if (seenLookaheads.Contains(shiftTerminal))
            {
                builder.Conflicts.Add(shiftTerminal);
            }
        }
        if (builder.Conflicts.Count == 0)
        {
            continue;
        }
        //resolve by hints, then by precedence; drop what got resolved after each pass
        foreach (var conflict in builder.Conflicts)
        {
            ResolveConflictByHints(parserState, conflict);
        }
        builder.Conflicts.ExceptWith(builder.ResolvedConflicts);
        foreach (var conflict in builder.Conflicts)
        {
            ResolveConflictByPrecedence(parserState, conflict);
        }
        builder.Conflicts.ExceptWith(builder.ResolvedConflicts);
        //anything left gets reported and a default action assigned
        if (builder.Conflicts.Count > 0)
        {
            ReportAndCreateDefaultActionsForConflicts(parserState);
        }
    }
}
/// <summary>
/// Warns when AstNodeType/AstNodeCreator are assigned on non-terminals while
/// LanguageFlags.CreateAst is not set - without the flag no AST would be built,
/// so those assignments are almost certainly a mistake.
/// </summary>
private void ValidateGrammar()
{
    //Check CreateAst flag and give a warning if this flag is not set, but node types or NodeCreator methods are assigned
    // in any of non-terminals
    if (!_grammar.FlagIsSet(LanguageFlags.CreateAst))
    {
        var ntSet = new BnfTermSet();
        foreach (var nt in _grammarData.NonTerminals)
            if (nt.AstNodeCreator != null || nt.AstNodeType != null)
                ntSet.Add(nt);
        if (ntSet.Count > 0)
            // Fix: name the offending non-terminals so the user can locate them quickly;
            // previously the message only said "some non-terminals".
            this._language.Errors.Add(GrammarErrorLevel.Warning, null,
                "Warning: AstNodeType or AstNodeCreator is set in some non-terminals, but LanguageFlags.CreateAst flag is not set. Non-terminals: " + ntSet.ToString());
    }
}
//Detect conflicts that cannot be handled by non-canonical NLALR method directly, but may be fixed by grammar transformation.
// Collects the cores of items involved in each conflict into _coresToAddWrapTailHint
// (candidates for a WrapTail hint recommendation), reports the conflicts, installs
// default reduce actions for R-R conflicts, and marks everything found as resolved.
private void DetectNlalrFixableConflicts(ParserState state)
{
    var stateData = state.BuilderData;
    //compute R-R and S-R conflicting lookaheads
    var reduceLkhds = new BnfTermSet(); // reduced lookaheads seen so far
    var rrConflicts = new BnfTermSet();
    var srConflicts = new BnfTermSet();
    foreach (var reduceItem in state.BuilderData.ReduceItems)
    {
        foreach (var lkh in reduceItem.ReducedLookaheads)
        {
            if (stateData.ShiftTerms.Contains(lkh))
            {
                // lookahead is also shiftable; precedence-enabled terms are excluded,
                // leaving only genuine S-R conflicts
                if (!lkh.IsSet(TermOptions.UsePrecedence))
                {
                    srConflicts.Add(lkh); //S-R conflict
                }
            }
            else if (reduceLkhds.Contains(lkh))
            {
                rrConflicts.Add(lkh); //R-R conflict
            }
            reduceLkhds.Add(lkh);
        } //foreach lkh
    } //foreach item
    if (srConflicts.Count == 0 && rrConflicts.Count == 0)
    {
        return; // state has no NLALR-fixable conflicts
    }
    //Collect all cores to recommend for adding WrapTail hint.
    var allConflicts = new BnfTermSet();
    allConflicts.UnionWith(srConflicts);
    allConflicts.UnionWith(rrConflicts);
    foreach (var conflict in allConflicts)
    {
        var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
        foreach (var item in conflictingShiftItems)
        {
            if (!item.Core.IsInitial) //only non-initial
            {
                _coresToAddWrapTailHint.Add(item.Core);
            }
        }
        foreach (var reduceItem in state.BuilderData.ReduceItems)
        {
            var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
            foreach (var source in conflictingSources)
            {
                _coresToAddWrapTailHint.Add(source.Core);
            }
        }
    }
    //still report them as conflicts
    ReportParseConflicts(state, srConflicts, rrConflicts);
    //create default actions and remove conflicts from list so we don't deal with them anymore
    foreach (var conflict in rrConflicts)
    {
        // default R-R resolution: reduce by the production of the first matching reduce item
        var reduceItems = stateData.ReduceItems.SelectByReducedLookahead(conflict);
        var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
        state.Actions[conflict] = action;
    }
    //Update ResolvedConflicts and Conflicts sets
    stateData.ResolvedConflicts.UnionWith(srConflicts);
    stateData.ResolvedConflicts.UnionWith(rrConflicts);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}//method
}//method

#region some explanations
//Computes non-canonical lookaheads and jump lookaheads - those that cause jump
// to non-canonical state
// We are doing it top-down way, starting from most reduced lookaheads - they are not conflicting.
// (If there were conflicting reduced lookaheads in a state initially, the grammar transformation algorithm
// should have already wrapped them into non-conflicting "tail" non-terminals.)
// We want to eliminate reduced lookaheads as much as possible, and replace them with expanded "child"
// terms, to have only those non-canonical lookaheads that are absolutely necessary.
// So for each reduced lookahead we check if we can replace it with its expanded, "child" terms
// (from DirectFirsts set). We do it only if lookaheads child terms are all non-conflicting as lookaheads in
// the state. If however, at least one child is conflicting, the reduced parent should stay.
// What if we have some children conflicting and some not? We leave the parent reduced lookahead in state,
// to cover (hide) the conflicting children, but we also add non-conflicting children as well, to allow
// the parser automaton to use them (in canonical state) as soon as they are recognized, without need
// to reduce the parent and switch back to canonical state.
#endregion
// Worklist algorithm over reduced lookaheads: fills stateData.JumpLookaheads (jumps)
// and stateData.NonCanonicalLookaheads (valids), then clears the state's Conflicts set.
private void ComputeStateNonCanonicalLookaheads(ParserState state)
{
    var stateData = state.BuilderData; //rename for shorter code
    var jumps = stateData.JumpLookaheads; // conflicting lookaheads, that must result in jump to non-canonical state
    var valids = stateData.NonCanonicalLookaheads; // valid non-canonical lookaheads, non-terminals only
    jumps.Clear();
    valids.Clear();
    var alreadyChecked = new BnfTermSet();
    var toCheck = new BnfTermSet(); //terms to check for expansion
    //1. precompute initial set to check
    foreach (var reduceItem in stateData.ReduceItems)
    {
        toCheck.UnionWith(reduceItem.ReducedLookaheads);
    }
    toCheck.RemoveWhere(t => t is Terminal); //we are interested in non-terminals only
    //2. Try to expand all initial (reduced) lookaheads, and replace original lookaheads with expanded versions
    while (toCheck.Count > 0) // do until no terms to check left
    {
        var lkhInCheck = toCheck.First() as NonTerminal;
        toCheck.Remove(lkhInCheck);
        //to prevent repeated checking of mutually recursive terms
        if (alreadyChecked.Contains(lkhInCheck))
        {
            continue;
        }
        alreadyChecked.Add(lkhInCheck);
        //Now check children for conflicts; go through all direct firsts of lkhInCheck and check them for conflicts
        bool hasJumpChild = false;
        foreach (var lkhChild in lkhInCheck.DirectFirsts)
        {
            if (lkhChild == lkhInCheck)
            {
                continue; // skip direct self-recursion
            }
            if (jumps.Contains(lkhChild))
            {
                // child already known to conflict
                hasJumpChild = true;
                continue;
            }
            var ntChild = lkhChild as NonTerminal;
            if (ntChild != null && valids.Contains(ntChild))
            {
                continue; // child already accepted as valid
            }
            //the child has not been tested yet; check if it is a conflict in current state
            var occurCount = GetLookaheadOccurenceCount(state, lkhChild);
            if (occurCount > 1)
            {
                //possible conflict, check precedence
                if (lkhChild.IsSet(TermOptions.UsePrecedence))
                {
                    if (ntChild != null)
                    {
                        valids.Add(ntChild); //if it is terminal, it is valid;
                        // NOTE(review): the condition tests lkhChild but adds ntChild - same object
                        // here (ntChild is lkhChild cast to NonTerminal), so this is consistent.
                        if (!alreadyChecked.Contains(lkhChild))
                        {
                            toCheck.Add(ntChild);
                        }
                    } //if ntChild
                }
                else
                {
                    //conflict!
                    hasJumpChild = true;
                    jumps.Add(lkhChild);
                    //if it is non-terminal, add its Firsts to conflict as well
                    if (ntChild != null)
                    {
                        jumps.UnionWith(ntChild.Firsts);
                        //valids.ExceptWith(ntChild.Firsts);
                    }
                } //if IsSet... else...
            }
            else //occurCount == 1
            //no conflict: if it is non-terminal, add it to toCheck set to check in the future
            {
                if (ntChild != null && !alreadyChecked.Contains(ntChild))
                {
                    toCheck.Add(ntChild); //if nonterminal and not checked yet, add it to toCheck for further checking
                }
            }//if ...else...
        }//foreach lkhChild
        //Ok, we finished checking all direct children; if at least one of them has conflict,
        // then lkhInCheck (parent) must stay as a lookahead - we cannot fully expand it replacing by all children
        if (hasJumpChild)
        {
            valids.Add(lkhInCheck);
        }
    }//while toCheck.Count > 0
    //remove conflicts - from here on they are represented by jumps/valids
    stateData.Conflicts.Clear();
}//method
//Detect conflicts that cannot be handled by non-canonical NLALR method directly, but may be fixed by grammar transformation.
// Collects the cores of items involved in each conflict into _coresToAddWrapTailHint
// (candidates for a WrapTail hint recommendation), then reports the conflicts,
// installs default actions and marks everything found as resolved.
private void DetectNlalrFixableConflicts(ParserState state) {
  var stateData = state.BuilderData;
  //compute R-R and S-R conflicting lookaheads
  var reduceLkhds = new BnfTermSet(); // reduced lookaheads seen so far
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  foreach (var reduceItem in state.BuilderData.ReduceItems) {
    foreach (var lkh in reduceItem.ReducedLookaheads) {
      if (stateData.ShiftTerms.Contains(lkh)) {
        // shiftable lookahead: S-R conflict unless precedence will disambiguate it
        if (!lkh.IsSet(TermOptions.UsePrecedence))
          srConflicts.Add(lkh); //S-R conflict
      } else if (reduceLkhds.Contains(lkh))
        rrConflicts.Add(lkh); //R-R conflict
      reduceLkhds.Add(lkh);
    }//foreach lkh
  }//foreach item
  if (srConflicts.Count == 0 && rrConflicts.Count == 0)
    return; // state has no NLALR-fixable conflicts
  //Collect all cores to recommend for adding WrapTail hint.
  var allConflicts = new BnfTermSet();
  allConflicts.UnionWith(srConflicts);
  allConflicts.UnionWith(rrConflicts);
  foreach (var conflict in allConflicts) {
    var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
    foreach (var item in conflictingShiftItems)
      if (!item.Core.IsInitial) //only non-initial
        _coresToAddWrapTailHint.Add(item.Core);
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
      foreach (var source in conflictingSources)
        _coresToAddWrapTailHint.Add(source.Core);
    }
  }
  // report remaining conflicts and install default actions for them
  ReportAndCreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(srConflicts);
  stateData.ResolvedConflicts.UnionWith(rrConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
//Detect conflicts that cannot be handled by non-canonical NLALR method directly, but may be fixed by grammar transformation.
// Collects the cores of items involved in each conflict into _coresToAddWrapTailHint
// (candidates for a WrapTail hint recommendation), reports the conflicts, installs
// default reduce actions for R-R conflicts, and marks everything found as resolved.
private void DetectNlalrFixableConflicts(ParserState state) {
  var stateData = state.BuilderData;
  //compute R-R and S-R conflicting lookaheads
  var reduceLkhds = new BnfTermSet(); // reduced lookaheads seen so far
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  foreach (var reduceItem in state.BuilderData.ReduceItems) {
    foreach (var lkh in reduceItem.ReducedLookaheads) {
      if (stateData.ShiftTerms.Contains(lkh)) {
        // shiftable lookahead: S-R conflict unless precedence will disambiguate it
        if (!lkh.IsSet(TermOptions.UsePrecedence))
          srConflicts.Add(lkh); //S-R conflict
      } else if (reduceLkhds.Contains(lkh))
        rrConflicts.Add(lkh); //R-R conflict
      reduceLkhds.Add(lkh);
    }//foreach lkh
  }//foreach item
  if (srConflicts.Count == 0 && rrConflicts.Count == 0)
    return; // state has no NLALR-fixable conflicts
  //Collect all cores to recommend for adding WrapTail hint.
  var allConflicts = new BnfTermSet();
  allConflicts.UnionWith(srConflicts);
  allConflicts.UnionWith(rrConflicts);
  foreach (var conflict in allConflicts) {
    var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
    foreach (var item in conflictingShiftItems)
      if (!item.Core.IsInitial) //only non-initial
        _coresToAddWrapTailHint.Add(item.Core);
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
      foreach (var source in conflictingSources)
        _coresToAddWrapTailHint.Add(source.Core);
    }
  }
  //still report them as conflicts
  ReportParseConflicts(state, srConflicts, rrConflicts);
  //create default actions and remove conflicts from list so we don't deal with them anymore
  foreach (var conflict in rrConflicts) {
    // default R-R resolution: reduce by the production of the first matching reduce item
    var reduceItems = stateData.ReduceItems.SelectByReducedLookahead(conflict);
    var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
    state.Actions[conflict] = action;
  }
  //Update ResolvedConflicts and Conflicts sets
  stateData.ResolvedConflicts.UnionWith(srConflicts);
  stateData.ResolvedConflicts.UnionWith(rrConflicts);
  stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}//method
}//method

/* Detecting conflicts that cannot be resolved by tail wrapping
   1. Shift-reduce conflicts. If inadequate state S has shift item based on the same core as source
      of one of reduced lookaheads of reduce item, then the conflict is unresolvable -
      no wrapping of lookahead would resolve ambiguity
   2. Reduce-reduce conflict. If reduce items in inadequate state have reduced lookaheads
      with sources having the same core (LR0 item) then we have unresolvable conflict. Wrapping of the item tail would produce
      the same new non-terminal as lookahead in both conflicting reduce items. */
// Classifies the state's conflicts into unresolvable shift-reduce and reduce-reduce sets,
// reports each non-empty set as a grammar error, and installs default actions for them.
private void DetectConflictsUnresolvableByRestructuring(ParserState state) {
  //compute R-R and S-R conflicting lookaheads
  var rrConflicts = new BnfTermSet();
  var srConflicts = new BnfTermSet();
  var conflictSources = new LR0ItemSet(); // cores participating in any unresolvable conflict
  foreach (var conflict in state.BuilderData.Conflicts) {
    // source cores seen for this conflict that did NOT clash with a shift item
    var nonConflictingSourceCores = new LR0ItemSet();
    foreach (var reduceItem in state.BuilderData.ReduceItems) {
      foreach (var source in reduceItem.ReducedLookaheadSources) {
        if (source.Core.Current != conflict) continue; // source does not produce this conflicting lookahead
        if (state.BuilderData.Cores.Contains(source.Core)) { //we have unresolvable shift-reduce
          srConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else if (nonConflictingSourceCores.Contains(source.Core)) { //unresolvable reduce-reduce
          // a second reduce item reached the same source core - wrapping would not help
          rrConflicts.Add(source.Core.Current);
          conflictSources.Add(source.Core);
        } else
          nonConflictingSourceCores.Add(source.Core);
      }//foreach source
    }//foreach item
  }//foreach conflict
  if (srConflicts.Count > 0)
    ReportParseConflicts("Ambiguous grammar, unresolvable shift-reduce conflicts.", state, srConflicts);
  if (rrConflicts.Count > 0)
    ReportParseConflicts("Ambiguous grammar, unresolvable reduce-reduce conflicts.", state, rrConflicts);
  //create default actions and remove them from conflict list, so we don't deal with them anymore
  CreateDefaultActionsForConflicts(state, srConflicts, rrConflicts);
} // method
// Scans every inadequate state and records its reduce/reduce and shift/reduce
// conflicts into state.BuilderData.Conflicts. Detection only - no resolution.
private void ComputeConflicts() {
  foreach (var parserState in _data.States) {
    var builderData = parserState.BuilderData;
    if (!builderData.IsInadequate) continue;
    builderData.Conflicts.Clear();
    var knownLookaheads = new BnfTermSet();
    // Reduce/reduce: the same lookahead reachable from two different reduce items.
    foreach (var reduceItem in builderData.ReduceItems)
      foreach (var lookahead in reduceItem.Lookaheads) {
        if (knownLookaheads.Contains(lookahead))
          builderData.Conflicts.Add(lookahead);
        knownLookaheads.Add(lookahead);
      }
    // Shift/reduce: a shiftable terminal that is also a reduce lookahead.
    foreach (var shiftTerminal in builderData.ShiftTerminals)
      if (knownLookaheads.Contains(shiftTerminal))
        builderData.Conflicts.Add(shiftTerminal);
  }
}
// Recomputes the conflict set for a single inadequate state, excludes already-resolved
// and jump lookaheads, then resolves the rest via hints and precedence.
private void RecomputeAndResolveConflicts(ParserState state)
{
    var builder = state.BuilderData;
    if (!builder.IsInadequate)
    {
        return;
    }
    builder.Conflicts.Clear();
    var collectedLookaheads = new BnfTermSet();
    //reduce/reduce: a lookahead shared between two reduce items
    foreach (var reduceItem in builder.ReduceItems)
    {
        foreach (var lookahead in reduceItem.Lookaheads)
        {
            if (collectedLookaheads.Contains(lookahead))
            {
                builder.Conflicts.Add(lookahead);
            }
            collectedLookaheads.Add(lookahead);
        }
    }
    //shift/reduce: a shiftable term that is also a reduce lookahead, unless it is a jump lookahead
    foreach (var shiftTerm in builder.ShiftTerms)
    {
        if (collectedLookaheads.Contains(shiftTerm) && !builder.JumpLookaheads.Contains(shiftTerm))
        {
            builder.Conflicts.Add(shiftTerm);
        }
    }
    builder.Conflicts.ExceptWith(builder.ResolvedConflicts);
    builder.Conflicts.ExceptWith(builder.JumpLookaheads);
    //Now resolve conflicts by hints and precedence
    ResolveConflictsByHints(state);
    ResolveConflictsByPrecedence(state);
}
private void ValidateGrammar()
{
    // The CreateAst flag gates AST construction; if it is off while non-terminals carry
    // NodeType/NodeCreator assignments, the user probably forgot to set it - warn.
    if (!_grammar.FlagIsSet(LanguageFlags.CreateAst))
    {
        var astNonTerminals = new BnfTermSet();
        foreach (var nonTerm in _grammarData.NonTerminals)
        {
            if (nonTerm.NodeCreator != null || nonTerm.NodeType != null)
            {
                astNonTerminals.Add(nonTerm);
            }
        }
        if (astNonTerminals.Count > 0)
        {
            this._language.Errors.Add("Warning: LanguageFlags.CreateAst flag is not set in grammar's Flags, but there are" +
                " non-terminals that have NodeType or NodeCreator property set. If you want Irony to construct AST tree during parsing," +
                " set CreateAst flag in Grammar. Non-terminals: " + astNonTerminals.ToString());
        }
    }
}//method