/// <summary>
/// Records a reduction conflict found in a parser state: two competing
/// parser actions for the same lookahead token.
/// </summary>
public GrammarReductionConflict(ParserState state, TokenGrammarElement lookahead, ParserAction action1, ParserAction action2)
{
    State = state;
    Lookahead = lookahead;
    Action1 = action1;
    Action2 = action2;
}
/// <summary>
/// Creates a transition action that moves the automaton to <paramref name="nextState"/>.
/// Only Shift and Goto actions carry a next state.
/// </summary>
/// <param name="action">Must be <c>ParserAction.Shift</c> or <c>ParserAction.Goto</c>.</param>
/// <param name="nextState">Target state of the transition; must not be null.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is neither Shift nor Goto.</exception>
/// <exception cref="ArgumentNullException">When <paramref name="nextState"/> is null.</exception>
public TransitionAction(ParserAction action, ParserState nextState)
    : this(action)
{
    // Throw a specific exception type instead of the reserved base Exception (CA2201),
    // and use nameof for refactor-safe parameter names.
    if (action != ParserAction.Shift && action != ParserAction.Goto)
        throw new ArgumentException("Can only define the next state for the shift or goto actions.", nameof(action));
    if (nextState == null)
        throw new ArgumentNullException(nameof(nextState));
    NextState = nextState;
}
/// <summary>
/// Creates a reduce transition action that reduces by <paramref name="reduceByProduction"/>.
/// Only Reduce actions carry a production.
/// </summary>
/// <param name="action">Must be <c>ParserAction.Reduce</c>.</param>
/// <param name="reduceByProduction">Production to reduce by; must not be null.</param>
/// <exception cref="ArgumentException">When <paramref name="action"/> is not Reduce.</exception>
/// <exception cref="ArgumentNullException">When <paramref name="reduceByProduction"/> is null.</exception>
public TransitionAction(ParserAction action, Production reduceByProduction)
    : this(action)
{
    // Throw a specific exception type instead of the reserved base Exception (CA2201),
    // and use nameof for refactor-safe parameter names.
    if (action != ParserAction.Reduce)
        throw new ArgumentException("Can only define the production to reduce by for the reduce action.", nameof(action));
    if (reduceByProduction == null)
        throw new ArgumentNullException(nameof(reduceByProduction));
    ReduceByProduction = reduceByProduction;
}
// Resolves an operator-based ambiguity into a concrete shift or reduce (via
// precedence/associativity in GetActionTypeForOperation), traces the resolution,
// and executes the resolved action.
private void ExecuteOperatorAction(ParserAction action)
{
    var resolvedType = GetActionTypeForOperation(action);
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTraceOpResolved, resolvedType);
    if (resolvedType == ParserActionType.Shift)
        ExecuteShift(action);
    else if (resolvedType == ParserActionType.Reduce)
        ExecuteReduce(action);
}
// Round-trips a "modified reduce" action through ParserAction encode/decode and
// verifies that the kind, production id, and size survive.  Input ranges are driven
// by the NUnit [Values] attributes, including the documented maxima and boundaries.
public void TestRMEncoding(
    [Values(123, ParserAction.Value1Max, 0, 1)] int value1,
    [Values(123, 1, 2, ParserAction.Value2Max, 0, 1)] short value2)
{
    // Note: "EncodeModifedReduce" (sic) is the API's own spelling; do not "fix" it here.
    int cell = ParserAction.EncodeModifedReduce(value1, value2);
    var output = ParserAction.Decode(cell);
    Assert.IsNotNull(output);
    Assert.AreEqual(ParserActionKind.Reduce, output.Kind);
    Assert.AreEqual(value1, output.ProductionId);
    Assert.AreEqual(value2, output.Size);
}
// Reduces a list-builder production (List -> List + member): the list node already on
// the stack absorbs the newly parsed member instead of allocating a new parent node.
// Returns the (possibly unchanged) list node as the reduction result.
private ParseTreeNode ReduceExistingList(ParserAction action)
{
    int childCount = action.ReduceProduction.RValues.Count;
    int firstChildIndex = Context.ParserStack.Count - childCount;
    var listNode = Context.ParserStack[firstChildIndex]; //get the list already created - it is the first child node
    listNode.Span = ComputeNewNodeSpan(childCount);
    var listMember = Context.ParserStack.Top; //next list member is the last child - at the top of the stack
    // Punctuation/empty-transient members are not added to the list.
    if (ShouldSkipChildNode(listMember)) {
        return(listNode);
    }
    CheckCreateAstNode(listMember);
    listNode.ChildNodes.Add(listMember);
    return(listNode);
}
// Custom binary (de)serialization hook for ParserOldAction, following the Serialiser
// protocol used by the generated parser tables:
//  - s == null  -> return a blank instance (used for instance creation/type discovery);
//  - s.Encode   -> write the base ParserAction state, then m_action, and return null;
//  - otherwise  -> read the base state, then m_action, and return the populated object.
// The read order must mirror the write order exactly.
public new static object Serialise(object o, Serialiser s)
{
    if (s == null) {
        return((object)new ParserOldAction());
    }
    ParserOldAction parserOldAction = (ParserOldAction)o;
    if (s.Encode) {
        ParserAction.Serialise((object)parserOldAction, s);
        s.Serialise((object)parserOldAction.m_action);
        return((object)null);
    }
    ParserAction.Serialise((object)parserOldAction, s);
    parserOldAction.m_action = (int)s.Deserialise();
    return((object)parserOldAction);
}
}//method

// Installs default resolutions for reported conflicts in <paramref name="state"/>.
// Shift-reduce conflicts default to shift (the shift actions already exist, so nothing
// is added); each reduce-reduce conflict reduces by whichever reduce item comes first
// in the set.  Both conflict sets are then moved into ResolvedConflicts.
private void CreateDefaultActionsForConflicts(ParserState state, BnfTermSet shiftReduceConflicts, BnfTermSet reduceReduceConflicts)
{
    var stateData = state.BuilderData;
    //Create default actions for these conflicts. For shift-reduce, default action is shift, and shift action already
    // exist for all shifts from the state, so we don't need to do anything. For reduce-reduce create reduce actions
    // for the first reduce item (whatever comes first in the set).
    foreach (var conflict in reduceReduceConflicts) {
        var reduceItems = stateData.ReduceItems.SelectByLookahead(conflict);
        var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
        state.Actions[conflict] = action;
    }
    //Update ResolvedConflicts and Conflicts sets
    stateData.ResolvedConflicts.UnionWith(shiftReduceConflicts);
    stateData.ResolvedConflicts.UnionWith(reduceReduceConflicts);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
// Reduces a transient non-terminal: no node is created for it; instead the first
// meaningful child (scanning from the top of the stack downward) is promoted as the
// reduction result.  If every child is skippable, returns an empty transient node
// covering the reduced span.
private ParseTreeNode ReduceTransientNonTerminal(ParserAction action)
{
    var topIndex = Context.ParserStack.Count - 1;
    var childCount = action.ReduceProduction.RValues.Count;
    for (int i = 0; i < childCount; i++) {
        var child = Context.ParserStack[topIndex - i];
        if (ShouldSkipChildNode(child)) {
            continue;
        }
        CheckCreateAstNode(child);
        return(child);
    }
    //Otherwise return an empty transient node; if it is part of the list, the list will skip it
    var span = ComputeNewNodeSpan(childCount);
    return(new ParseTreeNode(action.ReduceProduction, span));
}
// Writes a human-readable description of <paramref name="action"/> to
// <paramref name="output"/> and returns the same writer for chaining.
// ShiftReduce and Reduce produce identical layout apart from the header line,
// so that layout is factored into a local helper; the emitted text is unchanged.
private StreamWriter DescribeAction(
    IReportData data,
    ParserAction action,
    StreamWriter output,
    string indent)
{
    void WriteRule(string header)
    {
        output.Write(indent);
        output.WriteLine(header);
        output.Write(indent + indent);
        DescribeRule(data, action.ProductionId, output);
        output.WriteLine();
    }

    switch (action.Kind)
    {
        case ParserActionKind.Shift:
            output.Write(indent);
            output.Write("Shift to the state I");
            output.Write(action.State + "");
            output.WriteLine(":");
            // Recurse with doubled indent so nested state detail is visually offset.
            DescribeState(data, action.State, output, indent + indent);
            break;

        case ParserActionKind.ShiftReduce:
            WriteRule("Shift-Reduce on the rule:");
            break;

        case ParserActionKind.Reduce:
            WriteRule("Reduce on the rule:");
            break;

        case ParserActionKind.Accept:
            output.Write(indent);
            output.WriteLine("Accept.");
            break;
    }
    return output;
}
// Builds the flat conflict-action table: for every conflicting (state, token) cell a
// reference action is stored whose Value1 is the offset into conflictActionTable and
// whose Size is the number of underlying actions appended there.
private void BuildConflictTable()
{
    var encodedActions = new List<int>();
    foreach (var conflict in transitionToConflict.Values)
    {
        var reference = new ParserAction
        {
            Kind = ParserActionKind.Conflict,
            Value1 = encodedActions.Count,
            Size = (short)conflict.Actions.Count
        };
        actionTable.Set(conflict.State, conflict.Token, ParserAction.Encode(reference));
        foreach (var conflictingAction in conflict.Actions)
        {
            encodedActions.Add(ParserAction.Encode(conflictingAction));
        }
    }
    this.conflictActionTable = encodedActions.ToArray();
}
// Enumerates the parser action(s) for a (state, token) cell: conflict cells expand
// into their underlying actions from the conflict-action table; Fail/empty cells
// yield nothing; everything else yields the single decoded action.
private IEnumerable<ParserAction> GetAllParserActions(int state, int token)
{
    var decoded = ParserAction.Decode(data.ParserActionTable.Get(state, token));
    if (decoded == null || decoded.Kind == ParserActionKind.Fail)
    {
        yield break;
    }
    if (decoded.Kind == ParserActionKind.Conflict)
    {
        for (int offset = 0; offset != decoded.Size; ++offset)
        {
            yield return ParserAction.Decode(
                data.ParserConflictActionTable[decoded.Value1 + offset]);
        }
    }
    else
    {
        yield return decoded;
    }
}
// Returns true if parser states p and q may be merged: for every terminal their table
// cells are identical, or hold equivalent shifts with compatible reductions.  States
// containing conflict cells are conservatively never merged.
private static bool AreCompatibleStates(LanguageData data, int[] terms, ITable<int> table, int p, int q)
{
    foreach (int term in terms) {
        var pCell = table.Get(p, term);
        var qCell = table.Get(q, term);
        var pAction = ParserAction.Decode(pCell);
        var qAction = ParserAction.Decode(qCell);
        if (pCell != qCell && !HaveSameShifts(data, pAction, qAction)) {
            return(false);
        }
        // NOTE(review): the Kind accesses below assume Decode never returns null for
        // these cells - verify behavior for empty/fail cells.
        if (pAction.Kind == ParserActionKind.Conflict || qAction.Kind == ParserActionKind.Conflict) {
            Console.WriteLine("Skipped conflicting {0}<->{1} states", p, q);
            return(false);
        }
        if (!HaveComptibleReductions(data, pAction, qAction)) {
            /*
             * Console.WriteLine(
             *  "Skipped incompatible {0}<->{1} state reductions {2}<->{3}",
             *  p, q,
             *  pAction, qAction);
             */
            return(false);
        }
    }
    return(true);
}
// Executes a reduce action: builds the parse-tree node for the production (choosing the
// list-builder / list-container / transient / regular strategy), pops the RHS off the
// parser stack, and performs the LALR "goto" shift over the production's non-terminal.
private void ExecuteReduce(ParserAction action)
{
    var reduceProduction = action.ReduceProduction;
    ParseTreeNode newNode;
    if (reduceProduction.IsSet(ProductionFlags.IsListBuilder)) {
        newNode = ReduceExistingList(action);
    } else if (reduceProduction.LValue.FlagIsSet(TermFlags.IsListContainer)) {
        newNode = ReduceListContainer(action);
    } else if (reduceProduction.LValue.FlagIsSet(TermFlags.IsTransient)) {
        newNode = ReduceTransientNonTerminal(action);
    } else {
        newNode = ReduceRegularNode(action);
    }
    //final reduce actions ----------------------------------------------------------
    Context.ParserStack.Pop(reduceProduction.RValues.Count);
    //Push new node into stack and move to new state
    //First read the state from top of the stack
    Context.CurrentParserState = Context.ParserStack.Top.State;
    if (_traceEnabled) {
        Context.AddTrace(Resources.MsgTracePoppedState, reduceProduction.LValue.Name);
    }
    // Shift to new state (LALR) - execute shift over non-terminal
    var shift = Context.CurrentParserState.Actions[reduceProduction.LValue];
    Context.ParserStack.Push(newNode, shift.NewState);
    Context.CurrentParserState = shift.NewState;
}
// Lets the conflict action resolve itself at parse time (via grammar/context) and
// executes the chosen outcome; anything other than Reduce or Operator defaults
// to shifting the original action.
private void ExecuteConflictAction(ParserAction action)
{
    var resolution = action.ResolveConflict(_grammar, Context);
    if (resolution.Result == ParserActionType.Reduce)
    {
        ExecuteReduce(new ParserAction(ParserActionType.Reduce, null, resolution.ReduceProduction));
    }
    else if (resolution.Result == ParserActionType.Operator)
    {
        ExecuteOperatorAction(new ParserAction(ParserActionType.Operator, action.NewState, resolution.ReduceProduction));
    }
    else
    {
        // ParserActionType.Shift and any other result fall back to a plain shift.
        ExecuteShift(action);
    }
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTraceConflictResolved);
}
// Expands the parser state list starting at initialIndex: for every state (including
// states created during this very loop - the list grows while iterating), creates a
// shift action and a target state for each shiftable term, and links each shift item
// to its shifted core item in the target state.
private void ExpandParserStateList(int initialIndex)
{
    // Iterate through states (while new ones are created) and create shift transitions and new states
    for (int index = initialIndex; index < Data.States.Count; index++) {
        var state = Data.States[index];
        //Get all possible shifts
        foreach (var term in state.BuilderData.ShiftTerms) {
            var shiftItems = state.BuilderData.ShiftItems.SelectByCurrent(term);
            //Get set of shifted cores and find/create target state
            var shiftedCoreItems = shiftItems.GetShiftedCores();
            var newState = FindOrCreateState(shiftedCoreItems);
            //Create shift action
            var newAction = new ParserAction(ParserActionType.Shift, newState, null);
            state.Actions[term] = newAction;
            //Link items in old/new states
            foreach (var shiftItem in shiftItems) {
                shiftItem.ShiftedItem = newState.BuilderData.AllItems.FindByCore(shiftItem.Core.ShiftedItem);
            } //foreach shiftItem
        } //foreach term
    } //for index
} //method
// Executes a reduce action: builds the result node for the production, pops the RHS,
// performs the LALR goto over the non-terminal, propagates the first child's comment
// block up to the new node, and fires the production's OnReduced event.
private void ExecuteReduce(ParserAction action)
{
    var reduceProduction = action.ReduceProduction;
    ParseTreeNode resultNode;
    if(reduceProduction.IsSet(ProductionFlags.IsListBuilder))
        resultNode = ReduceExistingList(action);
    else if(reduceProduction.LValue.Flags.IsSet(TermFlags.IsListContainer))
        resultNode = ReduceListContainer(action);
    else if (reduceProduction.LValue.Flags.IsSet(TermFlags.IsTransient))
        resultNode = ReduceTransientNonTerminal(action);
    else
        resultNode = ReduceRegularNode(action);
    //final reduce actions ----------------------------------------------------------
    Context.ParserStack.Pop(reduceProduction.RValues.Count);
    //Push new node into stack and move to new state
    //First read the state from top of the stack
    Context.CurrentParserState = Context.ParserStack.Top.State;
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTracePoppedState, reduceProduction.LValue.Name);
    // Shift to new state (LALR) - execute shift over non-terminal
    var shift = Context.CurrentParserState.Actions[reduceProduction.LValue];
    Context.ParserStack.Push(resultNode, shift.NewState);
    Context.CurrentParserState = shift.NewState;
    //Copy comment block from first child; if comments precede child node, they precede the parent as well.
    if (resultNode.ChildNodes.Count > 0)
        resultNode.Comments = resultNode.ChildNodes[0].Comments;
    //Invoke event
    reduceProduction.LValue.OnReduced(Context, reduceProduction, resultNode);
}
// Finalizes a successful parse: pops the root node, creates its AST node when the
// grammar requests AST construction, and installs it as the parse-tree root.
// AST nodes are normally created as children are popped into a parent; the root
// has no parent, so it is handled here.
private void ExecuteAccept(ParserAction action)
{
    var root = Stack.Pop();
    var buildAst = _grammar.FlagIsSet(LanguageFlags.CreateAst);
    if (buildAst)
    {
        SafeCreateAstNode(root);
    }
    _context.CurrentParseTree.Root = root;
}
// Creates reduce actions not produced earlier by conflict resolution.  A state with no
// shifts and exactly one reduce item gets a DefaultAction (applied regardless of
// lookahead); otherwise one reduce action is added per lookahead that does not yet
// have an action - existing actions win, preserving prior conflict resolutions.
private void CreateRemainingReduceActions()
{
    foreach (var state in Data.States) {
        var stateData = state.BuilderData;
        if (stateData.ShiftItems.Count == 0 && stateData.ReduceItems.Count == 1) {
            state.DefaultAction = new ParserAction(ParserActionType.Reduce, null, stateData.ReduceItems.First().Core.Production);
            continue;
        }
        //now create actions
        foreach (var item in state.BuilderData.ReduceItems) {
            var action = new ParserAction(ParserActionType.Reduce, null, item.Core.Production);
            foreach (var lkh in item.Lookaheads) {
                if (state.Actions.ContainsKey(lkh))
                    continue;
                state.Actions[lkh] = action;
            }
        }//foreach item
    }//foreach state
}
//Resolve to default actions
// Reports remaining conflicts as grammar errors and installs default resolutions:
// shift wins shift-reduce conflicts (shift actions already exist), and each
// reduce-reduce conflict reduces by the first matching reduce item.  Resolved
// conflicts are then moved out of the state's Conflicts set.
private void ReportAndCreateDefaultActionsForConflicts(ParserState state)
{
    var shiftReduceConflicts = state.BuilderData.GetShiftReduceConflicts();
    var reduceReduceConflicts = state.BuilderData.GetReduceReduceConflicts();
    var stateData = state.BuilderData;
    if (shiftReduceConflicts.Count > 0)
        _language.Errors.Add(GrammarErrorLevel.Conflict, state, Resources.ErrSRConflict, state, shiftReduceConflicts.ToString());
    if (reduceReduceConflicts.Count > 0)
        _language.Errors.Add(GrammarErrorLevel.Conflict, state, Resources.ErrRRConflict, state, reduceReduceConflicts.ToString());
    //Create default actions for these conflicts. For shift-reduce, default action is shift, and shift action already
    // exist for all shifts from the state, so we don't need to do anything, only report it
    //For reduce-reduce create reduce actions for the first reduce item (whatever comes first in the set).
    foreach (var conflict in reduceReduceConflicts) {
        var reduceItems = stateData.ReduceItems.SelectByLookahead(conflict);
        var firstProd = reduceItems.First().Core.Production;
        var action = new ParserAction(ParserActionType.Reduce, null, firstProd);
        state.Actions[conflict] = action;
    }
    //Update ResolvedConflicts and Conflicts sets
    stateData.ResolvedConflicts.UnionWith(shiftReduceConflicts);
    stateData.ResolvedConflicts.UnionWith(reduceReduceConflicts);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}
// Resolves a code-based conflict by raising the grammar's OnResolvingConflict event
// and executing whatever action the user code selected (default: shift).  Prefixes
// the current trace entry so the trace shows the conflict was resolved in code.
private void ExecuteConflictAction(ParserAction action)
{
    var args = new ConflictResolutionArgs(_context, action);
    _grammar.OnResolvingConflict(args);
    switch(args.Result) {
        case ParserActionType.Reduce:
            ExecuteReduce(args.ReduceProduction);
            break;
        case ParserActionType.Operator:
            ExecuteOperatorAction(action.NewState, args.ReduceProduction);
            break;
        case ParserActionType.Shift:
        default:
            ExecuteShift(action.NewState);
            break;
    }
    if (_currentTraceEntry != null) {
        _currentTraceEntry.Message = "(Conflict resolved in code) " + _currentTraceEntry.Message;
    }
}
/// <summary>
/// Shift entry of the parser table: performs <paramref name="action"/> and then
/// moves the parser to state <paramref name="next"/>.
/// </summary>
public ParserShift(ParserAction action, ParseState next)
    : base(action)
{
    this.m_next = next;
}
// Raises the grammar's conflict-resolution callback and executes the chosen action;
// any result other than Reduce or Operator falls back to shifting the original action.
// Always records the resolution in the parser trace.
private void ExecuteConflictAction(ParserAction action)
{
    var args = new ConflictResolutionArgs(Context, action);
    _grammar.OnResolvingConflict(args);
    if (args.Result == ParserActionType.Reduce)
    {
        ExecuteReduce(new ParserAction(ParserActionType.Reduce, null, args.ReduceProduction));
    }
    else if (args.Result == ParserActionType.Operator)
    {
        ExecuteOperatorAction(new ParserAction(ParserActionType.Operator, action.NewState, args.ReduceProduction));
    }
    else
    {
        ExecuteShift(action);
    }
    Context.AddTrace(O2_Misc_Microsoft_MPL_Libs.Irony_Parser.Resources.MsgTraceConflictResolved);
}
// --> START EDIT ALEX
// Error-recovery variant of reduce: pops a single stack entry (not the full RHS),
// pushes a fresh node for the production's LValue, and performs the LALR goto.
// NOTE(review): Pop(1) is used regardless of ReduceProduction.RValues.Count -
// presumably intentional for the error-production shape, but verify.
private void ExecuteReduceOnError(ParserAction action)
{
    var reduceProduction = action.ReduceProduction;
    //final reduce actions ----------------------------------------------------------
    Context.ParserStack.Pop(1);
    //Push new node into stack and move to new state
    //First read the state from top of the stack
    Context.CurrentParserState = Context.ParserStack.Top.State;
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTracePoppedState, reduceProduction.LValue.Name);
    // Shift to new state (LALR) - execute shift over non-terminal
    var shift = Context.CurrentParserState.Actions[reduceProduction.LValue];
    Context.ParserStack.Push(new ParseTreeNode(action.ReduceProduction.LValue), shift.NewState);
    Context.CurrentParserState = shift.NewState;
}
/// <summary>Caches a parser action together with the symbol it applies to.</summary>
public ParserCachedAction(ParserAction action, ParserSymbol symbol)
{
    Action = action;
    Symbol = symbol;
}
// Builds a regular (non-list, non-transient) parse-tree node for the reduced
// production.  Children come from the parser stack; punctuation/empty-transient
// children are skipped.  Precedence/associativity are inherited from any child that
// carries precedence (the last such child wins).
private ParseTreeNode ReduceRegularNode(ParserAction action)
{
    var childCount = action.ReduceProduction.RValues.Count;
    int firstChildIndex = Context.ParserStack.Count - childCount;
    var span = ComputeNewNodeSpan(childCount);
    var newNode = new ParseTreeNode(action.ReduceProduction, span);
    for(int i = 0; i < childCount; i++) {
        var childNode = Context.ParserStack[firstChildIndex + i];
        if(SkipChildNode(childNode))
            continue; //skip punctuation or empty transient nodes
        CheckCreateAstNode(childNode); //AST nodes for lists and for terminals are created here
        //Inherit precedence and associativity
        if(childNode.Precedence != BnfTerm.NoPrecedence) {
            newNode.Precedence = childNode.Precedence;
            newNode.Associativity = childNode.Associativity;
        }
        newNode.ChildNodes.Add(childNode);
    }//for i
    return newNode;
}
/// <summary>Caches a parser action with no associated symbol.</summary>
public ParserCachedAction(ParserAction action)
{
    Action = action;
}
// Packs one transition into the byte-per-cell parser table.
// Cell index: (state << 8) | code; cell value: (action << 4) | target-state.
// When next is ParserState.Invalid the transition stays in the current state.
// NOTE(review): the byte cast assumes both the action and the state fit in 4 bits
// each (values are truncated otherwise) - confirm the enum ranges.
public void Add(int code, ParserState state, ParserAction action, ParserState next = ParserState.Invalid)
{
    table [(int)state << 8 | code] = (byte)((int)action << 4 | (int)(next == ParserState.Invalid ? state : next));
}
// Performs a non-canonical jump: switches the automaton to the action's target state
// without consuming input or modifying the stack.
private void ExecuteNonCanonicalJump(ParserAction action)
{
    _currentState = action.NewState;
}
// Attempts error recovery: locates a state on the stack that can shift on the error
// production, injects a fake input node for that state's first expected term, and
// drives the parser until the fake input is consumed.  Returns false when no
// error-shift state exists; true once recovery was attempted (the follow-up lookup
// may still clear the input when the recovered state cannot accept the real input).
private bool TryRecoverImpl()
{
    //1. We need to find a state in the stack that has a shift item based on error production (with error token),
    //   and error terminal is current. This state would have a shift action on error token.
    ParserAction nextAction = FindErrorShiftActionInStackTemp();
    if (nextAction == null) {
        return(false);
    }
    var firstBnfTerm = nextAction.NewState.Actions.Keys.FirstOrDefault();
    Context.AddTrace(Resources.MsgTraceRecoverReducing);
    Context.AddTrace(Resources.MsgTraceRecoverAction, nextAction);
    // Inject faked node
    var newLineNode = new ParseTreeNode(firstBnfTerm);
    Context.ParserInputStack.Insert(0, newLineNode);
    var saveParserInput = Context.CurrentParserInput;
    Context.CurrentParserInput = newLineNode;
    // Drive the parser on the faked input until no action applies or input is consumed.
    nextAction = FindActionForStateAndInput();
    while (nextAction != null && Context.CurrentParserInput != null) {
        switch (nextAction.ActionType) {
        case ParserActionType.Shift:
            ExecuteShift(nextAction);
            break;
        case ParserActionType.Operator:
            ExecuteOperatorAction(nextAction);
            break;
        case ParserActionType.Reduce:
            ExecuteReduce(nextAction);
            break;
        case ParserActionType.Code:
            ExecuteConflictAction(nextAction);
            break;
        case ParserActionType.Accept:
            ExecuteAccept(nextAction);
            break;
        }
        nextAction = FindActionForStateAndInput();
    }
    // Remove the faked node and restore the real input.
    Context.ParserInputStack.RemoveAt(0);
    Context.CurrentParserInput = saveParserInput;
    if (!Context.CurrentParserState.Actions.TryGetValue(Context.CurrentParserInput.Term, out nextAction)) {
        Context.ParserInputStack.Clear();
        Context.CurrentParserInput = null;
    }
    return(true);
    //ExecuteShiftTemp(firstBnfTerm, nextAction);
}//method
}//method

// Pushes a fresh node for <paramref name="term"/> and moves to the shift action's
// target state (helper used during error recovery).
private void ExecuteShiftTemp(BnfTerm term, ParserAction action)
{
    Context.ParserStack.Push(new ParseTreeNode(term), action.NewState);
    Context.CurrentParserState = action.NewState;
}
// Demo: builds an ambiguous arithmetic-expression grammar for ParserGenerator,
// resolves the ambiguity with precedence/associativity conflict solvers, generates
// an LALR parser, and prints rules/states/table plus the generated C# parser code.
// The commented-out sections keep the equivalent unambiguous grammar and a full
// scanner+parser driving sample for reference.
static void ProcessPGSample()
{
    var gen = new ParserGenerator();

    // Non-Terminals
    var E = gen.CreateNewProduction("E", false);
    //var T = gen.CreateNewProduction("T", false);
    //var F = gen.CreateNewProduction("F", false);
    //var func = gen.CreateNewProduction("func", false);
    //var arguments = gen.CreateNewProduction("args", false);

    // Terminals
    var plus = gen.CreateNewProduction("+");         // +
    var minus = gen.CreateNewProduction("-");        // -
    var multiple = gen.CreateNewProduction("*");     // *
    var divide = gen.CreateNewProduction("/");       // /
    //var id = gen.CreateNewProduction("id");        // [_$a-zA-Z][_$a-zA-Z0-9]*
    var op_open = gen.CreateNewProduction("(");      // (
    var op_close = gen.CreateNewProduction(")");     // )
    var num = gen.CreateNewProduction("num");        // [0-9]+
    //var split = gen.CreateNewProduction("split");  // ,

    //exp |= exp + plus + term;
    //exp |= exp + minus + term;
    //exp |= term;
    //term |= term + multiple + factor;
    //term |= term + divide + factor;
    //term |= factor;
    //factor |= op_open + exp + op_close;
    //factor |= num;
    //factor |= id;
    //factor |= func;
    //func |= id + op_open + arguments + op_close;
    //arguments |= id;
    //arguments |= arguments + split + id;
    //arguments |= ParserGenerator.EmptyString;

    // NOTE(review): the double semicolons below are stray empty statements (harmless).
    E |= E + plus + E + ParserAction.Create(x => { });;
    E |= E + minus + E + ParserAction.Create(x => { });;
    E |= E + multiple + E + ParserAction.Create(x => { });;
    E |= E + divide + E + ParserAction.Create(x => { });;
    E |= minus + E + ParserAction.Create(x => { });;
    E |= op_open + E + op_close + ParserAction.Create(x => { });;
    E |= num + ParserAction.Create(x => { });;

    // Right associativity for unary minus (production index 4); left associativity
    // for the binary operator pairs, listed in increasing precedence.
    gen.PushConflictSolver(false, new Tuple<ParserProduction, int>(E, 4));
    gen.PushConflictSolver(true, multiple, divide);
    gen.PushConflictSolver(true, plus, minus);

    gen.PushStarts(E);
    gen.PrintProductionRules();
    gen.GenerateLALR2();
    gen.PrintStates();
    gen.PrintTable();

    Console.Instance.WriteLine(gen.GlobalPrinter.ToString());
    Console.Instance.WriteLine(gen.CreateShiftReduceParserInstance().ToCSCode("Calculator"));

    //////////////////////////////////////////////////////
    //var scanner_gen = new ScannerGenerator();
    //
    //scanner_gen.PushRule("", @"[\r\n ]");  // Skip characters
    //scanner_gen.PushRule("+", @"\+");
    //scanner_gen.PushRule("-", @"\-");
    //scanner_gen.PushRule("*", @"\*");
    //scanner_gen.PushRule("/", @"\/");
    //scanner_gen.PushRule("(", @"\(");
    //scanner_gen.PushRule(")", @"\)");
    //scanner_gen.PushRule("num", @"[0-9]+(\.[0-9]+)?([Ee][\+\-]?[0-9]+)?");
    //scanner_gen.Generate();
    //var ss = scanner_gen.CreateScannerInstance();
    //var pp = gen.CreateShiftReduceParserInstance();
    //
    //Action<string, string, int, int> insert = (string x, string y, int a, int b) =>
    //{
    //    pp.Insert(x, y);
    //    if (pp.Error()) throw new Exception($"[COMPILER] Parser error! L:{a}, C:{b}");
    //    while (pp.Reduce())
    //    {
    //        var l = pp.LatestReduce();
    //        Console.Instance.Write(l.Production.PadLeft(8) + " => ");
    //        Console.Instance.WriteLine(string.Join(" ", l.Childs.Select(z => z.Production)));
    //        Console.Instance.Write(l.Production.PadLeft(8) + " => ");
    //        Console.Instance.WriteLine(string.Join(" ", l.Childs.Select(z => z.Contents)));
    //        pp.Insert(x, y);
    //        if (pp.Error()) throw new Exception($"[COMPILER] Parser error! L:{a}, C:{b}");
    //    }
    //};
    //
    //try
    //{
    //    int ll = 0;
    //    var line = "5-(4+2*3-1)/(6+-5)";
    //    ss.AllocateTarget(line.Trim());
    //
    //    while (ss.Valid())
    //    {
    //        var tk = ss.Next();
    //        if (ss.Error())
    //            throw new Exception("[COMPILER] Tokenize error! '" + tk + "'");
    //        insert(tk.Item1, tk.Item2, ll, tk.Item4);
    //    }
    //
    //    if (pp.Error()) throw new Exception();
    //    insert("$", "$", -1, -1);
    //
    //    var tree = pp.Tree;
    //    CALtoCS.PrintTree(tree.root, "", true);
    //}
    //catch (Exception e)
    //{
    //    Console.Instance.WriteLine(e.Message);
    //}
}
// Attempts to resolve a conflict on lookahead <paramref name="conflict"/> using
// grammar hints, in priority order:
//  1) ResolveToReduce hint on a reduce item  -> install a reduce action;
//  2) ResolveToShift hint on a shift item    -> keep the existing shift action;
//  3) ResolveInCode hint on any related item (including reduced-lookahead sources)
//     -> install a custom-code conflict action.
// Returns true when a hint resolved the conflict, false otherwise.
private bool TryResolveConflictByHints(ParserState state, BnfTerm conflict)
{
    var stateData = state.BuilderData;
    //reduce hints
    var reduceItems = stateData.ReduceItems.SelectByLookahead(conflict);
    foreach (var reduceItem in reduceItems) {
        if (reduceItem.Core.Hints != null) {
            foreach (var hint in reduceItem.Core.Hints) {
                if (hint.HintType == HintType.ResolveToReduce) {
                    var newAction = ParserAction.CreateReduce(reduceItem.Core.Production);
                    state.Actions[conflict] = newAction; //replace/add reduce action
                    return(true);
                }
            }
        }
    }
    //Shift hints
    var shiftItems = stateData.ShiftItems.SelectByCurrent(conflict);
    foreach (var shiftItem in shiftItems) {
        if (shiftItem.Core.Hints != null) {
            foreach (var hint in shiftItem.Core.Hints) {
                if (hint.HintType == HintType.ResolveToShift) {
                    //shift action is already there
                    return(true);
                }
            }
        }
    }
    //code hints
    // first prepare data for conflict action: reduceProduction (for possible reduce) and newState (for possible shift)
    var reduceProduction = reduceItems.First().Core.Production; //take first of reduce productions
    ParserState newState = (state.Actions.ContainsKey(conflict) ? state.Actions[conflict].NewState : null);
    // Get all items that might contain hints; first take all shift items and reduce items in conflict;
    // we should also add lookahead sources of reduce items. Lookahead source is an LR item that produces the lookahead,
    // so if it contains a code hint right before the lookahead term, then it applies to this conflict as well.
    var allItems = new LRItemList();
    allItems.AddRange(shiftItems);
    foreach (var reduceItem in reduceItems) {
        allItems.Add(reduceItem);
        allItems.AddRange(reduceItem.ReducedLookaheadSources);
    }
    // Scan all items and try to find hint with resolution type Code
    foreach (var item in allItems) {
        if (item.Core.Hints != null) {
            foreach (var hint in item.Core.Hints) {
                if (hint.HintType == HintType.ResolveInCode) {
                    //found hint with resolution type "code" - this is instruction to use custom code here to resolve the conflict
                    // create new ConflictAction and place it into Actions table
                    var newAction = ParserAction.CreateCodeAction(newState, reduceProduction);
                    state.Actions[conflict] = newAction; //replace/add reduce action
                    return(true);
                }
            }
        }
    }
    return(false);
}
// Resolves an operator-based ambiguity into a concrete shift or reduce, executes it,
// and then records the resolved action type in the parser trace.
private void ExecuteOperatorAction(ParserAction action)
{
    var resolvedType = GetActionTypeForOperation(action);
    if (resolvedType == ParserActionType.Shift)
    {
        ExecuteShift(action);
    }
    else if (resolvedType == ParserActionType.Reduce)
    {
        ExecuteReduce(action);
    }
    Context.AddTrace(O2_Misc_Microsoft_MPL_Libs.Irony_Parser.Resources.MsgTraceOpResolved, resolvedType);
}
// Finalizes a successful parse: pops the root node, unwraps a transient root into its
// single child, otherwise creates the root's AST node when the grammar requests ASTs,
// clears the node's State field (only needed while on the stack), and installs the
// node as the parse-tree root.
private void ExecuteAccept(ParserAction action)
{
    var rootNode = Stack.Pop();
    //it might be transient node; if yes, take it's first child
    if (rootNode.Term.IsSet(TermOptions.IsTransient)) {
        rootNode = rootNode.ChildNodes[0];
    } else {
        //otherwise, create AST node if necessary
        if (_grammar.FlagIsSet(LanguageFlags.CreateAst))
            SafeCreateAstNode(rootNode);
    }
    rootNode.State = null; //clear the State field, we need only when node is in the stack
    _context.CurrentParseTree.Root = rootNode;
}
// Produces a human-readable dump of all parser states for diagnostics: per-state
// conflicts, shift/reduce LR items, shift/jump transitions, and non-canonical jump
// targets.  The exact text layout is relied upon when locating states from a parser
// trace, so string literals must not be altered.
public static string PrintStateList(LanguageData language)
{
    StringBuilder sb = new StringBuilder();
    foreach (ParserState state in language.ParserData.States) {
        sb.Append("State " + state.Name);
        if (state.BuilderData.IsInadequate) {
            sb.Append(" (Inadequate)");
        }
        sb.AppendLine();
        var srConflicts = state.BuilderData.GetShiftReduceConflicts();
        if (srConflicts.Count > 0) {
            sb.AppendLine(" Shift-reduce conflicts on inputs: " + srConflicts.ToString());
        }
        var ssConflicts = state.BuilderData.GetReduceReduceConflicts();
        if (ssConflicts.Count > 0) {
            sb.AppendLine(" Reduce-reduce conflicts on inputs: " + ssConflicts.ToString());
        }
        //LRItems
        if (state.BuilderData.ShiftItems.Count > 0) {
            sb.AppendLine(" Shift items:");
            foreach (var item in state.BuilderData.ShiftItems) {
                sb.AppendLine(" " + item.ToString());
            }
        }
        if (state.BuilderData.ReduceItems.Count > 0) {
            sb.AppendLine(" Reduce items:");
            foreach (LRItem item in state.BuilderData.ReduceItems) {
                sb.AppendLine(" " + item.ToString(state.BuilderData.JumpLookaheads));
            }
        }
        // List shift/jump transitions; the "Shifts:" header is emitted once, lazily.
        bool headerPrinted = false;
        foreach (BnfTerm key in state.Actions.Keys) {
            ParserAction action = state.Actions[key];
            if (action.ActionType != ParserActionType.Shift && action.ActionType != ParserActionType.Jump) {
                continue;
            }
            if (!headerPrinted) {
                sb.Append(" Shifts: ");
            }
            headerPrinted = true;
            sb.Append(key.ToString());
            if (action.ActionType == ParserActionType.Shift) {
                sb.Append("->"); //shift
            }
            sb.Append(action.NewState.Name);
            sb.Append(", ");
        }
        sb.AppendLine();
        if (state.BuilderData.JumpLookaheads.Count > 0) {
            //two spaces between 'state' and state name - important for locating state from parser trace
            sb.AppendLine(" Jump to non-canonical state " + state.BuilderData.JumpTarget + " on lookaheads: " + state.BuilderData.JumpLookaheads.ToString());
        }
        sb.AppendLine();
    }//foreach
    return(sb.ToString());
}
// Builds a complete JSON grammar for ParserGenerator with semantic actions that
// construct a json_object/json_array/json_value tree in each node's UserContents,
// then generates and returns the extended shift-reduce parser instance.
static ShiftReduceParser CreateJSonParser()
{
    var gen = new ParserGenerator();

    // Non-terminals
    var JSON = gen.CreateNewProduction("JSON", false);
    var ARRAY = gen.CreateNewProduction("ARRAY", false);
    var OBJECT = gen.CreateNewProduction("OBJECT", false);
    var MEMBERS = gen.CreateNewProduction("MEMBERS", false);
    var PAIR = gen.CreateNewProduction("PAIR", false);
    var ELEMENTS = gen.CreateNewProduction("ELEMENTS", false);
    var VALUE = gen.CreateNewProduction("VALUE", false);

    // Terminals
    var object_starts = gen.CreateNewProduction("object_starts");
    var object_ends = gen.CreateNewProduction("object_ends");
    var comma = gen.CreateNewProduction("comma");
    var v_pair = gen.CreateNewProduction("v_pair");
    var array_starts = gen.CreateNewProduction("array_starts");
    var array_ends = gen.CreateNewProduction("array_ends");
    var v_true = gen.CreateNewProduction("v_true");
    var v_false = gen.CreateNewProduction("v_false");
    var v_null = gen.CreateNewProduction("v_null");
    var v_string = gen.CreateNewProduction("v_string");
    var v_number = gen.CreateNewProduction("v_number");

    // Root is an object or an array; propagate the child's value upward.
    JSON |= OBJECT + ParserAction.Create(x => x.UserContents = x.Childs[0].UserContents);
    JSON |= ARRAY + ParserAction.Create(x => x.UserContents = x.Childs[0].UserContents);
    OBJECT |= object_starts + object_ends + ParserAction.Create(x => x.UserContents = new json_object());
    OBJECT |= object_starts + MEMBERS + object_ends + ParserAction.Create(x => x.UserContents = x.Childs[1].UserContents);
    // MEMBERS builds the key/value list right-recursively: the single-pair case
    // creates the object; the recursive case prepends to the tail's object.
    MEMBERS |= PAIR + ParserAction.Create(x => {
        var jo = new json_object();
        jo.keyvalue.Add(new KeyValuePair<string, json_value>(x.Childs[0].Childs[0].Contents, x.Childs[0].Childs[2].UserContents as json_value));
        x.UserContents = jo;
    });
    MEMBERS |= PAIR + comma + MEMBERS + ParserAction.Create(x => {
        var jo = x.Childs[2].UserContents as json_object;
        jo.keyvalue.Insert(0, new KeyValuePair<string, json_value>(x.Childs[0].Childs[0].Contents, x.Childs[0].Childs[2].UserContents as json_value));
        x.UserContents = jo;
    });
    PAIR |= v_string + v_pair + VALUE + ParserAction.Create(x => { });
    ARRAY |= array_starts + array_ends + ParserAction.Create(x => x.UserContents = new json_array());
    ARRAY |= array_starts + ELEMENTS + array_ends + ParserAction.Create(x => x.UserContents = x.Childs[1].UserContents);
    // ELEMENTS mirrors MEMBERS for array values.
    ELEMENTS |= VALUE + ParserAction.Create(x => {
        var ja = new json_array();
        ja.array.Add(x.Childs[0].UserContents as json_value);
        x.UserContents = ja;
    });
    ELEMENTS |= VALUE + comma + ELEMENTS + ParserAction.Create(x => {
        var ja = x.Childs[2].UserContents as json_array;
        ja.array.Insert(0, x.Childs[0].UserContents as json_value);
        x.UserContents = ja;
    });
    VALUE |= v_string + ParserAction.Create(x => x.UserContents = new json_string { str = x.Contents });
    VALUE |= v_number + ParserAction.Create(x => x.UserContents = new json_numeric { numstr = x.Contents });
    VALUE |= OBJECT + ParserAction.Create(x => x.UserContents = x.Childs[0].UserContents);
    VALUE |= ARRAY + ParserAction.Create(x => x.UserContents = x.Childs[0].UserContents);
    VALUE |= v_true + ParserAction.Create(x => x.UserContents = new json_state { type = json_token.v_true });
    VALUE |= v_false + ParserAction.Create(x => x.UserContents = new json_state { type = json_token.v_false });
    VALUE |= v_null + ParserAction.Create(x => x.UserContents = new json_state { type = json_token.v_null });

    gen.PushStarts(JSON);
    gen.PrintProductionRules();
    gen.Generate();
    gen.PrintStates();
    gen.PrintTable();
#if false
    Console.WriteLine(gen.GlobalPrinter.ToString());
    Console.WriteLine(gen.CreateExtendedShiftReduceParserInstance().ToCSCode("json_parser"));
#endif
    return(gen.CreateExtendedShiftReduceParserInstance());
}
// Note that we create AST nodes
// Builds a regular (non-list, non-transient) parse-tree node.  Children come from the
// parser stack; punctuation/empty-transient children are skipped.  When the new node's
// term is an operator, it inherits the MAX precedence found among its children (with
// that child's associativity), covering BinOp->+|-|*|/ and multi-token operators.
private ParseTreeNode ReduceRegularNode(ParserAction action)
{
    var childCount = action.ReduceProduction.RValues.Count;
    int firstChildIndex = Context.ParserStack.Count - childCount;
    var span = ComputeNewNodeSpan(childCount);
    var newNode = new ParseTreeNode(action.ReduceProduction, span);
    var newIsOp = newNode.Term.Flags.IsSet(TermFlags.IsOperator);
    for(int i = 0; i < childCount; i++) {
        var childNode = Context.ParserStack[firstChildIndex + i];
        if(ShouldSkipChildNode(childNode))
            continue; //skip punctuation or empty transient nodes
        //AST nodes are created when we pop the (child) parse node from the stack, not when we push it into the stack.
        // See more in comments to CheckCreateAstNode method
        CheckCreateAstNode(childNode);
        //Inherit precedence and associativity, to cover a standard case: BinOp->+|-|*|/;
        // BinOp node should inherit precedence from underlying operator symbol. Keep in mind special case of SQL operator "NOT LIKE" which consists
        // of 2 tokens. We therefore inherit "max" precedence from any children
        if(newIsOp && childNode.Precedence != BnfTerm.NoPrecedence && childNode.Precedence > newNode.Precedence) {
            newNode.Precedence = childNode.Precedence;
            newNode.Associativity = childNode.Associativity;
        }
        newNode.ChildNodes.Add(childNode);
    }//for i
    return newNode;
}
// Builds a regular (non-list, non-transient) parse-tree node.  Children come from the
// parser stack; punctuation/empty-transient children are skipped.  Unlike the
// operator-aware variant, only single-child reductions inherit precedence and
// associativity (the wrapper case BinOp->+|-|*|/).
private ParseTreeNode ReduceRegularNode(ParserAction action)
{
    var childCount = action.ReduceProduction.RValues.Count;
    int firstChildIndex = Context.ParserStack.Count - childCount;
    var span = ComputeNewNodeSpan(childCount);
    var newNode = new ParseTreeNode(action.ReduceProduction, span);
    for(int i = 0; i < childCount; i++) {
        var childNode = Context.ParserStack[firstChildIndex + i];
        if(ShouldSkipChildNode(childNode))
            continue; //skip punctuation or empty transient nodes
        CheckCreateAstNode(childNode); //AST nodes for lists and for terminals are created here
        //For single-child reduces inherit precedence and associativity, to cover a standard case: BinOp->+|-|*|/;
        // BinOp node should inherit precedence from underlying operator symbol
        if(childCount == 1 && childNode.Precedence != BnfTerm.NoPrecedence) {
            newNode.Precedence = childNode.Precedence;
            newNode.Associativity = childNode.Associativity;
        }
        newNode.ChildNodes.Add(childNode);
    }//for i
    return newNode;
}
/// <summary>
/// Creates (or returns the cached) parser generator for ESRCAL.
/// </summary>
/// <returns>The shift-reduce parser built from the arithmetic-expression grammar.</returns>
private ShiftReduceParser get_pargen()
{
    // Build the parser only once; reuse the cached instance on later calls.
    if (pargen != null) { return(pargen); }
    var gen = new ParserGenerator();
    // Non-Terminals
    var expr = gen.CreateNewProduction("expr", false);
    // Terminals
    //var id = gen.CreateNewProduction("id");
    var num = gen.CreateNewProduction("num");
    //var str = gen.CreateNewProduction("str");
    var plus = gen.CreateNewProduction("plus");
    var minus = gen.CreateNewProduction("minus");
    var multiple = gen.CreateNewProduction("multiple");
    var divide = gen.CreateNewProduction("divide");
    //var loop = gen.CreateNewProduction("loop");
    var op_open = gen.CreateNewProduction("op_open");
    var op_close = gen.CreateNewProduction("op_close");
    //var pp_open = gen.CreateNewProduction("pp_open"); // [
    //var pp_close = gen.CreateNewProduction("pp_close"); // ]
    //var equal = gen.CreateNewProduction("equal");
    //var to = gen.CreateNewProduction("to");
    //var scolon = gen.CreateNewProduction("scolon");
    //var comma = gen.CreateNewProduction("comma");
    //var _foreach = gen.CreateNewProduction("foreach");
    //var _if = gen.CreateNewProduction("if");
    //var _else = gen.CreateNewProduction("else");

    // Grammar rules. Each ParserAction computes the semantic value (UserContents)
    // of the reduced node from its children's values.
    expr |= num + ParserAction.Create(x => x.UserContents = double.Parse(x.Contents));
    expr |= expr + plus + expr + ParserAction.Create(x => x.UserContents = (double)x.Childs[0].UserContents + (double)x.Childs[2].UserContents);
    expr |= expr + minus + expr + ParserAction.Create(x => x.UserContents = (double)x.Childs[0].UserContents - (double)x.Childs[2].UserContents);
    expr |= expr + multiple + expr + ParserAction.Create(x => x.UserContents = (double)x.Childs[0].UserContents * (double)x.Childs[2].UserContents);
    expr |= expr + divide + expr + ParserAction.Create(x => x.UserContents = (double)x.Childs[0].UserContents / (double)x.Childs[2].UserContents);
    expr |= minus + expr + ParserAction.Create(x => x.UserContents = -(double)x.Childs[1].UserContents);
    expr |= op_open + expr + op_close + ParserAction.Create(x => x.UserContents = x.Childs[1].UserContents);

    // NOTE(review): the push order of conflict solvers appears to encode operator
    // precedence - confirm against ParserGenerator before reordering these calls.
    // right associativity, -
    gen.PushConflictSolver(false, new Tuple <ParserProduction, int>(expr, 5));
    // left associativity, *, /
    gen.PushConflictSolver(true, multiple, divide);
    // left associativity, +, -
    gen.PushConflictSolver(true, plus, minus);
    try
    {
        gen.PushStarts(expr);
        gen.PrintProductionRules();
        gen.GenerateLALR();
        gen.PrintStates();
        gen.PrintTable();
    }
    catch (Exception e)
    {
        // Generation errors are reported to the in-app console rather than rethrown.
        Console.Console.Instance.WriteLine(e.Message);
    }
    Console.Console.Instance.WriteLine(gen.GlobalPrinter.ToString());
    // Cache and return the generated parser.
    return(pargen = gen.CreateShiftReduceParserInstance());
}
// Finishes a successful parse: pops the root node, ensures its AST node exists,
// and publishes it as the root of the current parse tree.
private void ExecuteAccept(ParserAction action)
{
    var rootNode = Context.ParserStack.Pop();
    CheckCreateAstNode(rootNode);
    Context.CurrentParseTree.Root = rootNode;
    Context.Status = ParserStatus.Accepted;
}
// Walks the (growing) state list starting at initialIndex, creating shift transitions
// and any target states they require. Newly created states are appended to
// Data.States and picked up by the same loop.
private void ExpandParserStateList(int initialIndex)
{
    for (int stateIndex = initialIndex; stateIndex < Data.States.Count; stateIndex++)
    {
        var currentState = Data.States[stateIndex];
        // Create one shift action per shiftable term in this state.
        foreach (var shiftTerm in currentState.BuilderData.ShiftTerms)
        {
            var itemsToShift = currentState.BuilderData.ShiftItems.SelectByCurrent(shiftTerm);
            // Compute the shifted cores and find or create the state they lead to.
            var shiftedCores = itemsToShift.GetShiftedCores();
            var targetState = FindOrCreateState(shiftedCores);
            currentState.Actions[shiftTerm] = new ParserAction(ParserActionType.Shift, targetState, null);
            // Link each item in this state to its shifted counterpart in the target state.
            foreach (var item in itemsToShift)
            {
                item.ShiftedItem = targetState.BuilderData.AllItems.FindByCore(item.Core.ShiftedItem);
            }
        }
    }
}
// Resolves an operator-based shift/reduce choice by comparing the precedence of the
// nearest precedence-bearing node on the stack with that of the current input.
private ParserActionType GetActionTypeForOperation(ParserAction action)
{
    for (int pos = Context.ParserStack.Count - 1; pos >= 0; pos--)
    {
        var stackNode = Context.ParserStack[pos];
        if (stackNode == null || stackNode.Precedence == BnfTerm.NoPrecedence)
            continue;
        var input = Context.CurrentParserInput;
        // Equal precedence: associativity decides - left-associative reduces,
        // anything else shifts.
        if (stackNode.Precedence == input.Precedence)
        {
            return input.Associativity == Associativity.Left
                ? ParserActionType.Reduce
                : ParserActionType.Shift;
        }
        // Higher precedence already on the stack means reduce first.
        return stackNode.Precedence > input.Precedence
            ? ParserActionType.Reduce
            : ParserActionType.Shift;
    }
    // No precedence-bearing node on the stack: plain shift.
    return ParserActionType.Shift;
}
// Initializes the transition with the given parser action and no extra payload.
public TransitionAction(ParserAction action) => Action = action;
// Executes a reduce action: builds the replacement node (dispatching on the kind of
// production), pops the reduced children, then shifts over the non-terminal (LALR goto).
private void ExecuteReduce(ParserAction action)
{
    var production = action.ReduceProduction;
    ParseTreeNode resultNode;
    if (production.IsSet(ProductionFlags.IsListBuilder))
        resultNode = ReduceExistingList(action);
    else if (production.LValue.FlagIsSet(TermFlags.IsListContainer))
        resultNode = ReduceListContainer(action);
    else if (production.LValue.FlagIsSet(TermFlags.IsTransient))
        resultNode = ReduceTransientNonTerminal(action);
    else
        resultNode = ReduceRegularNode(action);
    // Final reduce steps: pop the children and restore the exposed state.
    Context.ParserStack.Pop(production.RValues.Count);
    Context.CurrentParserState = Context.ParserStack.Top.State;
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTracePoppedState, production.LValue.Name);
    // Shift to the new state (LALR) - execute the goto over the non-terminal.
    var gotoAction = Context.CurrentParserState.Actions[production.LValue];
    Context.ParserStack.Push(resultNode, gotoAction.NewState);
    Context.CurrentParserState = gotoAction.NewState;
}
// Handles the list container created by MakePlusRule/MakeStarRule with
// allowTrailingDelimiter = true. The grammar's "real" list is this container; the
// members were accumulated under a transient "plus-list" child, so all grandchildren
// are copied from that child up into the container here.
private ParseTreeNode ReduceListContainer(ParserAction action)
{
    int memberCount = action.ReduceProduction.RValues.Count;
    int stackBase = Context.ParserStack.Count - memberCount;
    var container = new ParseTreeNode(action.ReduceProduction, ComputeNewNodeSpan(memberCount));
    // An empty production can occur for MakeStarRule; nothing to copy in that case.
    if (memberCount > 0)
    {
        // The transient list holding all members is the first child on the stack.
        var transientList = Context.ParserStack[stackBase];
        container.ChildNodes.AddRange(transientList.ChildNodes);
    }
    return container;
}
// Lets the grammar resolve a parse conflict, then dispatches on the chosen result.
private void ExecuteConflictAction(ParserAction action)
{
    var resolutionArgs = new ConflictResolutionArgs(_context, action);
    _grammar.OnResolvingConflict(resolutionArgs);
    if (resolutionArgs.Result == ParserActionType.Reduce)
    {
        ExecuteReduce(resolutionArgs.ReduceProduction);
    }
    else if (resolutionArgs.Result == ParserActionType.Operator)
    {
        ExecuteOperatorAction(action.NewState, resolutionArgs.ReduceProduction);
    }
    else
    {
        // Shift and any other result default to a shift.
        ExecuteShift(action.NewState);
    }
}
// Shifts the current input onto the stack and moves to the action's target state.
private void ExecuteShift(ParserAction action)
{
    var targetState = action.NewState;
    Context.ParserStack.Push(Context.CurrentParserInput, targetState);
    Context.CurrentParserState = targetState;
    Context.CurrentParserInput = null;
    // Read the next token only if the new state is NOT a single-reduce state
    // (single-reduce states carry a DefaultAction).
    if (targetState.DefaultAction == null)
        ReadInput();
}
/// <summary>
/// Creates (or returns the cached) parser generator for SRCAL.
/// </summary>
/// <returns>The extended shift-reduce parser built from the SRCAL grammar.</returns>
private ExtendedShiftReduceParser get_pargen()
{
    // Build the parser only once; reuse the cached instance on later calls.
    if (pargen != null) { return(pargen); }
    var gen = new ParserGenerator();

    // Non-Terminals
    var script = gen.CreateNewProduction("script", false);
    var line = gen.CreateNewProduction("line", false);
    var lines = gen.CreateNewProduction("lines", false);
    var expr = gen.CreateNewProduction("expr", false);
    var block = gen.CreateNewProduction("block", false);
    var iblock = gen.CreateNewProduction("iblock", false);
    var index = gen.CreateNewProduction("index", false);
    var variable = gen.CreateNewProduction("variable", false);
    var argument = gen.CreateNewProduction("argument", false);
    var function = gen.CreateNewProduction("function", false);
    var runnable = gen.CreateNewProduction("runnable", false);

    // Terminals
    var name = gen.CreateNewProduction("name");
    var _const = gen.CreateNewProduction("const"); // number | string
    var loop = gen.CreateNewProduction("loop");
    var op_open = gen.CreateNewProduction("op_open");
    var op_close = gen.CreateNewProduction("op_close");
    var pp_open = gen.CreateNewProduction("pp_open"); // [
    var pp_close = gen.CreateNewProduction("pp_close"); // ]
    var equal = gen.CreateNewProduction("equal");
    var to = gen.CreateNewProduction("to");
    var scolon = gen.CreateNewProduction("scolon");
    var comma = gen.CreateNewProduction("comma");
    var plus = gen.CreateNewProduction("plus"); // +
    var minus = gen.CreateNewProduction("minus"); // -
    var multiple = gen.CreateNewProduction("multiple"); // *
    var divide = gen.CreateNewProduction("divide"); // /
    var _foreach = gen.CreateNewProduction("foreach");
    var _if = gen.CreateNewProduction("if");
    var _else = gen.CreateNewProduction("else");

    // Grammar rules. Each ParserAction.Create((m, f, b, x) => ...) receives the
    // module, function, basic block, and the node being reduced. Many semantic
    // actions are still empty ({ }) - only the module/function/call/constant paths
    // are implemented below.
    // script: a non-empty script wraps its lines in a fresh LPModule with a "start"
    // function and one basic block; an empty script produces an empty module.
    script |= lines + ParserAction.Create((m, f, b, x) => { var module = new LPModule(); var sfunc = module.CreateFunction("start"); var bb = sfunc.CreateBasicBlock(); x.Childs[0].Action(module, sfunc, bb, x.Childs[0]); x.UserContents = module; });
    script |= ParserGenerator.EmptyString + ParserAction.Create((m, f, b, x) => { x.UserContents = new LPModule(); });
    block |= pp_open + iblock + pp_close + ParserAction.Create((m, f, b, x) => { });
    block |= line + ParserAction.Create((m, f, b, x) => { });
    iblock |= block + ParserAction.Create((m, f, b, x) => { });
    iblock |= lines + ParserAction.Create((m, f, b, x) => { });
    iblock |= ParserGenerator.EmptyString + ParserAction.Create((m, f, b, x) => { });
    line |= expr + ParserAction.Create((m, f, b, x) => { });
    // lines: propagate the action recursively through each expression.
    lines |= expr + ParserAction.Create((m, f, b, x) => { x.Childs[0].Action(m, f, b, x.Childs[0]); });
    lines |= expr + lines + ParserAction.Create((m, f, b, x) => { x.Childs[0].Action(m, f, b, x.Childs[0]); x.Childs[1].Action(m, f, b, x.Childs[1]); });
    expr |= function + ParserAction.Create((m, f, b, x) => { x.Childs[0].Action(m, f, b, x.Childs[0]); });
    expr |= name + equal + index + ParserAction.Create((m, f, b, x) => { });
    expr |= runnable + ParserAction.Create((m, f, b, x) => { });
    // function call without arguments: declare an extern function and insert a call.
    function |= name + op_open + op_close + ParserAction.Create((m, f, b, x) => { var caller = m.CreateFunction(x.Childs[0].Contents); caller.IsExtern = true; var ci = LPCallOperator.Create(caller, new List <LPUser>()); b.Insert(ci); x.UserContents = ci; });
    // function call with arguments: evaluate the argument list, then insert the call.
    function |= name + op_open + argument + op_close + ParserAction.Create((m, f, b, x) => { var caller = m.CreateFunction(x.Childs[0].Contents); caller.IsExtern = true; x.Childs[2].Action(m, f, b, x); var ci = LPCallOperator.Create(caller, x.Childs[2].UserContents as List <LPUser>); b.Insert(ci); x.UserContents = ci; });
    // argument: a single index becomes a one-element argument list.
    argument |= index + ParserAction.Create((m, f, b, x) => { x.Childs[0].Action(m, f, b, x); x.UserContents = new List <LPUser> { x.Childs[0].UserContents as LPUser }; });
    argument |= index + comma + argument + ParserAction.Create((m, f, b, x) => { });
    index |= variable + ParserAction.Create((m, f, b, x) => { });
    index |= variable + pp_open + variable + pp_close + ParserAction.Create((m, f, b, x) => { });
    index |= index + plus + index + ParserAction.Create((m, f, b, x) => { });
    index |= index + minus + index + ParserAction.Create((m, f, b, x) => { });
    index |= index + multiple + index + ParserAction.Create((m, f, b, x) => { });
    index |= index + divide + index + ParserAction.Create((m, f, b, x) => { });
    index |= minus + index + ParserAction.Create((m, f, b, x) => { });
    index |= op_open + index + op_close + ParserAction.Create((m, f, b, x) => { });
    variable |= name + ParserAction.Create((m, f, b, x) => { });
    variable |= function + ParserAction.Create((m, f, b, x) => { });
    variable |= _const + ParserAction.Create((m, f, b, x) => { x.UserContents = LPConstant.Create(x.Childs[0].Contents); });
    runnable |= loop + op_open + name + equal + index + to + index + op_close + block + ParserAction.Create((m, f, b, x) => { });
    runnable |= _foreach + op_open + name + scolon + index + op_close + block + ParserAction.Create((m, f, b, x) => { });
    runnable |= _if + op_open + index + op_close + block + ParserAction.Create((m, f, b, x) => { });
    runnable |= _if + op_open + index + op_close + block + _else + block + ParserAction.Create((m, f, b, x) => { });

    // NOTE(review): the push order of conflict solvers appears significant (it looks
    // like it encodes precedence/associativity) - confirm against ParserGenerator
    // before reordering any of these calls.
    gen.PushConflictSolver(true, _else);
    gen.PushConflictSolver(true, new Tuple <ParserProduction, int>(runnable, 2));
    gen.PushConflictSolver(true, new Tuple <ParserProduction, int>(index, 6));
    gen.PushConflictSolver(false, multiple, divide);
    gen.PushConflictSolver(false, plus, minus);
    //gen.PushConflictSolver(true, new Tuple<ParserProduction, int>(index, 1));
    gen.PushConflictSolver(false, pp_open);
    gen.PushConflictSolver(true, new Tuple <ParserProduction, int>(index, 0));
    try
    {
        gen.PushStarts(script);
        gen.PrintProductionRules();
        gen.GenerateLALR2();
        gen.PrintStates();
        gen.PrintTable();
    }
    catch (Exception e)
    {
        // Generation errors are reported to the in-app console rather than rethrown.
        Console.Console.Instance.WriteLine(e.Message);
    }
    Console.Console.Instance.WriteLine(gen.GlobalPrinter.ToString());
    // Cache and return the generated parser.
    return(pargen = gen.CreateExtendedShiftReduceParserInstance());
}
// Appends the newest member to an already-created list node (list-builder reduce).
// The list node itself sits at the first-child position on the stack; the new member
// is on top of the stack.
private ParseTreeNode ReduceExistingList(ParserAction action)
{
    int memberCount = action.ReduceProduction.RValues.Count;
    int listIndex = Context.ParserStack.Count - memberCount;
    var listNode = Context.ParserStack[listIndex];
    listNode.Span = ComputeNewNodeSpan(memberCount);
    var newMember = Context.ParserStack.Top;
    // Punctuation/empty transient members are not added to the list.
    if (!ShouldSkipChildNode(newMember))
    {
        CheckCreateAstNode(newMember);
        listNode.ChildNodes.Add(newMember);
    }
    return listNode;
}
/// <summary>
/// Parses unified-diff text into a sequence of <see cref="FileDiff"/> objects.
/// </summary>
/// <param name="input">The raw diff text.</param>
/// <param name="lineEnding">Separator used to split <paramref name="input"/> into lines.</param>
/// <returns>One <see cref="FileDiff"/> per file section found; empty if the input is blank.</returns>
public static IEnumerable <FileDiff> Parse(string input, string lineEnding = "\n")
{
    if (string.IsNullOrWhiteSpace(input)) { return(Enumerable.Empty <FileDiff>()); }
    var lines = input.Split(new[] { lineEnding }, StringSplitOptions.None);
    if (lines.Length == 0) { return(Enumerable.Empty <FileDiff>()); }
    var files = new List <FileDiff>();
    // Running old/new line counters within the current chunk.
    var in_del = 0;
    var in_add = 0;
    ChunkDiff current = null;
    FileDiff file = null;
    int oldStart, newStart;
    int oldLines, newLines;
    // Begins a new file section; tries to read both file names from the "diff" line.
    ParserAction start = (line, m) => {
        file = new FileDiff();
        files.Add(file);
        if (file.To == null && file.From == null)
        {
            var fileNames = parseFile(line);
            if (fileNames != null)
            {
                file.From = fileNames[0];
                file.To = fileNames[1];
            }
        }
    };
    // Starts a new file section unless the current one has no chunks yet.
    ParserAction restart = (line, m) => {
        if (file == null || file.Chunks.Count != 0) { start(null, null); }
    };
    ParserAction new_file = (line, m) => { restart(null, null); file.Type = FileChangeType.Add; file.From = "/dev/null"; };
    ParserAction deleted_file = (line, m) => { restart(null, null); file.Type = FileChangeType.Delete; file.To = "/dev/null"; };
    ParserAction index = (line, m) => { restart(null, null); file.Index = line.Split(' ').Skip(1); };
    ParserAction from_file = (line, m) => { restart(null, null); file.From = parseFileFallback(line); };
    ParserAction to_file = (line, m) => { restart(null, null); file.To = parseFileFallback(line); };
    // "@@ -a,b +c,d @@" header: reset the line counters and open a new chunk.
    ParserAction chunk = (line, match) => {
        in_del = oldStart = int.Parse(match.Groups[1].Value);
        oldLines = match.Groups[2].Success ? int.Parse(match.Groups[2].Value) : 0;
        in_add = newStart = int.Parse(match.Groups[3].Value);
        newLines = match.Groups[4].Success ? int.Parse(match.Groups[4].Value) : 0;
        current = new ChunkDiff(content: line, oldStart: oldStart, oldLines: oldLines, newStart: newStart, newLines: newLines);
        file.Chunks.Add(current);
    };
    ParserAction del = (line, match) => { current.Changes.Add(new LineDiff(type: LineChangeType.Delete, index: in_del++, content: line)); file.Deletions++; };
    ParserAction add = (line, m) => { current.Changes.Add(new LineDiff(type: LineChangeType.Add, index: in_add++, content: line)); file.Additions++; };
    const string noeol = "\\ No newline at end of file";
    // Context line (or no-newline marker) inside a chunk.
    Action <string> normal = line => {
        // FIX: also guard against a null chunk. Header-area lines that match no
        // pattern (e.g. "old mode 100644") used to reach this handler before any
        // "@@" chunk existed and threw a NullReferenceException.
        if (file == null || current == null) { return; }
        current.Changes.Add(new LineDiff(
            oldIndex: line == noeol ? 0 : in_del++,
            newIndex: line == noeol ? 0 : in_add++,
            content: line));
    };
    // FIX: the handler table was a Dictionary<Regex, ParserAction>, but Dictionary
    // enumeration order is unspecified while dispatch below is first-match: the
    // specific "^---"/"^+++" patterns must be tried before the generic "^-"/"^+".
    // An ordered array makes the precedence deterministic.
    var schema = new KeyValuePair <Regex, ParserAction>[] {
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^diff\s"), start),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^new file mode \d+$"), new_file),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^deleted file mode \d+$"), deleted_file),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^index\s[\da-zA-Z]+\.\.[\da-zA-Z]+(\s(\d+))?$"), index),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^---\s"), from_file),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^\+\+\+\s"), to_file),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^@@\s+\-(\d+),?(\d+)?\s+\+(\d+),?(\d+)?\s@@"), chunk),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^-"), del),
        new KeyValuePair <Regex, ParserAction>(new Regex(@"^\+"), add)
    };
    // Dispatches a line to the first matching handler; false if none matched.
    Func <string, bool> parse = line => {
        foreach (var p in schema)
        {
            var m = p.Key.Match(line);
            if (m.Success)
            {
                p.Value(line, m);
                return(true);
            }
        }
        return(false);
    };
    foreach (var line in lines)
    {
        if (!parse(line)) { normal(line); }
    }
    return(files);
}
// Reduces a transient non-terminal by promoting its first non-skippable child,
// scanning from the top of the stack downward.
private ParseTreeNode ReduceTransientNonTerminal(ParserAction action)
{
    int stackTop = Context.ParserStack.Count - 1;
    int rvalueCount = action.ReduceProduction.RValues.Count;
    for (int offset = 0; offset < rvalueCount; offset++)
    {
        var candidate = Context.ParserStack[stackTop - offset];
        if (ShouldSkipChildNode(candidate))
            continue;
        CheckCreateAstNode(candidate);
        return candidate;
    }
    // Every child was skippable: return an empty transient node; if it is part of a
    // list, the list will skip it.
    return new ParseTreeNode(action.ReduceProduction, ComputeNewNodeSpan(rvalueCount));
}
//Detect conflicts that cannot be handled by the non-canonical NLALR method directly,
// but may be fixed by grammar transformation (adding WrapTail hints).
private void DetectNlalrFixableConflicts(ParserState state)
{
    var stateData = state.BuilderData;
    //compute R-R and S-R conflicting lookaheads
    var reduceLkhds = new BnfTermSet();
    var rrConflicts = new BnfTermSet();
    var srConflicts = new BnfTermSet();
    foreach (var reduceItem in state.BuilderData.ReduceItems)
    {
        foreach (var lkh in reduceItem.ReducedLookaheads)
        {
            if (stateData.ShiftTerms.Contains(lkh))
            {
                // Shift on the same term: S-R conflict, unless the term is resolved
                // by precedence.
                if (!lkh.IsSet(TermOptions.UsePrecedence))
                {
                    srConflicts.Add(lkh); //S-R conflict
                }
            }
            else if (reduceLkhds.Contains(lkh))
            {
                // Lookahead already claimed by another reduce item: R-R conflict.
                rrConflicts.Add(lkh); //R-R conflict
            }
            reduceLkhds.Add(lkh);
        } //foreach lkh
    } //foreach item
    // Nothing to do if the state is conflict-free.
    if (srConflicts.Count == 0 && rrConflicts.Count == 0) { return; }
    //Collect all cores to recommend for adding WrapTail hint.
    var allConflicts = new BnfTermSet();
    allConflicts.UnionWith(srConflicts);
    allConflicts.UnionWith(rrConflicts);
    foreach (var conflict in allConflicts)
    {
        // Non-initial shift items contribute their cores as WrapTail candidates.
        var conflictingShiftItems = state.BuilderData.ShiftItems.SelectByCurrent(conflict);
        foreach (var item in conflictingShiftItems)
        {
            if (!item.Core.IsInitial) //only non-initial
            {
                _coresToAddWrapTailHint.Add(item.Core);
            }
        }
        // Lookahead sources of the conflicting reduces are also WrapTail candidates.
        foreach (var reduceItem in state.BuilderData.ReduceItems)
        {
            var conflictingSources = reduceItem.ReducedLookaheadSources.SelectByCurrent(conflict);
            foreach (var source in conflictingSources)
            {
                _coresToAddWrapTailHint.Add(source.Core);
            }
        }
    }
    //still report them as conflicts
    ReportParseConflicts(state, srConflicts, rrConflicts);
    //create default actions and remove conflicts from list so we don't deal with them anymore
    foreach (var conflict in rrConflicts)
    {
        // Default R-R resolution: reduce by the first conflicting production.
        var reduceItems = stateData.ReduceItems.SelectByReducedLookahead(conflict);
        var action = ParserAction.CreateReduce(reduceItems.First().Core.Production);
        state.Actions[conflict] = action;
    }
    //Update ResolvedConflicts and Conflicts sets
    stateData.ResolvedConflicts.UnionWith(srConflicts);
    stateData.ResolvedConflicts.UnionWith(rrConflicts);
    stateData.Conflicts.ExceptWith(stateData.ResolvedConflicts);
}//method
// Delegates conflict resolution to the action itself, then performs the chosen
// shift/reduce/operator action and records a trace entry.
private void ExecuteConflictAction(ParserAction action)
{
    var resolution = action.ResolveConflict(_grammar, Context);
    switch (resolution.Result)
    {
        case ParserActionType.Reduce:
            ExecuteReduce(new ParserAction(ParserActionType.Reduce, null, resolution.ReduceProduction));
            break;
        case ParserActionType.Operator:
            ExecuteOperatorAction(new ParserAction(ParserActionType.Operator, action.NewState, resolution.ReduceProduction));
            break;
        default: // Shift and anything else falls back to a shift
            ExecuteShift(action);
            break;
    }
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTraceConflictResolved);
}
// Appends a parser action to the action list.
internal void AddAction(ParserAction action) => actions.Add(action);
// Resolves an operator action into a concrete shift or reduce and executes it.
private void ExecuteOperatorAction(ParserAction action)
{
    var resolvedType = GetActionTypeForOperation(action);
    if (_traceEnabled)
        Context.AddTrace(Resources.MsgTraceOpResolved, resolvedType);
    if (resolvedType == ParserActionType.Shift)
        ExecuteShift(action);
    else if (resolvedType == ParserActionType.Reduce)
        ExecuteReduce(action);
}
// Decodes an encoded action cell and appends the resulting action to the list.
internal void AddAction(int actionCell) => actions.Add(ParserAction.Decode(actionCell));
// Fills table actions for ambiguous tokens in every state, based on the actions of
// their underlying tokens. Returns false when an ambiguity needs GLR but isGlr is false.
private bool FillAmbiguousTokenActions(DotState[] states, bool isGlr)
{
    for (int i = 0; i != states.Length; ++i)
    {
        var state = states[i];
        foreach (var ambToken in grammar.AmbiguousSymbols)
        {
            // Collect the non-empty table cells of the tokens this ambToken covers.
            var validTokenActions = new Dictionary <int, int>();
            foreach (int token in ambToken.Tokens)
            {
                int cell = data.Get(i, token);
                if (cell == 0) { continue; }
                validTokenActions.Add(token, cell);
            }
            switch (validTokenActions.Count)
            {
                case 0:
                    // AmbToken is entirely non-acceptable for this state
                    data.Set(i, ambToken.Index, 0);
                    break;
                case 1:
                {
                    var pair = validTokenActions.First();
                    if (pair.Key == ambToken.MainToken)
                    {
                        // ambToken action is the same as for the main token
                        data.Set(i, ambToken.Index, pair.Value);
                    }
                    else
                    {
                        // Resolve ambToken to a one of the underlying tokens.
                        // In runtime transition will be acceptable when this token
                        // is in Msg and non-acceptable when this particular token
                        // is not in Msg.
                        var action = new ParserAction { Kind = ParserActionKind.Resolve, Value1 = pair.Key };
                        data.Set(i, ambToken.Index, ParserAction.Encode(action));
                    }
                }
                break;
                default:
                    if (validTokenActions.Values.Distinct().Count() == 1)
                    {
                        // Multiple tokens but with the same action
                        goto case 1;
                    }
                    // Genuinely divergent actions: only a GLR parser can handle this.
                    if (!isGlr) { return(false); }
                    // This kind of ambiguity requires GLR to follow all alternate tokens
                    {
                        var pair = validTokenActions.First();
                        var forkAction = new ParserAction { Kind = ParserActionKind.Fork, Value1 = pair.Key };
                        data.Set(i, ambToken.Index, ParserAction.Encode(forkAction));
                    }
                    break;
            }
        }
    }
    return(true);
}
// Compiles a single parser action (import, variable declaration, inline expression,
// foreach loop, or template) against the current interpreter Context.
// Throws ExpressionCompileException for unresolvable types or reserved variable names;
// ArgumentOutOfRangeException for unknown tokens.
public void CompileAction(ParserAction action, OList <ParserAction> actions)
{
    // Skip actions that were already consumed by an earlier compilation step.
    if (action.Consumed) { return; }
    switch (action.Token)
    {
        case ParserToken.Import:
        {
            var expr = (ImportExpression)action.Related.Single();
            var type = expr.Type;
            var alias = expr.As;
            // If the import target is a file path, load it as an assembly and stop.
            if (File.Exists(type))
            {
                Assembly.LoadFile(type);
                Debug.WriteLine($"{type} was loaded successfully.");
                break;
            }
            // Otherwise search all loaded assemblies for a type with that name.
            Type foundtype;
            foreach (var asm in AppDomain.CurrentDomain.GetAssemblies())
            {
                foundtype = asm.GetType(type);
                if (foundtype == null) { continue; }
                goto _found;
            }
            throw new ExpressionCompileException($"Unable to find type: {type}");
            _found:
            Debug.WriteLine($"{type} was loaded successfully.");
            // Register the type under its alias when one was given.
            if (alias != null)
            {
                Context.Imports.AddType(foundtype, alias);
            }
            else
            {
                Context.Imports.AddType(foundtype);
            }
            break;
        }
        case ParserToken.Declaration:
        {
            var expr = (VariableDeclarationExpression)action.Related.Single();
            var name = expr.Name.AsString();
            //validate name
            {
                // Reject names reserved by the interpreter.
                if (InterpreterOptions.BuiltinKeywords.Any(w => w.Equals(name, StringComparison.Ordinal)))
                {
                    throw new ExpressionCompileException($"Variable named '{name}' is taken by the interpreter.");
                }
            }
            var right = expr.Right;
            var evaluation = EvaluateExpression(right);
            Context.Variables[name] = Data.Create(evaluation);
            break;
        }
        case ParserToken.Expression:
        {
            var line = action.RelatedLines.Single();
            // Guard against compiling the same line twice.
            if (line.Metadata.Contains("ParserToken.Expression")) { break; }
            line.Metadata.Add("ParserToken.Expression");
            line.MarkedForDeletion = false; //they are all true by default, well all lines that were found relevant to ParserAction
            var copy = line.Content;
            var ew = new ExpressionWalker(ExpressionLexer.Tokenize(copy));
            var vars = Context.Variables; // NOTE(review): 'vars' appears unused here - confirm before removing
            bool changed = false;
            int last_access_index = 0;
            //we reparse the line and handle all expressions.
            if (ew.HasNext)
            {
                do
                {
                    _restart:
                    // After a substitution, re-tokenize the remainder of the line
                    // (earlier positions are blanked with spaces to keep indices stable).
                    if (changed)
                    {
                        changed = false;
                        var cleanedCopy = new string(' ', last_access_index) + copy.Substring(last_access_index);
                        ew = new ExpressionWalker(ExpressionLexer.Tokenize(cleanedCopy));
                        if (ew.Count == 0) { break; }
                    }
                    var current = ew.Current;
                    //iterate all tokens of that line
                    if (current.Token != ExpressionToken.Mod || !ew.HasNext) { continue; }
                    var mod = ew.Current;
                    current = ew.NextToken();
                    switch (current.Token)
                    {
                        case ExpressionToken.LeftParen:
                        {
                            //it is an expression.
                            ew.NextOrThrow();
                            var expression = Expression.ParseExpression(ew);
                            object val = EvaluateObject(expression, line);
                            if (val is ReferenceData rd) //make sure references are unpacked
                            {
                                val = rd.UnpackReference(Context);
                            }
                            ew.IsCurrentOrThrow(ExpressionToken.RightParen);
                            // Splice the evaluated value back into the source text.
                            var emit = val is Data d ? d.Emit() : val.ToString();
                            copy = copy
                                .Remove(mod.Match.Index, ew.Current.Match.Index + 1 - mod.Match.Index)
                                .Insert(mod.Match.Index, emit);
                            last_access_index = mod.Match.Index + emit.Length;
                            changed = true;
                            goto _restart;
                        }
                        default:
                            continue;
                    }
                } while (ew.Next());
            }
            // Write back the transformed line, ensuring it ends with a newline.
            line.Replace(copy + (copy.EndsWith("\n") ? "" : "\n"));
            break;
        }
        case ParserToken.ForeachLoop:
        {
            _compileForeach(action);
            break;
        }
        case ParserToken.Template:
            break;
        default:
            throw new ArgumentOutOfRangeException();
    }
}