/// <summary>
/// Appends <paramref name="lexeme"/> to the full lexeme history and, when
/// <paramref name="setTrivia"/> is true, records it as a valuable (non-skipped)
/// lexeme, attaching all lexemes skipped since the previous valuable one as
/// its leading trivia.
/// </summary>
/// <param name="lexeme">The lexeme to record.</param>
/// <param name="setTrivia">
/// True to treat the lexeme as valuable and assign its leading trivia;
/// false to record it as a skipped (trivia-only) lexeme.
/// </param>
private void AddHistory(Lexeme lexeme, bool setTrivia = true)
{
    Debug.Assert(m_valuableCursor == m_valuableHistory.Count);

    m_fullHistory.Add(lexeme);
    // Use the Count property rather than the LINQ Count() extension:
    // same value, no enumerator allocation (the original called Count()).
    int fullCursor = m_fullHistory.Count;

    if (setTrivia)
    {
        // Trivia is the run of skipped lexemes strictly between the previous
        // valuable lexeme and the one just added.
        int lastTriviaStartIndex = m_lastNotSkippedLexemeIndex + 1;
        int lastTriviaLength = fullCursor - 1 - lastTriviaStartIndex;

        if (lastTriviaLength < 0)
        {
            // No skipped lexemes since the last valuable one.
            lastTriviaLength = 0;
        }

        lexeme.SetTrivia(new LexemeRange(m_fullHistory, lastTriviaStartIndex, lastTriviaLength));

        m_lastNotSkippedLexemeIndex = fullCursor - 1;
        m_valuableHistory.Add(fullCursor - 1);
        m_valuableCursor = m_valuableHistory.Count;
    }
}
/// <summary>
/// Runs error recovery for lexeme <paramref name="z"/> over every head in
/// m_errorCandidates, producing new candidate heads via token deletion,
/// replacement and insertion (each strategy gated by its Enable* flag).
/// Falls back to panic-mode recovery when there are too many failing heads.
/// </summary>
/// <param name="z">The lexeme that no head could shift or reduce.</param>
/// <param name="ctoken">Token used to abort a long-running recovery.</param>
private void RecoverError(Lexeme z, CancellationToken ctoken)
{
    List<ParserHead> shiftedHeads = m_shiftedHeads;
    m_heads.Clear();

    int errorHeadCount = m_errorCandidates.Count;
    Debug.Assert(errorHeadCount > 0);

    if (errorHeadCount > c_panicRecoveryThreshold)
    {
        // Too many failing heads: try panic-mode recovery first.
        // NOTE(review): if PerformPanicRecovery returns without throwing,
        // execution continues into the per-head loop below — confirm that
        // fall-through is intended.
        PerformPanicRecovery(z, shiftedHeads);
    }

    for (int i = 0; i < errorHeadCount; i++)
    {
        ctoken.ThrowIfCancellationRequested();

        var head = m_errorCandidates[i];

        //restore stack before reduce, in case an invalidated reduce has been performed
        head.RestoreToLastShift();

        if (!z.IsEndOfStream)
        {
            //option 1: remove
            //remove current token and continue
            if (EnableDeletionRecovery)
            {
                var deleteHead = head.Clone();
                deleteHead.IncreaseErrorRecoverLevel();
                deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span) { ErrorArgument = z.Value });
                shiftedHeads.Add(deleteHead);
            }
            if (EnableReplacementRecovery)
            {
                //option 2: replace
                //replace the current input char with all possible shifts token and continue
                ReduceAndShiftForRecovery(z, head, shiftedHeads, m_errorDef.TokenMistakeId, ctoken);
            }
        }
        if (EnableInsertionRecovery)
        {
            //option 3: insert
            //insert all possible shifts token and continue
            ReduceAndShiftForRecovery(z, head, m_heads, m_errorDef.TokenMissingId, ctoken);
        }
        else if(z.IsEndOfStream)
        {
            //no other choices at end of stream: panic recovery is the last resort
            PerformPanicRecovery(z, shiftedHeads);
        }
    }
}
/// <summary>
/// Panic-mode recovery, applied to the first error candidate only:
/// pop the stack until a state S that has a Goto on some non-terminal A,
/// discard input until a token in Follow(A) appears, then push Goto(S, A).
/// All other error candidates are discarded; only error-free shifted heads
/// are kept alive.
/// </summary>
/// <param name="z">The offending lexeme (supplies the error span).</param>
/// <param name="shiftedHeads">Heads that managed to shift this lexeme.</param>
private void PerformPanicRecovery(Lexeme z, List<ParserHead> shiftedHeads)
{
    // Keep only the shifted heads that have never entered error recovery.
    m_heads.Clear();
    foreach (var shifted in shiftedHeads)
    {
        if (shifted.ErrorRecoverLevel == 0)
        {
            m_heads.Add(shifted);
        }
    }
    shiftedHeads.Clear();

    var panicHead = m_errorCandidates[0];
    m_errorCandidates.Clear();

    IProduction recovered = panicHead.PanicRecover(m_transitions, z.Value.Span, z.IsEndOfStream);

    ProductionBase productionBase = recovered as ProductionBase;
    if (productionBase == null)
    {
        // No usable non-terminal found; leave recovery to the caller.
        return;
    }

    m_heads.Add(panicHead);
    throw new PanicRecoverException(productionBase.Info.Follow);
}
/// <summary>
/// Feeds the next lexeme into the parser without cancellation support.
/// </summary>
/// <param name="z">The lexeme to consume.</param>
public void Input(Lexeme z)
{
    // The default Task.Factory carries CancellationToken.None; say so
    // explicitly instead of routing through the TPL factory.
    Input(z, CancellationToken.None);
}
/// <summary>
/// Shifts the given lexeme: pushes a new stack node for the target state and
/// remembers it as the most recent shift point (used by error recovery).
/// </summary>
/// <param name="z">The lexeme being shifted.</param>
/// <param name="targetStateIndex">The automaton state to transition into.</param>
public void Shift(Lexeme z, int targetStateIndex)
{
#if HISTORY
    var from = m_topStack.StateIndex;
#endif

    var newTop = new StackNode(targetStateIndex, m_topStack, z);
    m_lastShiftStack = newTop;
    m_topStack = newTop;

#if HISTORY
    var to = m_topStack.StateIndex;
    History.Add(String.Format("S{0}:{1}", from, to));
#endif
}
/// <summary>
/// Performs one reduction on this head using <paramref name="production"/>,
/// or marks the head as accepted when the production is null.
/// </summary>
/// <param name="production">Production to reduce by; null signals acceptance.</param>
/// <param name="reducer">Visitor that builds the new stack top for the reduction.</param>
/// <param name="lookahead">Current lookahead; supplies an error position when a
/// reduce error carries none.</param>
public void Reduce(IProduction production, ReduceVisitor reducer, Lexeme lookahead)
{
#if HISTORY
    var from = m_topStack.StateIndex;
#endif

    if (production == null)
    {
        //Accept
        Debug.Assert(m_topStack.PrevNode.StateIndex == 0);

        //TODO: accepted
        IsAccepted = true;
        return;
    }

    if (production.AggregatesAmbiguities)
    {
        // This production merges ambiguous parses; give it a fresh aggregator.
        AmbiguityAggregator = ((ProductionBase)production).CreateAggregator();
    }

    var result = production.Accept(reducer, m_topStack);
    m_topStack = result.NewTopStack;

    var error = result.ReduceError;
    if (error != null)
    {
        IncreaseErrorRecoverLevel();

        if (error.ErrorPosition == null)
        {
            // The reducer gave no position; pin the error to the lookahead.
            error = new ErrorRecord(error.ErrorId, lookahead.Value.Span);
        }

        AddError(error);
    }

#if HISTORY
    var to = m_topStack.StateIndex;
    History.Add(String.Format("R{0}:{1}", from, to));
#endif
}
/// <summary>
/// Creates a type reference from the lexeme that spelled the type's name.
/// </summary>
/// <param name="name">The identifier lexeme naming the referenced type.</param>
public TypeRef(Lexeme name)
{
    this.TypeName = name;
}
/// <summary>
/// Error recovery for lexeme <paramref name="z"/>: for each failing head,
/// tries (1) deleting the unexpected token and (2) inserting every possible
/// expected token, performing any reductions needed before each insertion.
/// New heads are collected into m_shiftedHeads (deletions) and m_heads
/// (insertions).
/// </summary>
/// <param name="z">The lexeme that no head could shift or reduce.</param>
private void RecoverError(Lexeme z)
{
    List<ParserHead> shiftedHeads = m_shiftedHeads;
    m_heads.Clear();

    int errorHeadCount = m_errorCandidates.Count;
    Debug.Assert(errorHeadCount > 0);

    for (int i = 0; i < errorHeadCount; i++)
    {
        var head = m_errorCandidates[i];

        //option 1: remove
        //remove current token and continue
        if (!z.IsEndOfStream)
        {
            var deleteHead = head.Clone();
            deleteHead.IncreaseErrorRecoverLevel();
            deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span) { ErrorArgument = z.Value });
            shiftedHeads.Add(deleteHead);
        }

        //option 2: insert
        //insert all possible shifts token and continue
        // The queue carries heads that still need reducing before the
        // candidate token j can be shifted.
        Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

        // Try every token index except the last (presumably end-of-stream —
        // TODO confirm against the transition table's token layout).
        for (int j = 0; j < m_transitions.TokenCount - 1; j++)
        {
            recoverQueue.Enqueue(head);

            while (recoverQueue.Count > 0)
            {
                var recoverHead = recoverQueue.Dequeue();
                int recoverStateNumber = recoverHead.TopStackStateIndex;

                var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

                // NOTE(review): tokenIndex is computed here and again below,
                // but never used — GetShift/GetReduce are called with j.
                // Dead code or a latent bug; confirm before removing.
                int tokenIndex;
                if (shiftLexer == null)
                {
                    tokenIndex = z.TokenIndex;
                }
                else
                {
                    tokenIndex = z.GetTokenIndex(shiftLexer.Value);
                }

                // Shift the synthesized token j into every shift target.
                var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
                var recoverShift = recoverShifts;
                while (recoverShift != null)
                {
                    var insertHead = recoverHead.Clone();
                    var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));

                    insertHead.Shift(insertLexeme, recoverShift.Value);
                    insertHead.IncreaseErrorRecoverLevel();
                    insertHead.AddError(new ErrorRecord(m_errorDef.TokenMissingId, z.Value.Span) { ErrorArgument = insertLexeme.Value });

                    m_heads.Add(insertHead);

                    recoverShift = recoverShift.GetNext();
                }

                var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

                // NOTE(review): see note above — this value is also unused.
                if (reduceLexer == null)
                {
                    tokenIndex = z.TokenIndex;
                }
                else
                {
                    tokenIndex = z.GetTokenIndex(reduceLexer.Value);
                }

                // Apply every reduction available on token j; reduced heads go
                // back through the queue until they can shift.
                var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
                var recoverReduce = recoverReduces;
                while (recoverReduce != null)
                {
                    int productionIndex = recoverReduce.Value;
                    IProduction production = m_transitions.NonTerminals[productionIndex];

                    var reducedHead = recoverHead.Clone();

                    reducedHead.Reduce(production, m_reducer, z);

                    //add back to queue, until shifted
                    m_recoverReducedHeads.Add(reducedHead);

                    //get next reduce
                    recoverReduce = recoverReduce.GetNext();
                }

                if (m_recoverReducedHeads.Count > 0)
                {
                    // Deduplicate/clean reduced heads before re-queuing them.
                    m_tempHeads.Clear();
                    m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                    m_recoverReducedHeads.Clear();

                    foreach (var recoveredHead in m_tempHeads)
                    {
                        recoverQueue.Enqueue(recoveredHead);
                    }
                }
            }
        }
    }
}
/// <summary>
/// Resolves a field by name, searching <paramref name="type"/> first and then
/// walking up its base-class chain. Reports a semantic error and returns null
/// when the field is not declared anywhere in the hierarchy.
/// </summary>
/// <param name="type">The class whose hierarchy is searched.</param>
/// <param name="identifier">The identifier lexeme naming the field.</param>
/// <returns>The resolved field, or null if it does not exist.</returns>
private VariableInfo ResolveField(CodeClassType type, Lexeme identifier)
{
    //step1, see current class
    if (type.Fields.Contains(identifier.Value))
    {
        return type.Fields[identifier.Value];
    }

    //step2, see base class
    // Bug fix: the original recursed on m_currentType.BaseType, which ignores
    // the 'type' being searched — it never climbs type's hierarchy, and
    // recurses on the same class forever (stack overflow) when the field is
    // missing and m_currentType has a base class. Recurse on type.BaseType.
    if (type.BaseType != null)
    {
        return ResolveField(type.BaseType, identifier);
    }

    m_errorManager.AddError(c_SE_VariableDeclMissing, identifier.Span, identifier.Value);
    return null;
}
/// <summary>
/// Creates a variable reference from the lexeme that spelled the variable's name.
/// </summary>
/// <param name="name">The identifier lexeme naming the referenced variable.</param>
public VariableRef(Lexeme name)
{
    this.VariableName = name;
}
/// <summary>
/// Error recovery for lexeme <paramref name="z"/>: when too many heads failed,
/// performs inline panic-mode recovery (throws PanicRecoverException);
/// otherwise tries deletion, replacement and insertion for each failing head.
/// </summary>
/// <param name="z">The lexeme that no head could shift or reduce.</param>
private void RecoverError(Lexeme z)
{
    List<ParserHead> shiftedHeads = m_shiftedHeads;
    m_heads.Clear();

    int errorHeadCount = m_errorCandidates.Count;
    Debug.Assert(errorHeadCount > 0);

    if (errorHeadCount > c_panicRecoveryThreshold)
    {
        //Panic recovery
        //to the 1st head:
        //pop stack until there's a state S, which has a Goto action of a non-terminal A
        //discard input until there's a token a in Follow(A)
        //push Goto(s, A) into stack
        //discard all other heads
        // NOTE(review): this duplicates PerformPanicRecovery elsewhere in the
        // file — consider calling the shared helper instead.
        m_heads.Clear();
        m_heads.AddRange(shiftedHeads.Where(h => h.ErrorRecoverLevel == 0));
        shiftedHeads.Clear();

        ParserHead errorHead1 = m_errorCandidates[0];
        m_errorCandidates.Clear();

        IProduction p = errorHead1.PanicRecover(m_transitions, z.Value.Span);

        // NOTE(review): unchecked 'as' cast — throws NullReferenceException if
        // PanicRecover returns something that is not a ProductionBase; the
        // other PerformPanicRecovery overload null-checks this. Confirm.
        var follow = (p as ProductionBase).Info.Follow;

        m_heads.Add(errorHead1);

        throw new PanicRecoverException(follow);
    }

    for (int i = 0; i < errorHeadCount; i++)
    {
        var head = m_errorCandidates[i];

        if (!z.IsEndOfStream)
        {
            //option 1: remove
            //remove current token and continue
            var deleteHead = head.Clone();
            deleteHead.IncreaseErrorRecoverLevel();
            deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span) { ErrorArgument = z.Value });
            shiftedHeads.Add(deleteHead);

            //option 2: replace
            //replace the current input char with all possible shifts token and continue
            ReduceAndShiftForRecovery(z, head, shiftedHeads, m_errorDef.TokenMistakeId);
        }

        //option 3: insert
        //insert all possible shifts token and continue
        ReduceAndShiftForRecovery(z, head, m_heads, m_errorDef.TokenMissingId);
    }
}
/// <summary>
/// Creates a method reference from the lexeme that spelled the method's name.
/// </summary>
/// <param name="name">The identifier lexeme naming the referenced method.</param>
public MethodRef(Lexeme name)
{
    this.MethodName = name;
}
/// <summary>
/// Panic-mode recovery, applied to the first error candidate only:
/// pop the stack until a state S with a Goto on some non-terminal A,
/// discard input until a token in Follow(A) appears, then push Goto(S, A).
/// Throws <c>PanicRecoverException</c> carrying the union of the candidates'
/// follow sets, or <c>ParsingFailureException</c> when no recovery exists.
/// </summary>
/// <param name="z">The offending lexeme (supplies the error span).</param>
/// <param name="shiftedHeads">Heads that managed to shift this lexeme.</param>
private void PerformPanicRecovery(Lexeme z, List<ParserHead> shiftedHeads)
{
    // Keep only the shifted heads that have never entered error recovery;
    // every other head is discarded.
    m_heads.Clear();
    foreach (var shifted in shiftedHeads)
    {
        if (shifted.ErrorRecoverLevel == 0)
        {
            m_heads.Add(shifted);
        }
    }
    shiftedHeads.Clear();

    var panicHead = m_errorCandidates[0];
    m_errorCandidates.Clear();

    var candidates = panicHead.PanicRecover(m_transitions, z.Value.Span, z.IsEndOfStream);

    // Union the follow sets of every recovered production and revive the
    // corresponding heads.
    ISet<IProduction> follows = new HashSet<IProduction>();
    foreach (var candidate in candidates)
    {
        var production = candidate.Item2 as ProductionBase;
        follows.UnionWith(production.Info.Follow);
        m_heads.Add(candidate.Item1);
    }

    if (m_heads.Count == 0)
    {
        throw new ParsingFailureException("There's no way to recover from parser error");
    }

    throw new PanicRecoverException(follows);
}
/// <summary>
/// For a single failing head, synthesizes every possible expected token and,
/// for each, performs any reductions required until the token can be shifted.
/// Each successful shift produces a new recovery head (error level increased,
/// error record attached) added to <paramref name="shiftTarget"/>.
/// </summary>
/// <param name="z">The offending lexeme (supplies span/correction context).</param>
/// <param name="head">The error-candidate head to recover from.</param>
/// <param name="shiftTarget">Collection receiving the recovered heads.</param>
/// <param name="syntaxError">Error id to attach (token missing vs. mistaken).</param>
/// <param name="ctoken">Token used to abort a long-running recovery.</param>
private void ReduceAndShiftForRecovery(Lexeme z, ParserHead head, IList<ParserHead> shiftTarget, int syntaxError, CancellationToken ctoken)
{
    // The queue carries heads that still need reducing before the candidate
    // token j can be shifted.
    Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

    // Try every token index except the last (presumably end-of-stream —
    // TODO confirm against the transition table's token layout).
    for (int j = 0; j < m_transitions.TokenCount - 1; j++)
    {
        recoverQueue.Enqueue(head);

        while (recoverQueue.Count > 0)
        {
            var recoverHead = recoverQueue.Dequeue();
            int recoverStateNumber = recoverHead.TopStackStateIndex;

            // NOTE(review): shiftLexer (and reduceLexer below) are fetched but
            // never used — GetShift/GetReduce are called with j directly.
            // Dead code or a latent bug; confirm before removing.
            var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

            // Shift the synthesized token j into every shift target state.
            var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
            var recoverShift = recoverShifts;
            while (recoverShift != null)
            {
                ctoken.ThrowIfCancellationRequested();

                var insertHead = recoverHead.Clone();
                var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));

                insertHead.Shift(insertLexeme, recoverShift.Value);
                insertHead.IncreaseErrorRecoverLevel();
                insertHead.AddError(new ErrorRecord(syntaxError, z.Value.Span) { ErrorArgument = insertLexeme.Value, ErrorArgument2 = z.Value });

                shiftTarget.Add(insertHead);

                recoverShift = recoverShift.GetNext();
            }

            var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

            // Apply every reduction available on token j; reduced heads go
            // back through the queue until they can shift.
            var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
            var recoverReduce = recoverReduces;
            while (recoverReduce != null)
            {
                ctoken.ThrowIfCancellationRequested();

                int productionIndex = recoverReduce.Value;
                IProduction production = m_transitions.NonTerminals[productionIndex];

                var reducedHead = recoverHead.Clone();

                reducedHead.Reduce(production, m_reducer, z);

                //add back to queue, until shifted
                m_recoverReducedHeads.Add(reducedHead);

                //get next reduce
                recoverReduce = recoverReduce.GetNext();
            }

            if (m_recoverReducedHeads.Count > 0)
            {
                // Deduplicate/clean reduced heads before re-queuing them.
                m_tempHeads.Clear();
                m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                m_recoverReducedHeads.Clear();

                foreach (var recoveredHead in m_tempHeads)
                {
                    recoverQueue.Enqueue(recoveredHead);
                }
            }
        }
    }
}
/// <summary>
/// Resolves an identifier against the current method's scope: parameters
/// first, then local variables, then (for instance methods) the fields of the
/// current class. Reports a semantic error and returns null when unresolved.
/// </summary>
/// <param name="identifier">The identifier lexeme to resolve.</param>
/// <returns>The resolved variable, or null if it is not declared.</returns>
private VariableInfo ResolveVariable(Lexeme identifier)
{
    var name = identifier.Value;

    //step1, check local parameter & variable definitions
    if (m_currentMethodParameters.Contains(name))
    {
        return m_currentMethodParameters[name];
    }
    if (m_currentMethodVariables.Contains(name))
    {
        return m_currentMethodVariables[name];
    }

    //step2, if not static method, check fields
    if (!m_currentMethod.IsStatic)
    {
        return ResolveField(m_currentType, identifier);
    }

    m_errorManager.AddError(c_SE_VariableDeclMissing, identifier.Span, name);
    return null;
}
/// <summary>
/// Consumes one lexeme with the GLR-style algorithm: for every live head,
/// applies all available shifts and reduces for this lexeme. Reduced heads are
/// cleaned and re-processed until only shifts/accepts remain; if no head can
/// act at all, error recovery is invoked.
/// </summary>
/// <param name="z">The lexeme to consume.</param>
public void Input(Lexeme z)
{
    while (true)
    {
        var heads = m_heads;

        for (int i = 0; i < heads.Count; i++)
        {
            var head = heads[i];

            int stateNumber = head.TopStackStateIndex;

            // Tracks whether this head performed any action; if not, it
            // becomes an error candidate.
            bool isShiftedOrReduced = false;

            // The token index may differ per state when states use different
            // sub-lexers; fall back to the lexeme's own index otherwise.
            var shiftLexer = m_transitions.GetLexersInShifting(stateNumber);

            int tokenIndex;
            if (shiftLexer == null)
            {
                tokenIndex = z.TokenIndex;
            }
            else
            {
                tokenIndex = z.GetTokenIndex(shiftLexer.Value);
            }

            //get shift
            var shifts = m_transitions.GetShift(stateNumber, tokenIndex);

            //shifts
            var shift = shifts;
            while (shift != null)
            {
                isShiftedOrReduced = true;

                var newHead = head.Clone();

                newHead.Shift(z, shift.Value);

                //save shifted heads
                m_shiftedHeads.Add(newHead);

                //get next shift
                shift = shift.GetNext();
            }

            //reduces
            var reduceLexer = m_transitions.GetLexersInReducing(stateNumber);

            if (reduceLexer == null)
            {
                tokenIndex = z.TokenIndex;
            }
            else
            {
                tokenIndex = z.GetTokenIndex(reduceLexer.Value);
            }

            var reduces = m_transitions.GetReduce(stateNumber, tokenIndex);
            var reduce = reduces;

            while (reduce != null)
            {
                isShiftedOrReduced = true;

                int productionIndex = reduce.Value;
                IProduction production = m_transitions.NonTerminals[productionIndex];

                var reducedHead = head.Clone();

                reducedHead.Reduce(production, m_reducer, z);

                if (reducedHead.IsAccepted)
                {
                    m_acceptedHeads.Add(reducedHead);
                }
                else
                {
                    //add back to queue, until shifted
                    m_reducedHeads.Add(reducedHead);
                }

                //get next reduce
                reduce = reduce.GetNext();
            }

            if (!isShiftedOrReduced)
            {
                m_errorCandidates.Add(head);
            }
        }

        if (m_reducedHeads.Count > 0)
        {
            // Reduced heads must be re-run against the same lexeme (they may
            // now shift or reduce again); clean them first to merge duplicates.
            m_heads.Clear();
            m_cleaner.CleanHeads(m_reducedHeads, m_heads);
            m_reducedHeads.Clear();

            continue;
        }
        else if (m_shiftedHeads.Count == 0 && m_acceptedHeads.Count == 0)
        {
            //no action for current lexeme, error recovery
            RecoverError(z);
        }
        else
        {
            break;
        }
    }

    CleanShiftedAndAcceptedHeads();
}
/// <summary>
/// Records an error correction in which the unexpected lexeme was deleted
/// from the input.
/// </summary>
/// <param name="unexpectedLexeme">The lexeme that was removed.</param>
public DeletedErrorCorrection(Lexeme unexpectedLexeme)
    : base(CorrectionMethod.Deleted)
{
    this.UnexpectedLexeme = unexpectedLexeme;
}
/// <summary>
/// Performs one reduction on this head using <paramref name="production"/>,
/// or marks the head as accepted when the production is null. Tracks the
/// highest production priority seen across reductions.
/// </summary>
/// <param name="production">Production to reduce by; null signals acceptance.</param>
/// <param name="reducer">Visitor that builds the new stack top for the reduction.</param>
/// <param name="lookahead">Current lookahead; supplies an error position when a
/// reduce error carries none.</param>
public void Reduce(IProduction production, ReduceVisitor reducer, Lexeme lookahead)
{
#if HISTORY
    var from = m_topStack.StateIndex;
#endif

    if (production == null)
    {
        //Accept
        Debug.Assert(m_topStack.PrevNode.StateIndex == 0);

        //TODO: accepted
        IsAccepted = true;
        return;
    }

    // Remember the highest-priority production used on this head.
    if (production.Priority > Priority)
    {
        Priority = production.Priority;
    }

    var result = production.Accept(reducer, m_topStack);
    m_topStack = result.NewTopStack;

    var error = result.ReduceError;
    if (error != null)
    {
        IncreaseErrorRecoverLevel();

        if (error.ErrorPosition == null)
        {
            // The reducer gave no position; pin the error to the lookahead.
            error.ErrorPosition = lookahead.Span;
        }

        AddError(error);
    }

#if HISTORY
    var to = m_topStack.StateIndex;
    History.Add(String.Format("R{0}:{1}", from, to));
#endif
}