Example #1
        private void PerformPanicRecovery(Lexeme z, List<ParserHead> shiftedHeads)
        {
            //Panic recovery
            //for the 1st head:
            //pop the stack until there is a state S that has a Goto action on a non-terminal A
            //discard input until there is a token a in Follow(A)
            //push Goto(S, A) onto the stack
            //discard all other heads

            m_heads.Clear();
            m_heads.AddRange(shiftedHeads.Where(h => h.ErrorRecoverLevel == 0));
            shiftedHeads.Clear();

            ParserHead errorHead1 = m_errorCandidates[0];

            m_errorCandidates.Clear();

            IProduction p = errorHead1.PanicRecover(m_transitions, z.Value.Span, z.IsEndOfStream);

            ProductionBase productionBase = p as ProductionBase;

            //if no recovery production was found, parsing continues with only
            //the error-free shifted heads collected above
            if (productionBase != null)
            {
                var follow = productionBase.Info.Follow;

                m_heads.Add(errorHead1);

                throw new PanicRecoverException(follow);
            }
        }
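The comment block above is textbook panic-mode recovery. For readers unfamiliar with the technique, here is a minimal, self-contained sketch of the pop/skip/push loop (assuming System and System.Collections.Generic); every name in it is a hypothetical placeholder rather than this library's API, whose real logic lives inside ParserHead.PanicRecover:

        //Minimal sketch of textbook panic-mode recovery; all names below are
        //hypothetical placeholders, not this library's API
        private static bool TryPanicRecover(
            Stack<int> stateStack,
            IEnumerator<Lexeme> input,
            Func<int, bool> hasGotoOnA,     //does state S have a Goto on non-terminal A?
            Func<int, int> gotoOnA,         //computes Goto(S, A)
            Func<Lexeme, bool> inFollowOfA) //is the token in Follow(A)?
        {
            //pop states until one has a Goto action on the chosen non-terminal A
            while (stateStack.Count > 0 && !hasGotoOnA(stateStack.Peek()))
            {
                stateStack.Pop();
            }

            if (stateStack.Count == 0)
            {
                return false; //no state to recover to
            }

            //discard input tokens until one appears in Follow(A)
            while (input.MoveNext())
            {
                if (inFollowOfA(input.Current))
                {
                    //push Goto(S, A) and resume normal parsing at this token
                    stateStack.Push(gotoOnA(stateStack.Peek()));
                    return true;
                }
            }

            return false; //input exhausted before a follow token appeared
        }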
Example #2
        public static bool ShareSameParent(ParserHead h1, ParserHead h2)
        {
            var prev1 = h1.m_topStack.PrevNode;
            var prev2 = h2.m_topStack.PrevNode;

            if (prev1 != null && prev2 != null && ReferenceEquals(prev1, prev2))
            {
                return true;
            }

            return false;
        }
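ShareSameParent relies on reference equality of the predecessor nodes: it is true only when two heads actually forked from the same node of the graph-structured stack, not when their stacks merely look alike. A toy illustration, assuming using System; StackNode here is a hypothetical stand-in for the library's linked stack node type:

        //Toy illustration of the reference-equality test used above;
        //StackNode is a hypothetical stand-in, not the library's type
        class StackNode
        {
            public int StateIndex;
            public StackNode PrevNode;
        }

        static void ShareSameParentDemo()
        {
            var parent = new StackNode { StateIndex = 3 };

            //two heads forked from the same stack node share PrevNode by reference
            var top1 = new StackNode { StateIndex = 5, PrevNode = parent };
            var top2 = new StackNode { StateIndex = 7, PrevNode = parent };
            Console.WriteLine(ReferenceEquals(top1.PrevNode, top2.PrevNode)); //True

            //a structurally identical but separately built stack does not
            var other = new StackNode { StateIndex = 5, PrevNode = new StackNode { StateIndex = 3 } };
            Console.WriteLine(ReferenceEquals(top1.PrevNode, other.PrevNode)); //False
        }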
Example #3
        public bool HasSameErrorsWith(ParserHead other)
        {
            if (other.m_errors == null && m_errors == null)
            {
                return true;
            }
            if (other.m_errors == null || m_errors == null)
            {
                return false;
            }
            HashSet<ErrorRecord> myErrors    = new HashSet<ErrorRecord>(m_errors);
            HashSet<ErrorRecord> otherErrors = new HashSet<ErrorRecord>(other.m_errors);

            return myErrors.SetEquals(otherErrors);
        }
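One caveat: HashSet<T>.SetEquals compares elements with the type's default equality, so this method only behaves as intended if ErrorRecord defines value equality. The sketch below shows the kind of override that would be needed; its member list is inferred from the new ErrorRecord(errorId, span) calls elsewhere in these examples and is an assumption, not the library's actual definition:

        //Hypothetical value-equality override; the real ErrorRecord may differ.
        //Members are inferred from the 'new ErrorRecord(errorId, span)' usage here.
        public class ErrorRecord
        {
            public int Id { get; private set; }
            public SourceSpan ErrorPosition { get; private set; }

            public ErrorRecord(int id, SourceSpan position)
            {
                Id = id;
                ErrorPosition = position;
            }

            public override bool Equals(object obj)
            {
                var other = obj as ErrorRecord;
                return other != null &&
                       Id == other.Id &&
                       Equals(ErrorPosition, other.ErrorPosition);
            }

            public override int GetHashCode()
            {
                return Id * 397 ^ ErrorPosition.GetHashCode();
            }
        }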
Example #4
        private void PerformPanicRecovery(Lexeme z, List<ParserHead> shiftedHeads)
        {
            //Panic recovery
            //for the 1st head:
            //pop the stack until there is a state S that has a Goto action on a non-terminal A
            //discard input until there is a token a in Follow(A)
            //push Goto(S, A) onto the stack
            //discard all other heads

            m_heads.Clear();
            m_heads.AddRange(shiftedHeads.Where(h => h.ErrorRecoverLevel == 0));
            shiftedHeads.Clear();

            ParserHead errorHead1 = m_errorCandidates[0];

            m_errorCandidates.Clear();

            var candidates = errorHead1.PanicRecover(m_transitions, z.Value.Span, z.IsEndOfStream);

            ISet<IProduction> follows = new HashSet<IProduction>();

            foreach (var candidate in candidates)
            {
                //each candidate pairs a recovered head with the production it reduced to;
                //collect the union of their Follow sets for the recovery exception
                ProductionBase p = candidate.Item2 as ProductionBase;
                follows.UnionWith(p.Info.Follow);

                m_heads.Add(candidate.Item1);
            }
            if (m_heads.Count > 0)
            {
                throw new PanicRecoverException(follows);
            }
            else
            {
                throw new ParsingFailureException("There is no way to recover from the parser error");
            }
        }
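On the consuming side, the exception carries the combined follow set so that the driver loop can finish the recovery by discarding input. The sketch below shows one plausible shape for that loop; engine, IsInFollow, and the exception's Follow property are assumed names, not the library's confirmed API:

        //Hedged sketch of a driver loop completing panic recovery; 'engine',
        //'IsInFollow', and the exception's 'Follow' property are assumed names
        private void RunWithRecovery(IEnumerable<Lexeme> lexemes)
        {
            ISet<IProduction> pendingFollow = null;

            foreach (var lexeme in lexemes)
            {
                if (pendingFollow != null)
                {
                    //still recovering: discard input until a token in Follow(A) appears
                    if (!IsInFollow(pendingFollow, lexeme))
                    {
                        continue;
                    }
                    pendingFollow = null;
                }

                try
                {
                    engine.Input(lexeme);
                }
                catch (PanicRecoverException e)
                {
                    //the surviving heads were already repositioned via Goto(S, A);
                    //finish the recovery by skipping tokens until one may follow A
                    pendingFollow = e.Follow;
                }
            }
        }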
Example #5
        private void ReduceAndShiftForRecovery(Lexeme z, ParserHead head, IList<ParserHead> shiftTarget, int syntaxError, CancellationToken ctoken)
        {
            Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

            //probe every terminal except the last (the end-of-stream token)
            for (int j = 0; j < m_transitions.TokenCount - 1; j++)
            {
                recoverQueue.Enqueue(head);

                while (recoverQueue.Count > 0)
                {
                    var recoverHead = recoverQueue.Dequeue();

                    int recoverStateNumber = recoverHead.TopStackStateIndex;

                    var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

                    var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
                    var recoverShift = recoverShifts;

                    while (recoverShift != null)
                    {
                        ctoken.ThrowIfCancellationRequested();

                        var insertHead = recoverHead.Clone();

                        var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));
                        insertHead.Shift(insertLexeme, recoverShift.Value);
                        insertHead.IncreaseErrorRecoverLevel();
                        insertHead.AddError(new ErrorRecord(syntaxError, z.Value.Span)
                        {
                            ErrorArgument = insertLexeme.Value,
                            ErrorArgument2 = z.Value
                        });

                        shiftTarget.Add(insertHead);

                        recoverShift = recoverShift.GetNext();
                    }

                    var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

                    var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
                    var recoverReduce = recoverReduces;

                    while (recoverReduce != null)
                    {
                        ctoken.ThrowIfCancellationRequested();

                        int productionIndex = recoverReduce.Value;
                        IProduction production = m_transitions.NonTerminals[productionIndex];

                        var reducedHead = recoverHead.Clone();

                        reducedHead.Reduce(production, m_reducer, z);

                        //add the reduced head back to the queue until it manages to shift
                        m_recoverReducedHeads.Add(reducedHead);

                        //get next reduce
                        recoverReduce = recoverReduce.GetNext();
                    }

                    if (m_recoverReducedHeads.Count > 0)
                    {
                        m_tempHeads.Clear();
                        m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                        m_recoverReducedHeads.Clear();

                        foreach (var recoveredHead in m_tempHeads)
                        {
                            recoverQueue.Enqueue(recoveredHead);
                        }
                    }
                }
            }
        }
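The heart of this method is the insert-and-shift probe: for every terminal j it fabricates a virtual lexeme and keeps only the candidates the LR table can actually shift, chasing reduces first so that insertions are tried from every reachable state. A stripped-down sketch of the probe, with a hypothetical shift lookup standing in for m_transitions.GetShift:

        //Stripped-down sketch of the insertion probe; 'shift' is a hypothetical
        //lookup standing in for m_transitions.GetShift(state, token)
        static List<KeyValuePair<int, int>> InsertionCandidates(
            int state, int tokenCount, Func<int, int, int?> shift)
        {
            var candidates = new List<KeyValuePair<int, int>>();

            for (int j = 0; j < tokenCount; j++)
            {
                int? next = shift(state, j);
                if (next.HasValue)
                {
                    //terminal j can be inserted here; record the target state
                    candidates.Add(new KeyValuePair<int, int>(j, next.Value));
                }
            }

            return candidates;
        }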
Example #6
        public void CleanHeads(IList<ParserHead> sourceHeads, IList<ParserHead> targetHeads)
        {
            int minErrorLevel = sourceHeads[0].ErrorRecoverLevel;

            //int minErrorCount = sourceHeads[0].Errors != null ? sourceHeads[0].Errors.Count : 0;

            for (int i = 0; i < sourceHeads.Count; i++)
            {
                var head       = sourceHeads[i];
                var errorLevel = head.ErrorRecoverLevel;
                if (errorLevel < minErrorLevel)
                {
                    minErrorLevel = errorLevel;
                }

                //var errorCount = head.Errors != null ? head.Errors.Count : 0;
                //if (errorCount < minErrorCount)
                //{
                //    minErrorCount = errorCount;
                //}
            }

            foreach (var head in sourceHeads)
            {
                if (head.ErrorRecoverLevel > minErrorLevel)
                {
                    //discard heads with higher error level
                    continue;
                }

                //var errorCount = head.Errors != null ? head.Errors.Count : 0;
                //if (errorCount > minErrorCount)
                //{
                //    //discard heads with more errors
                //    continue;
                //}

                if (head.AmbiguityAggregator == null)
                {
                    targetHeads.Add(head);
                    continue;
                }

                //aggregate ambiguities
                bool isAggregated = false;
                foreach (var ambhead in m_aggregatingHeads)
                {
                    if (ambhead.TopStackStateIndex == head.TopStackStateIndex &&
                        ambhead.AmbiguityAggregator.ProductionIndex == head.AmbiguityAggregator.ProductionIndex &&
                        ParserHead.ShareSameParent(ambhead, head))
                    {
                        var aggregator = ambhead.AmbiguityAggregator;

                        //if the heads have different errors, they probably came from error recovery;
                        //in that case the aggregation keeps only the first head and discards the others,
                        //so the total number of reported errors does not grow
                        if (ambhead.HasSameErrorsWith(head))
                        {
                            //update aggregate value
                            ambhead.TopStackValue = aggregator.Aggregate(ambhead.TopStackValue, head.TopStackValue);
                        }

                        //discard the aggregated head
                        isAggregated = true;

                        break;
                    }
                }

                if (!isAggregated)
                {
                    m_aggregatingHeads.Add(head);
                    targetHeads.Add(head);
                }
            }

            foreach (var aggHead in m_aggregatingHeads)
            {
                aggHead.AmbiguityAggregator = null;
            }

            m_aggregatingHeads.Clear();
        }
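The Aggregate call above is where GLR local-ambiguity packing happens: two heads that recognized the same production over the same stack prefix have their semantic values folded into one. Below is a hypothetical aggregation strategy that keeps every alternative; only the Aggregate(existing, duplicate) call shape is taken from the code above:

        //Hypothetical aggregation strategy; only the Aggregate(existing, duplicate)
        //shape is taken from CleanHeads above
        class AmbiguityNode
        {
            public List<object> Alternatives = new List<object>();
        }

        static object Aggregate(object existing, object duplicate)
        {
            //pack all alternative interpretations under one node, in the
            //spirit of GLR local-ambiguity packing
            var packed = existing as AmbiguityNode;
            if (packed == null)
            {
                packed = new AmbiguityNode();
                packed.Alternatives.Add(existing);
            }
            packed.Alternatives.Add(duplicate);
            return packed;
        }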
Example #7
        private void RecoverError(Lexeme z)
        {
            List<ParserHead> shiftedHeads = m_shiftedHeads;

            m_heads.Clear();
            int errorHeadCount = m_errorCandidates.Count;

            Debug.Assert(errorHeadCount > 0);

            if (errorHeadCount > c_panicRecoveryThreshold)
            {
                //Panic recovery
                //for the 1st head:
                //pop the stack until there is a state S that has a Goto action on a non-terminal A
                //discard input until there is a token a in Follow(A)
                //push Goto(S, A) onto the stack
                //discard all other heads

                m_heads.Clear();
                m_heads.AddRange(shiftedHeads.Where(h => h.ErrorRecoverLevel == 0));
                shiftedHeads.Clear();

                ParserHead errorHead1 = m_errorCandidates[0];
                m_errorCandidates.Clear();

                IProduction p = errorHead1.PanicRecover(m_transitions, z.Value.Span);

                var follow = (p as ProductionBase).Info.Follow;

                m_heads.Add(errorHead1);

                throw new PanicRecoverException(follow);
            }

            for (int i = 0; i < errorHeadCount; i++)
            {
                var head = m_errorCandidates[i];

                if (!z.IsEndOfStream)
                {
                    //option 1: remove
                    //delete the current token and continue
                    var deleteHead = head.Clone();

                    deleteHead.IncreaseErrorRecoverLevel();
                    deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span)
                    {
                        ErrorArgument = z.Value
                    });

                    shiftedHeads.Add(deleteHead);

                    //option 2: replace
                    //replace the current input token with each shiftable token and continue
                    ReduceAndShiftForRecovery(z, head, shiftedHeads, m_errorDef.TokenMistakeId);
                }

                //option 3: insert
                //insert each shiftable token before the current one and continue
                ReduceAndShiftForRecovery(z, head, m_heads, m_errorDef.TokenMissingId);
            }
        }
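Taken together, the three options amount to a single-token edit repair. If a ; is expected but ) arrives, option 1 continues as if the ) were absent, option 2 continues as if it had been typed as ;, and option 3 continues as if ; had been inserted before the ), which is then processed normally. Each option spawns its own recovered head with a raised ErrorRecoverLevel, and CleanHeads later keeps only the heads that needed the fewest repairs.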