Example #1
        public override Parse<TFuture> BuildParse<TFuture>(Future<string, TFuture> future)
        {
            Parse<TFuture> parse = null;

            parse = scanner =>
            {
                // save the position before reading the current token
                ForkableScanner prevScanner = scanner.Fork();

                Lexeme l = scanner.Read();

                if (l.TokenIndex == m_token.Index)
                {
                    return new StepResult<TFuture>(0, () => future(l.Value.Content)(scanner));
                }

                Lexeme      recovery     = l.GetErrorCorrectionLexeme(m_token.Index, m_token.Description);
                SyntaxError insertionErr = Grammar.RecoverByInsertion(recovery);

                if (l.IsEndOfStream)
                {
                    // the end of the input has been reached;
                    // recover by inserting the expected token
                    return new StepResult<TFuture>(1,
                        () => future(recovery.Value.Content)(prevScanner), insertionErr);
                }
                else
                {
                    // try both inserting the expected token and deleting the current one,
                    // letting later parsing choose the better path
                    return Grammar.Best(
                        new StepResult<TFuture>(1, () => future(recovery.Value.Content)(prevScanner), insertionErr),
                        new StepResult<TFuture>(1, () => parse(scanner), Grammar.RecoverByDeletion(l)));
                }
            };

            return parse;
        }
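Example #1 builds a parse function that first tries to match the expected token and, on failure, races two recoveries: inserting the expected token versus deleting the offending one, with Grammar.Best keeping the cheaper continuation. As a rough illustration of that selection step only, the sketch below collapses the comparison into a single eager cost check; Step and Best are hypothetical stand-ins, not the library's StepResult and Grammar.Best.

using System;

// Hypothetical stand-in for StepResult<TFuture>: a recovery cost plus a lazy
// continuation of the parse on that branch.
sealed class Step<T>
{
    public int     Cost     { get; }   // recovery actions taken so far
    public Func<T> Continue { get; }   // resume parsing on this branch

    public Step(int cost, Func<T> cont)
    {
        Cost     = cost;
        Continue = cont;
    }
}

static class BestSketch
{
    // keep whichever branch has paid fewer recovery steps; ties keep the first
    public static Step<T> Best<T>(Step<T> a, Step<T> b)
    {
        return a.Cost <= b.Cost ? a : b;
    }
}

Since both branches in Example #1 start at cost 1, an eager check like this one cannot distinguish them; the real combinator presumably resolves the tie by evaluating the continuations further, which is the part this sketch omits.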
Example #2
        private void ReduceAndShiftForRecovery(Lexeme z, ParserHead head, IList<ParserHead> shiftTarget, int syntaxError, CancellationToken ctoken)
        {
            Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

            // try each token index except the last as an insertion candidate
            for (int j = 0; j < m_transitions.TokenCount - 1; j++)
            {
                recoverQueue.Enqueue(head);

                while (recoverQueue.Count > 0)
                {
                    var recoverHead = recoverQueue.Dequeue();

                    int recoverStateNumber = recoverHead.TopStackStateIndex;

                    var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

                    var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
                    var recoverShift  = recoverShifts;

                    while (recoverShift != null)
                    {
                        ctoken.ThrowIfCancellationRequested();

                        var insertHead = recoverHead.Clone();

                        // fabricate a lexeme for the expected token so it can be shifted
                        var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));
                        insertHead.Shift(insertLexeme, recoverShift.Value);
                        insertHead.IncreaseErrorRecoverLevel();
                        insertHead.AddError(new ErrorRecord(syntaxError, z.Value.Span)
                        {
                            ErrorArgument  = insertLexeme.Value,
                            ErrorArgument2 = z.Value
                        });

                        shiftTarget.Add(insertHead);

                        recoverShift = recoverShift.GetNext();
                    }

                    var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

                    var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
                    var recoverReduce  = recoverReduces;

                    while (recoverReduce != null)
                    {
                        ctoken.ThrowIfCancellationRequested();

                        int         productionIndex = recoverReduce.Value;
                        IProduction production      = m_transitions.NonTerminals[productionIndex];

                        var reducedHead = recoverHead.Clone();

                        reducedHead.Reduce(production, m_reducer, z);

                        //add back to queue, until shifted
                        m_recoverReducedHeads.Add(reducedHead);

                        //get next reduce
                        recoverReduce = recoverReduce.GetNext();
                    }

                    if (m_recoverReducedHeads.Count > 0)
                    {
                        m_tempHeads.Clear();
                        m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                        m_recoverReducedHeads.Clear();

                        foreach (var recoveredHead in m_tempHeads)
                        {
                            recoverQueue.Enqueue(recoveredHead);
                        }
                    }
                }
            }
        }
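The loop above is a worklist: each dequeued head tries every shift of the fabricated token (producing finished recovery candidates) and every reduce (producing heads that are cleaned and re-enqueued until they can shift). A minimal sketch of that shape, with hypothetical shift/reduce delegates standing in for the transition-table lookups:

using System;
using System.Collections.Generic;

static class WorklistSketch
{
    // Drain a recovery worklist: heads that shift are finished recovery
    // candidates; heads that reduce are re-enqueued so they get another
    // chance to shift. The shift/reduce delegates are hypothetical stand-ins
    // for the transition-table lookups; termination relies on reduce
    // eventually yielding nothing, which the original code ensures by
    // deduplicating heads through m_cleaner.CleanHeads.
    public static void Drain<THead>(
        THead start,
        Func<THead, IEnumerable<THead>> shift,
        Func<THead, IEnumerable<THead>> reduce,
        ICollection<THead> shifted)
    {
        var queue = new Queue<THead>();
        queue.Enqueue(start);

        while (queue.Count > 0)
        {
            var head = queue.Dequeue();

            foreach (var h in shift(head))
            {
                shifted.Add(h);     // successful shift: keep this candidate
            }

            foreach (var h in reduce(head))
            {
                queue.Enqueue(h);   // reduced: try shifting from the new state
            }
        }
    }
}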
Example #3
        private void RecoverError(Lexeme z)
        {
            List<ParserHead> shiftedHeads = m_shiftedHeads;

            m_heads.Clear();
            int errorHeadCount = m_errorCandidates.Count;

            Debug.Assert(errorHeadCount > 0);

            for (int i = 0; i < errorHeadCount; i++)
            {
                var head = m_errorCandidates[i];

                //option 1: remove
                //remove current token and continue
                if (!z.IsEndOfStream)
                {
                    var deleteHead = head.Clone();

                    deleteHead.IncreaseErrorRecoverLevel();
                    deleteHead.AddError(new ErrorRecord(m_errorDef.TokenUnexpectedId, z.Value.Span)
                    {
                        ErrorArgument = z.Value
                    });

                    shiftedHeads.Add(deleteHead);
                }

                //option 2: insert
                //insert every token that can be shifted and continue
                Queue<ParserHead> recoverQueue = new Queue<ParserHead>();

                for (int j = 0; j < m_transitions.TokenCount - 1; j++)
                {
                    recoverQueue.Enqueue(head);

                    while (recoverQueue.Count > 0)
                    {
                        var recoverHead        = recoverQueue.Dequeue();
                        int recoverStateNumber = recoverHead.TopStackStateIndex;

                        var shiftLexer = m_transitions.GetLexersInShifting(recoverStateNumber);

                        int tokenIndex;
                        if (shiftLexer == null)
                        {
                            tokenIndex = z.TokenIndex;
                        }
                        else
                        {
                            tokenIndex = z.GetTokenIndex(shiftLexer.Value);
                        }

                        var recoverShifts = m_transitions.GetShift(recoverStateNumber, j);
                        var recoverShift  = recoverShifts;

                        while (recoverShift != null)
                        {
                            var insertHead = recoverHead.Clone();

                            var insertLexeme = z.GetErrorCorrectionLexeme(j, m_transitions.GetTokenDescription(j));
                            insertHead.Shift(insertLexeme, recoverShift.Value);
                            insertHead.IncreaseErrorRecoverLevel();
                            insertHead.AddError(new ErrorRecord(m_errorDef.TokenMissingId, z.Value.Span)
                            {
                                ErrorArgument = insertLexeme.Value
                            });

                            m_heads.Add(insertHead);

                            recoverShift = recoverShift.GetNext();
                        }

                        var reduceLexer = m_transitions.GetLexersInReducing(recoverStateNumber);

                        if (reduceLexer == null)
                        {
                            tokenIndex = z.TokenIndex;
                        }
                        else
                        {
                            tokenIndex = z.GetTokenIndex(reduceLexer.Value);
                        }

                        var recoverReduces = m_transitions.GetReduce(recoverStateNumber, j);
                        var recoverReduce  = recoverReduces;

                        while (recoverReduce != null)
                        {
                            int         productionIndex = recoverReduce.Value;
                            IProduction production      = m_transitions.NonTerminals[productionIndex];

                            var reducedHead = recoverHead.Clone();

                            reducedHead.Reduce(production, m_reducer, z);

                            //add back to queue, until shifted
                            m_recoverReducedHeads.Add(reducedHead);

                            //get next reduce
                            recoverReduce = recoverReduce.GetNext();
                        }

                        if (m_recoverReducedHeads.Count > 0)
                        {
                            m_tempHeads.Clear();
                            m_cleaner.CleanHeads(m_recoverReducedHeads, m_tempHeads);
                            m_recoverReducedHeads.Clear();

                            foreach (var recoveredHead in m_tempHeads)
                            {
                                recoverQueue.Enqueue(recoveredHead);
                            }
                        }
                    }
                }
            }
        }
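RecoverError combines both strategies per error candidate: option 1 deletes the unexpected lexeme (unless it is the end-of-stream marker), and option 2 inserts each token for which the head's current state has a shift, re-enqueueing reduced heads exactly as in Example #2. A minimal sketch that just enumerates those two choices for a single head, under the assumption that a canShift probe is available; the tuple encoding is illustrative, not the engine's types:

using System;
using System.Collections.Generic;

static class OptionsSketch
{
    // Enumerate the two recovery options for one parser head, as in
    // Example #3. canShift is a hypothetical stand-in for probing
    // m_transitions.GetShift in the head's current state.
    public static IEnumerable<(string Action, int Token)> RecoveryOptions(
        int unexpectedToken,
        bool isEndOfStream,
        int tokenCount,
        Func<int, bool> canShift)
    {
        // option 1: delete the unexpected token (impossible at end of stream)
        if (!isEndOfStream)
        {
            yield return ("delete", unexpectedToken);
        }

        // option 2: insert any token the current state can shift
        // (the last token index is excluded, as in the original loop)
        for (int j = 0; j < tokenCount - 1; j++)
        {
            if (canShift(j))
            {
                yield return ("insert", j);
            }
        }
    }
}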