/// <summary>
/// LALR parse loop for <c>Token</c> streams.
/// Based on: http://www.goldparser.org/doc/engine-pseudo/parse-token.htm
/// </summary>
/// <param name="tokenIterator">Token iterator which will be owned by the caller</param>
/// <param name="debugger">Enables debugging support</param>
/// <param name="trimReductions">If true (default), trim reductions of the form L -> R, where R is a non-terminal</param>
/// <param name="allowRewriting">Apply rewriting functions</param>
/// <returns>The reduced program tree on acceptance or the erroneous token</returns>
public async Task<Token> ParseInputAsync(IAsyncLAIterator<Token> tokenIterator, Debug debugger, bool trimReductions = true, bool allowRewriting = true)
{
    const int initState = 0;
    var tokenStack = new Stack<Token>();
    var state = initState;
    while (true)
    {
        // Peek at the next token without consuming it; only a Shift consumes.
        var token = await tokenIterator.LookAheadAsync();
        // Action table is indexed by (current state, symbol ID + 1).
        var action = ParseTable.Actions[state, token.ID + 1];
        debugger.DumpParsingState(state, tokenStack, token, action);
        switch (action.ActionType)
        {
            case ActionType.Shift:
                // Move to the shift target state, record it on the token, and consume the input.
                state = action.ActionParameter;
                token.State = state;
                tokenStack.Push(token);
                await tokenIterator.MoveNextAsync();
                break;
            case ActionType.Reduce:
                var nProduction = action.ActionParameter;
                var production = Productions[nProduction];
                var nChildren = production.Right.Length;
                Token reduction;
                if (trimReductions && nChildren == 1 && _nonterminals.Contains(production.Right[0]))
                {
                    // Unit production L -> R with R a non-terminal: hoist the child's
                    // content instead of creating an extra tree level.
                    reduction = new Token(production.Left, tokenStack.Pop().Content);
                }
                else
                {
                    // Pop the handle off the stack; children were pushed left-to-right,
                    // so fill the array from the back to restore source order.
                    var children = new Token[nChildren];
                    for (var i = 0; i < nChildren; i++)
                    {
                        children[nChildren - i - 1] = tokenStack.Pop();
                    }
                    // An optional rewrite hook may replace the default Reduction node.
                    var rewrite = (allowRewriting ? production.Rewrite(children) : null) ?? new Reduction(nProduction, children);
                    reduction = new Token(production.Left, rewrite);
                }
                // Goto: look up the next state from the state exposed on top of the
                // stack after the pop (or the initial state when the stack is empty).
                var lastState = tokenStack.Count > 0 ? tokenStack.Peek().State : initState;
                state = ParseTable.Actions[lastState, production.Left + 1].ActionParameter;
                // NOTE(review): the GOLD pseudo-code stores the goto state on the
                // reduction; here the non-terminal ID is stored instead. Later goto
                // lookups read Peek().State, so confirm Token.State semantics.
                reduction.State = production.Left;
                tokenStack.Push(reduction);
                // Accept when only the start symbol (ID 0) remains on the stack.
                if (tokenStack.Count == 1 && tokenStack.Peek().ID == 0)
                {
                    return tokenStack.Pop();
                }
                break;
            case ActionType.Error:
                // Syntax error: hand the offending token back to the caller.
                // NOTE(review): unlike the Item overload, this one does not mark
                // token.State negative before returning — confirm intended.
                return token;
            case ActionType.ErrorRR:
                throw new InvalidOperationException("Reduce-Reduce conflict in grammar: " + token);
            case ActionType.ErrorSR:
                throw new InvalidOperationException("Shift-Reduce conflict in grammar: " + token);
        }
        debugger.Flush();
    }
}
/// <summary>
/// LALR parse loop for <c>Item</c> streams.
/// Based on: http://www.goldparser.org/doc/engine-pseudo/parse-token.htm
/// (the original comment's "parse-Item.htm" URL was a Token->Item rename artifact)
/// </summary>
/// <param name="tokenIterator">Item iterator which will be owned by the caller</param>
/// <param name="debugger">Enables debugging support</param>
/// <param name="trimReductions">If true (default), trim reductions of the form L -> R, where R is a non-terminal</param>
/// <param name="allowRewriting">Apply rewriting functions</param>
/// <returns>The reduced program tree on acceptance or the erroneous Item</returns>
public async Task <Item> ParseInputAsync(IAsyncLAIterator <Item> tokenIterator, Debug debugger, bool trimReductions = true, bool allowRewriting = true)
{
    const int initState = 0;
    var tokenStack = new Stack <Item>();
    var state = initState;
    while (true)
    {
        // Peek at the next item without consuming it; only a Shift consumes.
        var token = await tokenIterator.LookAheadAsync();
        // Action table is indexed by (current state, symbol ID + 1).
        var action = ParseTable.Actions[state, token.ID + 1];
        debugger.DumpParsingState(state, tokenStack, token, action);
        switch (action.ActionType)
        {
            case ActionType.Shift:
                // Move to the shift target state, record it on the item, and consume the input.
                state = action.ActionParameter;
                token.State = state;
                tokenStack.Push(token);
                await tokenIterator.MoveNextAsync();
                break;
            case ActionType.Reduce:
                var nProduction = action.ActionParameter;
                var production = Productions[nProduction];
                var nChildren = production.Right.Length;
                Item reduction;
                if (trimReductions && nChildren == 1 && _nonterminals.Contains(production.Right[0]))
                {
                    // Unit production L -> R with R a non-terminal: hoist the child's
                    // content instead of creating an extra tree level.
                    reduction = new Item(production.Left, tokenStack.Pop().Content);
                }
                else
                {
                    // Pop the handle off the stack; children were pushed left-to-right,
                    // so fill the array from the back to restore source order.
                    var children = new Item[nChildren];
                    for (var i = 0; i < nChildren; i++)
                    {
                        children[nChildren - i - 1] = tokenStack.Pop();
                    }
                    // An optional rewrite hook may replace the default Reduction node.
                    var rewrite = (allowRewriting ? production.Rewrite(children) : null) ?? new Reduction(nProduction, children);
                    reduction = new Item(production.Left, rewrite);
                }
                // Goto: look up the next state from the state exposed on top of the
                // stack after the pop (or the initial state when the stack is empty).
                var lastState = tokenStack.Count > 0 ? tokenStack.Peek().State : initState;
                state = ParseTable.Actions[lastState, production.Left + 1].ActionParameter;
                // NOTE(review): the GOLD pseudo-code stores the goto state on the
                // reduction; here the non-terminal ID is stored instead. Later goto
                // lookups read Peek().State, so confirm Item.State semantics.
                reduction.State = production.Left;
                tokenStack.Push(reduction);
                // Accept when only the start symbol (ID 0) remains on the stack.
                if (tokenStack.Count == 1 && tokenStack.Peek().ID == 0)
                {
                    return(tokenStack.Pop());
                }
                break;
            case ActionType.Error:
                // Flag the failure by forcing the item's state negative, then hand
                // the offending item back to the caller.
                token.State = state < 0 ? state : -state;
                return(token);
            case ActionType.ErrorRR:
                throw new InvalidOperationException("Reduce-Reduce conflict in grammar: " + token);
            case ActionType.ErrorSR:
                throw new InvalidOperationException("Shift-Reduce conflict in grammar: " + token);
        }
        debugger.Flush();
    }
}
/// <summary>
/// Advances the tokenizer to the next lexical item, storing it in <c>_currentItem</c>.
/// Narrows the candidate patterns of the current lexer state character by character,
/// then extends the winning match greedily (maximal munch) before executing the
/// pattern's state-transition instructions.
/// </summary>
/// <returns><c>true</c> when an item was produced; <c>false</c> when the character
/// source is exhausted or no pattern of the current state matches.</returns>
public async Task <bool> MoveNextAsync()
{
    var buffer = new StringBuilder(4);
    // Patterns registered for the lexer state on top of the state stack.
    var statePatterns = _patternTable[_states.Peek()];
    var matchingPatterns = new List <RegexTuple>(statePatterns);
    do
    {
        if (!await _charSource.MoveNextAsync())
        {
            // Out of input before any single pattern won.
            return (false);
        }
        var textWithLA = await FillBufferAsync(buffer);
        // Drop every candidate that no longer matches the buffered text
        // (RemoveAt(i--) keeps the index aligned after removal).
        for (var i = 0; i < matchingPatterns.Count; i++)
        {
            var pattern = matchingPatterns[i];
            var match = pattern.Item2.Match(textWithLA);
            if (!match.Success || match.Length == 0)
            {
                matchingPatterns.RemoveAt(i--);
            }
        }
    } while (matchingPatterns.Count > 1);
    if (matchingPatterns.Count == 0)
    {
        return (false);
    }
    var matchingPattern = matchingPatterns[0];
    // Maximal munch: keep pulling look-ahead characters while the pattern still
    // matches the whole buffer; back the look-ahead out once it stops matching.
    do
    {
        var lookAhead = await FillLookAheadAsync(buffer, true);
        if (lookAhead == null)
        {
            break;
        }
        var textWithLA = buffer.ToString();
        var match = matchingPattern.Item2.Match(textWithLA);
        if (!match.Success || match.Length < textWithLA.Length)
        {
            RemoveLookAhead(buffer, lookAhead);
            break;
        }
        if (!await _charSource.MoveNextAsync())
        {
            break;
        }
    } while (true);
    _currentItem = new Item(matchingPattern.Item1, buffer.ToString());
    // Item3 holds a comma-separated list of lexer-state instructions.
    var instructionList = matchingPattern.Item3;
    if (!string.IsNullOrEmpty(instructionList))
    {
        foreach (var instruction in instructionList.Split(new[] { ',' }, StringSplitOptions.RemoveEmptyEntries))
        {
            switch (instruction)
            {
                case PopState:
                    _states.Pop();
                    break;
                case Ignore:
                    // Skip this item (e.g. whitespace/comments) and produce the next one.
                    return (await MoveNextAsync());
                default:
                    // Any other instruction names a lexer state to enter.
                    // FIX: push the individual instruction, not matchingPattern.Item3 —
                    // pushing the whole comma-separated list corrupted the state stack
                    // whenever a pattern carried more than one instruction.
                    _states.Push(instruction);
                    break;
            }
        }
    }
    return (true);
}