/// <summary>
/// Table-driven shift/reduce parse loop with error recovery. Drives the
/// automaton over <paramref name="tokens"/> starting in
/// <paramref name="initialState"/> and returns the accepted state
/// (stack[1]) once the accept action fires on EOF.
/// </summary>
/// <param name="tokens">Terminal stream to parse.</param>
/// <param name="initialState">Automaton start state; also used for the sentinel bottom-of-stack entry.</param>
/// <returns>The stack entry left directly above the sentinel when the input is accepted.</returns>
/// <exception cref="InvalidOperationException">If the token stream is exhausted before the automaton accepts.</exception>
State Parse(IEnumerable <Type_Token> tokens, int initialState)
{
    List <State> stack = new List <State>();
    {
        // Sentinel bottom-of-stack entry carrying the start state.
        State bottom = new State();
        bottom.state = initialState;
        stack.Add(bottom);
    }
    int current = initialState;
    using (IEnumerator <Type_Token> cursor = tokens.GetEnumerator())
    {
        bool hasToken = cursor.MoveNext();
        while (hasToken)
        {
            Type_Token lookahead = cursor.Current;
            TokenType kind = GetTokenType(lookahead);
            if (kind == TokenType.Error)
            {
                // Lexical error token: let the recovery routine reduce and
                // resynchronize the stack, then re-enter the loop.
                current = ReduceError(stack, cursor);
                continue;
            }
            int row = _transitionTable[current];
            // Row values <= 10 are a fixed action for every token; larger
            // values are a base index into the per-token action row.
            int action = row <= 10 ? row : _transitionTable[row + (int)kind];
            if (action > 10)
            {
                // Shift: action encodes target state + 10. Push and advance.
                State pushed = new State();
                pushed.state = current = action - 10;
                pushed.value = lookahead;
                stack.Add(pushed);
                hasToken = cursor.MoveNext();
            }
            else if (action > 1)
            {
                // Reduce: actions 2..10 encode rule id + 2. Lookahead is kept.
                current = Reduce(action - 2, stack);
            }
            else if (action == 0 || kind != TokenType.EOF)
            {
                // No legal action for this lookahead: attempt error recovery.
                current = ReduceError(stack, cursor);
            }
            else
            {
                // action == 1 with EOF lookahead: accept.
                return stack[1];
            }
        }
    }
    throw new InvalidOperationException("ran out of tokens, somehow");
}
// Grammar-reduction callbacks. Presumably invoked via the Reduce(reductionId, stack)
// dispatch used by Parse — confirm against the generated Reduce implementation.
// Each receives the segments popped for one rule and builds that rule's value.

/// <summary>Reduction callback for rule Element (alternative 1): builds an element value from a label terminal.</summary>
protected abstract Type_Value Reduce_Element_1(Type_Token labelSeg);
/// <summary>Reduction callback for rule Value (alternative 1): builds a value from a number terminal.</summary>
protected abstract Type_Object Reduce_Value_1(Type_Token numberSeg);
/// <summary>Reduction callback for rule FactorOp (alternative 2): handles the divide operator terminal.</summary>
protected abstract Type_Object Reduce_FactorOp_2(Type_Token divideSeg);
/// <summary>Reduction callback for rule TermOp (alternative 2): handles the subtract operator terminal.</summary>
protected abstract Type_Object Reduce_TermOp_2(Type_Token subtractSeg);
/// <summary>Reduction callback for rule ComparisonOp (alternative 6): handles the &gt;= operator terminal.</summary>
protected abstract Type_Object Reduce_ComparisonOp_6(Type_Token greaterThanOrEqualToSeg);
/// <summary>Reduction callback for rule ComparisonOp (alternative 4): handles the &lt;= operator terminal.</summary>
protected abstract Type_Object Reduce_ComparisonOp_4(Type_Token lessThanOrEqualToSeg);
/// <summary>Reduction callback for rule ComparisonOp (alternative 2): handles the not-equal operator terminal.</summary>
protected abstract Type_Object Reduce_ComparisonOp_2(Type_Token notEqualToSeg);
/// <summary>Reduction callback for rule If (alternative 1): builds a statement from an if keyword and a body statement (no else branch).</summary>
protected abstract Type_Statement Reduce_If_1(Type_Token ifKeywordSeg, Type_Statement statementSeg);
/// <summary>Reduction callback for rule Document (alternative 1): builds a document value from two 'a' terminals.</summary>
protected abstract Type_Value Reduce_Document_1(Type_Token aSeg1, Type_Token aSeg2);
/// <summary>Reduction callback for rule Alt (alternative 1): passes through a 'mid' terminal.</summary>
protected abstract Type_Token Reduce_Alt_1(Type_Token midSeg);
/// <summary>Reduction callback for rule Document (alternative 2): passes through an 'alt' segment.</summary>
protected abstract Type_Token Reduce_Document_2(Type_Token altSeg);
/// <summary>Reduction callback for rule L (alternative 2): extends an L segment with 'a' and 'b' terminals.</summary>
protected abstract Type_Segment Reduce_L_2(Type_Segment lSeg, Type_Token aSeg, Type_Token bSeg);
/// <summary>
/// Table-driven shift/reduce parse loop, tracing variant: every automaton step
/// is reported through WriteTrace, and an unexpected token aborts the parse
/// (after notifying UnexpectedToken) instead of attempting recovery.
/// </summary>
/// <param name="tokens">Terminal stream to parse.</param>
/// <param name="initialState">Automaton start state; also used for the sentinel bottom-of-stack entry.</param>
/// <returns>The stack entry left directly above the sentinel when the input is accepted.</returns>
/// <exception cref="InvalidOperationException">On an unexpected token, or if the stream ends before the automaton accepts.</exception>
State Parse(IEnumerable <Type_Token> tokens, int initialState)
{
    List <State> stack = new List <State>();
    {
        // Sentinel bottom-of-stack entry carrying the start state.
        State bottom = new State();
        bottom.state = initialState;
        stack.Add(bottom);
    }
    // Reset any trace left over from a previous parse.
    _traceBuilder = null;
    int current = initialState;
    using (IEnumerator <Type_Token> cursor = tokens.GetEnumerator())
    {
        bool hasToken = cursor.MoveNext();
        while (hasToken)
        {
            Type_Token lookahead = cursor.Current;
            TokenType kind = GetTokenType(lookahead);
            int row = _transitionTable[current];
            // Row values <= 20 are a fixed action for every token; larger
            // values are a base index into the per-token action row.
            int action = row <= 20 ? row : _transitionTable[row + (int)kind];
            if (action > 20)
            {
                // Shift: action encodes target state + 20. Push and advance.
                WriteTrace(current, kind, "Shift");
                State pushed = new State();
                pushed.state = current = action - 20;
                pushed.value = lookahead;
                stack.Add(pushed);
                hasToken = cursor.MoveNext();
            }
            else if (action > 1)
            {
                // Reduce: actions 2..20 encode rule id + 2. Lookahead is kept.
                int ruleIndex = action - 2;
                WriteTrace(current, kind, ReductionString(ruleIndex));
                current = Reduce(ruleIndex, stack);
            }
            else if (action == 0 || kind != TokenType.EOF)
            {
                // No legal action: report and abort (no recovery in this variant).
                WriteTrace(current, kind, "Error");
                UnexpectedToken(lookahead);
                throw new InvalidOperationException("unexpected token: " + kind);
            }
            else
            {
                // action == 1 with EOF lookahead: accept.
                WriteTrace(current, kind, "Accept");
                return stack[1];
            }
        }
    }
    throw new InvalidOperationException("ran out of tokens, somehow");
}
/// <summary>Reduction callback for rule Element (alternative 2): builds an element value from an error-recovery token.</summary>
protected abstract Type_Value Reduce_Element_2(Type_Token errorSeg);
/// <summary>
/// Hook called when the parser hits a terminal it has no action for, just
/// before it throws (see the tracing Parse variant). Default does nothing;
/// override to record diagnostics.
/// </summary>
protected virtual void UnexpectedToken(Type_Token terminal) { }
/// <summary>Classifies a terminal into the TokenType used to index the transition table.</summary>
protected abstract TokenType GetTokenType(Type_Token terminal);
/// <summary>Reduction callback for rule If (alternative 2): builds a statement from an if/else pair with both branch statements.</summary>
protected abstract Type_Statement Reduce_If_2(Type_Token ifKeywordSeg, Type_Statement statementSeg1, Type_Token elseKeywordSeg, Type_Statement statementSeg2);
/// <summary>Reduction callback for rule ComparisonOp (alternative 3): handles the &lt; operator terminal.</summary>
protected abstract Type_Object Reduce_ComparisonOp_3(Type_Token lessThanSeg);
/// <summary>Reduction callback for rule Document (alternative 2): builds a document value from a 'b' terminal.</summary>
protected abstract Type_Value Reduce_Document_2(Type_Token bSeg);
/// <summary>Reduction callback for rule ComparisonOp (alternative 5): handles the &gt; operator terminal.</summary>
protected abstract Type_Object Reduce_ComparisonOp_5(Type_Token greaterThanSeg);
/// <summary>
/// Error-recovery routine for the table-driven parser. Remembers the offending
/// token, then (1) reduces the top of the stack as far as the Error column of
/// the transition table allows, and (2) scans forward through the remaining
/// input for a token that can legally follow an Error shift from some stack
/// prefix (checked via CanBeFollowedBy), truncating the stack to that prefix
/// and pushing a synthetic Error state holding the offending token when found.
/// </summary>
/// <param name="stack">Current parse stack; mutated in place (reduced, then truncated and re-pushed) on recovery.</param>
/// <param name="enumerator">Token cursor; advanced past discarded tokens during resynchronization.</param>
/// <returns>The parser state after the Error shift.</returns>
/// <exception cref="InvalidOperationException">If the input runs out, or EOF is reached with no recovery point.</exception>
// NOTE(review): `failed` is sized 4 and indexed by (int)nextType, which assumes
// this grammar's TokenType enum has at most 4 members — verify against the
// generated TokenType declaration.
int ReduceError(List <State> stack, IEnumerator <Type_Token> enumerator) { Type_Token errorToken = enumerator.Current; if (GetTokenType(enumerator.Current) != TokenType.EOF) { if (!enumerator.MoveNext()) { throw new InvalidOperationException("ran out of tokens while attempting to recover from a parse error."); } } bool[] failed = new bool[4]; do { int state = stack[stack.Count - 1].state; int offset = _transitionTable[state]; int action = offset <= 10 ? offset : _transitionTable[offset + (int)TokenType.Error]; if (action == 0 || action > 10) { break; } else { int reductionId = action - 2; state = Reduce(reductionId, stack); } }while (true); do { TokenType nextType = GetTokenType(enumerator.Current); if (!failed[(int)nextType]) { for (int i = stack.Count - 1; i >= 0; i--) { int state = stack[i].state; int offset = _transitionTable[state]; if (offset <= 10) { continue; } int action = _transitionTable[offset + (int)TokenType.Error] - 10; if (action <= 0) { continue; } if (!CanBeFollowedBy(stack, i, action, nextType)) { continue; } State newState = new State(); newState.state = action; newState.value = errorToken; stack.RemoveRange(i + 1, stack.Count - i - 1); stack.Add(newState); return(action); } failed[(int)nextType] = true; } if (nextType == TokenType.EOF) { throw new InvalidOperationException("unexpected token: " + GetTokenType(errorToken)); } }while (enumerator.MoveNext()); throw new InvalidOperationException("ran out of tokens while attempting to recover from a parse error."); }
/// <summary>Reduction callback for rule TermOp (alternative 1): handles the add operator terminal.</summary>
protected abstract Type_Object Reduce_TermOp_1(Type_Token addSeg);
/// <summary>Reduction callback for rule X (alternative 1): builds a segment from an 'x' terminal.</summary>
protected abstract Type_Segment Reduce_X_1(Type_Token xSeg);
/// <summary>Reduction callback for rule FactorOp (alternative 1): handles the multiply operator terminal.</summary>
protected abstract Type_Object Reduce_FactorOp_1(Type_Token multiplySeg);
/// <summary>Reduction callback for rule X (alternative 2): builds a segment from an error-recovery token.</summary>
protected abstract Type_Segment Reduce_X_2(Type_Token errorSeg);
/// <summary>Reduction callback for rule Factor (alternative 1): combines two values around a power operator terminal.</summary>
protected abstract Type_Object Reduce_Factor_1(Type_Object valueSeg1, Type_Token powerSeg, Type_Object valueSeg2);
/// <summary>Reduction callback for rule Y (alternative 1): builds a segment from a 'y' terminal.</summary>
protected abstract Type_Segment Reduce_Y_1(Type_Token ySeg);
/// <summary>Reduction callback for rule Value (alternative 2): unwraps a parenthesized expression.</summary>
protected abstract Type_Object Reduce_Value_2(Type_Token openParenSeg, Type_Object expressionSeg, Type_Token closeParenSeg);
/// <summary>Reduction callback for rule List (alternative 1): appends a comma-separated element to an existing list.</summary>
protected abstract Type_List Reduce_List_1(Type_List listSeg, Type_Token commaSeg, Type_Value elementSeg);