// Consumes tokens until the block comment that is currently open is
// closed. Block comments may nest, so a depth counter tracks how many
// comment-start tokens are still unmatched.
// Returns the terminating comment-end token, or null when the input
// ended inside the comment (an EOF error is fired in that case).
private TerminalToken SkipAfterCommentEnd()
{
    int depth = 1;
    TerminalToken current = null;
    while (depth > 0)
    {
        current = tokenizer.RetrieveToken();
        if (current.Symbol is SymbolEnd)
        {
            // Ran off the end of the input inside a comment.
            FireEOFError();
            break;
        }
        else if (current.Symbol is SymbolCommentStart)
        {
            depth++;
        }
        else if (current.Symbol is SymbolCommentEnd)
        {
            depth--;
        }
    }
    // Only a fully closed comment yields the terminating token.
    return depth == 0 ? current : null;
}
// Handles a block-comment start token: skips to the matching end and,
// if anyone subscribed to OnCommentRead, reports the comment text.
// Returns false when the comment was unterminated (EOF reached).
private bool ProcessCommentStart(TerminalToken token)
{
    // Fast path: no subscriber, just skip past the comment block.
    if (OnCommentRead == null)
    {
        return SkipAfterCommentEnd() != null;
    }

    // Remember where the comment body begins so it can be sliced out.
    Location start = this.tokenizer.GetCurrentLocation();
    TerminalToken endToken = SkipAfterCommentEnd();
    if (endToken == null)
    {
        return false;
    }

    // Extract the body from the raw input, excluding the closing
    // comment token's own text.
    Location end = this.tokenizer.GetCurrentLocation();
    string input = this.tokenizer.GetInput();
    int length = (end.Position - start.Position) - endToken.Text.Length;
    string body = input.Substring(start.Position, length);

    CommentReadEventArgs args =
        new CommentReadEventArgs(token.Text + body, body, false);
    OnCommentRead(this, args);
    return true;
}
// Returns the next significant terminal token, transparently consuming
// trivia (line comments, block comments, whitespace) and lexical error
// tokens via their Process* handlers. Fires OnTokenRead for the token
// about to be handed to the parser; the handler may veto it, which
// stops parsing. Returns null when parsing should not continue.
private TerminalToken GetLookahead()
{
    // A previously read but unconsumed lookahead is reused as-is.
    if (lookahead != null)
    {
        return(lookahead);
    }
    do
    {
        TerminalToken token = tokenizer.RetrieveToken();
        // Each trivia/error handler returns false to request that
        // parsing stops.
        if (token.Symbol is SymbolCommentLine)
        {
            if (!ProcessCommentLine(token))
            {
                continueParsing = false;
            }
        }
        else if (token.Symbol is SymbolCommentStart)
        {
            if (!ProcessCommentStart(token))
            {
                continueParsing = false;
            }
        }
        else if (token.Symbol is SymbolWhiteSpace)
        {
            if (!ProcessWhiteSpace(token))
            {
                continueParsing = false;
            }
        }
        else if (token.Symbol is SymbolError)
        {
            if (!ProcessError(token))
            {
                continueParsing = false;
            }
        }
        else
        {
            // Anything else is a real token: stop scanning.
            lookahead = token;
        }
        if (!continueParsing)
        {
            break;
        }
    } while (lookahead == null);
    // Let subscribers observe — and possibly veto — the token before
    // the parser consumes it.
    if ((lookahead != null) && (OnTokenRead != null))
    {
        TokenReadEventArgs args = new TokenReadEventArgs(lookahead);
        OnTokenRead(this, args);
        if (args.Continue == false)
        {
            // Handler vetoed: drop the token and stop the parse loop.
            continueParsing = false;
            lookahead = null;
        }
    }
    return(lookahead);
}
// Captures the context of a parse error; the event handler inspects it
// and decides how (or whether) to recover.
public ParseErrorEventArgs(TerminalToken unexpectedToken, SymbolCollection expectedTokens)
{
    this.unexpectedToken = unexpectedToken;
    this.expectedTokens = expectedTokens;
    // Default reaction: abort the parse with no replacement token.
    this.nextToken = null;
    this.contin = ContinueMode.Stop;
}
// Reports an unexpected end-of-file by synthesizing an EOF terminal at
// the current input position and routing it through the regular
// parse-error path.
private void FireEOFError()
{
    Location here = tokenizer.GetCurrentLocation();
    TerminalToken eof = new TerminalToken(SymbolCollection.EOF,
                                          SymbolCollection.EOF.Name,
                                          here);
    FireParseError(eof);
}
// Restores the parser to its initial state so a new input can be parsed.
private void Reset()
{
    // Fresh LALR state stack seeded with the start state.
    stateStack = new StateStack();
    stateStack.Push(startState);
    // Fresh token stack and no pending lookahead.
    tokenStack = new TokenStack();
    lookahead = null;
    // Parser flags back to their initial values.
    accepted = false;
    continueParsing = true;
}
// Performs a shift: pushes the action's target state and the token,
// consumes the lookahead, and notifies any OnShift subscribers.
private void DoShift(TerminalToken token, ShiftAction action)
{
    stateStack.Push(action.State);
    tokenStack.Push(token);
    // The lookahead has now been consumed.
    lookahead = null;
    if (OnShift != null)
    {
        ShiftEventArgs args = new ShiftEventArgs(token, action.State);
        OnShift(this, args);
    }
}
// Handles a lexical error token. Returns true when parsing may
// continue; without an OnTokenError subscriber the error is fatal.
private bool ProcessError(TerminalToken token)
{
    if (OnTokenError == null)
    {
        return false;
    }
    // Let the subscriber decide whether to continue.
    TokenErrorEventArgs args = new TokenErrorEventArgs(token);
    OnTokenError(this, args);
    return args.Continue;
}
// Raises OnParseError for an unexpected token. The handler controls
// recovery: its Continue mode decides whether parsing proceeds, and its
// NextToken (if any) becomes the new lookahead. With no subscriber the
// method does nothing (continueParsing keeps whatever value the caller set).
private void FireParseError(TerminalToken token)
{
    if (OnParseError != null)
    {
        ParseErrorEventArgs e = new ParseErrorEventArgs(token, FindExpectedTokens());
        OnParseError(this, e);
        // Any mode other than Stop lets the parse loop keep running.
        continueParsing = e.Continue != ContinueMode.Stop;
        lookahead = e.NextToken;
        if ((e.NextToken != null) && (e.Continue == ContinueMode.Insert))
        {
            // Insert mode: rewind the tokenizer so the offending token
            // is read again after the inserted lookahead is consumed.
            tokenizer.SetCurrentLocation(token.Location);
        }
    }
}
/// <summary>
/// Runs the parser over the given source text.
/// </summary>
/// <param name="input">The text to parse.</param>
/// <returns>The nonterminal token the whole input was reduced to, or
/// null when parsing failed.</returns>
public NonterminalToken Parse(String input)
{
    Reset();
    tokenizer.SetInput(input);
    while (continueParsing)
    {
        TerminalToken token = GetLookahead();
        if (token == null)
        {
            continue;
        }
        ParseTerminal(token);
    }
    if (!accepted)
    {
        return null;
    }
    // On acceptance the token stack holds exactly the reduced root.
    return (NonterminalToken)tokenStack.Pop();
}
// Handles a line-comment token: skips to the end of the line and, if
// anyone subscribed to OnCommentRead, reports the comment text.
// Returns false when the skip itself failed.
private bool ProcessCommentLine(TerminalToken token)
{
    // Fast path: no subscriber, just discard the rest of the line.
    if (OnCommentRead == null)
    {
        return SkipToEndOfLine();
    }

    // Remember where the comment body begins so it can be sliced out.
    Location start = this.tokenizer.GetCurrentLocation();
    bool skipped = SkipToEndOfLine();
    if (skipped)
    {
        // Everything between the comment introducer and the end of the
        // line is the comment body.
        Location end = this.tokenizer.GetCurrentLocation();
        string input = this.tokenizer.GetInput();
        int length = end.Position - start.Position;
        string body = input.Substring(start.Position, length);
        CommentReadEventArgs args =
            new CommentReadEventArgs(token.Text + body, body, true);
        OnCommentRead(this, args);
    }
    return skipped;
}
// Dispatches one terminal token against the LALR table: shift, reduce,
// or accept. When no action exists the token is a syntax error.
private void ParseTerminal(TerminalToken token)
{
    State state = stateStack.Peek();
    Action action = state.Actions.Get(token.Symbol);

    if (action is ShiftAction)
    {
        DoShift(token, (ShiftAction)action);
    }
    else if (action is ReduceAction)
    {
        DoReduce(token, (ReduceAction)action);
    }
    else if (action is AcceptAction)
    {
        DoAccept(token, (AcceptAction)action);
    }
    else
    {
        // Syntax error: stop by default, but FireParseError may flip
        // continueParsing back on if a handler requests recovery.
        continueParsing = false;
        FireParseError(token);
    }
}
// Records the token that was shifted and the state entered by the shift.
public ShiftEventArgs(TerminalToken token, State newState)
{
    this.newState = newState;
    this.token = token;
}
// Wraps the offending token of a lexical error.
public TokenErrorEventArgs(TerminalToken token)
{
    // By default a token error stops the parse; a handler may set
    // Continue to true to keep going.
    this.contin = false;
    this.token = token;
}
// Wraps a token about to be handed to the parser.
public TokenReadEventArgs(TerminalToken token)
{
    this.token = token;
    // Default: let the parser consume the token and keep going.
    // ("this." qualification added for consistency with the other
    // EventArgs constructors in this file.)
    this.contin = true;
}