/// <summary>
/// The main method to call when parsing.
/// </summary>
/// <param name="input">The source text to parse.</param>
public void Parse(string input)
{
    m_err.Clear();
    m_consumed.Clear();

    try
    {
        m_sym = m_lexer.Tokenize(input);
    }
    catch (LexerException ex)
    {
        throw new ParserException("Parsing failed (Lexer)", ex);
    }

    // Prg is the terminating (start) rule; Stmt was the previous entry point.
    //Stmt();
    Prg();

    // If we still have symbols in the stream, parsing failed
    if (m_sym.Count > 0)
    {
        throw new ParserException("Parsing failed (Unmatched tokens in stream)");
    }
    //m_err.Enqueue(new Error { Message = "Syntax Error - Unmatched tokens", Type = ErrorType.SyntaxError });

    // If parsing failed, reset the tree
    if (m_err.Count > 0)
    {
        m_tree = null;
    }
}
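Because Parse wraps any LexerException in a ParserException, calling code only has to handle one exception type. Below is a minimal, hypothetical usage sketch; the Parser class name, its parameterless constructor, and the sample input are assumptions, not part of the code above.

// Hypothetical caller; the Parser class name and its constructor are assumptions.
var parser = new Parser();

try
{
    parser.Parse("a = 1 + 2;");
    // On success no exception is thrown and the internal parse tree is populated.
}
catch (ParserException ex)
{
    // Both lexer and parser failures surface here; a lexer failure keeps the
    // original LexerException as the InnerException.
    Console.WriteLine($"{ex.Message}: {ex.InnerException?.Message}");
}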
public PQueue<Token> Tokenize(string input)
{
    // Ignores the input and simply replays the pre-set tokens in LexerOutput,
    // which makes this implementation usable as a test double for the parser.
    var queue = new PQueue<Token>();

    foreach (var token in LexerOutput)
    {
        queue.Enqueue(token);
    }

    return queue;
}
public PQueue<Token> Tokenize(string input)
{
    var output = new PQueue<Token>();

    index = 0;
    this.input = input;

    Token nextToken;

    // Pull tokens until the end of the input, discarding whitespace so the
    // grammar rules never have to deal with WHITESPACE tokens.
    while ((nextToken = FetchNextToken()) != null)
    {
        if (nextToken.TokenType != TokenType.WHITESPACE)
        {
            output.Enqueue(nextToken);
        }
    }

    return output;
}
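The two Tokenize methods share the same signature: the first ignores its input and replays LexerOutput, which is exactly what a parser unit test needs, while the second is the real lexer. A hedged sketch of putting both behind a common interface so the parser can be tested without real lexing; the ILexer and FakeLexer names are illustrative assumptions and do not appear in the original code.

// Illustrative only; ILexer and FakeLexer are assumed names.
public interface ILexer
{
    PQueue<Token> Tokenize(string input);
}

// A fake lexer for parser tests: replays a canned token stream and ignores the input.
public class FakeLexer : ILexer
{
    public List<Token> LexerOutput { get; } = new List<Token>();

    public PQueue<Token> Tokenize(string input)
    {
        var queue = new PQueue<Token>();
        foreach (var token in LexerOutput)
        {
            queue.Enqueue(token);
        }
        return queue;
    }
}

With such an abstraction the parser would depend on ILexer rather than a concrete lexer, and the m_lexer.Tokenize(input) call in Parse would keep working unchanged.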