/// <summary>
/// Lexes the given source text into a queue of tokens.
/// Whitespace tokens are filtered out of the stream; after every NEWLINE
/// token the indentation bookkeeping is run (presumably emitting
/// INDENT/DEDENT tokens — see <c>GenerateIndentDedentTokens</c>).
/// </summary>
/// <param name="input">The raw source text to tokenize.</param>
/// <returns>The queue of tokens produced from the input.</returns>
public PQueue<Token> Tokenize(string input)
{
    output = new PQueue<Token>();

    // Preprocessing may rewrite the text, so run it before storing it.
    TempPreprocess(ref input);
    index = 0;
    this.input = input;

    for (Token current = FetchNextToken(); current != null; current = FetchNextToken())
    {
        // Pure whitespace never reaches the output stream.
        if (current.TokenType != TokenType.WHITESPACE)
        {
            output.Enqueue(current);
        }

        // A newline starts a fresh line: recompute the indentation level.
        // Note: NEWLINE is not WHITESPACE, so the token itself was enqueued above.
        if (current.TokenType == TokenType.NEWLINE)
        {
            GenerateIndentDedentTokens();
        }
    }

    return output;
}
/// <summary>
/// The main method to call when parsing.
/// Tokenizes the input, runs the grammar's start rule, and records any
/// errors; on failure the parse tree is discarded.
/// </summary>
/// <param name="input">The source text to parse.</param>
/// <exception cref="ParserException">Thrown when the lexer fails.</exception>
public void Parse(string input)
{
    // Start from a clean slate for errors and consumed tokens.
    m_err.Clear();
    m_consumed.Clear();

    try
    {
        m_sym = m_lexer.Tokenize(input);
    }
    catch (LexerException ex)
    {
        // Surface lexer failures as parser failures, preserving the cause.
        throw new ParserException("Parsing failed", ex);
    }

    // Stmt == Terminating rule
    //Stmt();
    Prg();

    // Leftover symbols mean the grammar did not consume the whole input.
    if (m_sym.Count > 0)
    {
        m_err.Enqueue(new Error
        {
            Message = "Syntax Error - Unmatched tokens",
            Type = ErrorType.SyntaxError
        });
    }

    // Any recorded error invalidates the tree built during this parse.
    if (m_err.Count > 0)
    {
        m_tree = null;
    }
}