/// <summary>
/// Destroys character-position information on every real token and replaces
/// all-whitespace tokens with a shared hidden dummy token, so that neither
/// can be used during prediction. The first real token is left untouched.
/// </summary>
/// <param name="tokens">Token stream to mutate in place; it is filled first.</param>
public static void wipeCharPositionInfoAndWhitespaceTokens(CodeBuffTokenStream tokens) {
    tokens.Fill();
    // One shared placeholder on the hidden channel stands in for every wiped
    // whitespace token; identity does not matter, only that it is unusable.
    CommonToken dummy = new CommonToken(TokenConstants.InvalidType, "");
    dummy.Channel = TokenConstants.HiddenChannel;
    Token firstRealToken = tokens.getNextRealToken(-1);
    // Hoisted out of the loop: compiling a Regex per token is loop-invariant work.
    Regex allWhitespace = new Regex("^\\s+$");
    for (int i = 0; i < tokens.Size; i++) {
        if (i == firstRealToken.TokenIndex) {
            continue; // don't wack first token
        }
        CommonToken t = (CommonToken)tokens.Get(i);
        if (allWhitespace.IsMatch(t.Text)) {
            tokens.GetTokens()[i] = dummy; // wack whitespace token so we can't use it during prediction
        }
        else {
            // Real token: erase its recorded position so prediction can't rely on it.
            t.Line = 0;
            t.Column = -1;
        }
    }
}
/// <summary>
/// Lexes an entire document with the given lexer class and returns the
/// fully-filled token stream.
/// </summary>
/// <param name="doc">Source text to tokenize.</param>
/// <param name="lexerClass">Concrete lexer type handed to <c>getLexer</c>.</param>
/// <returns>A <see cref="CodeBuffTokenStream"/> containing every token of <paramref name="doc"/>.</returns>
public static CodeBuffTokenStream tokenize(string doc, Type lexerClass) {
    ANTLRInputStream charStream = new ANTLRInputStream(doc);
    Lexer docLexer = getLexer(lexerClass, charStream);
    CodeBuffTokenStream stream = new CodeBuffTokenStream(docLexer);
    // Force the lexer to run to EOF so callers see a complete stream.
    stream.Fill();
    return stream;
}