/// <summary>
/// Constructor used by the generated parser's "generateParseException"
/// method. It records the parse state needed to build a diagnostic:
/// the token that was actually seen, the token sequences that would
/// have been legal at that point, and the table of token images.
/// The <c>specialConstructor</c> flag marks the instance as built this
/// way. The base class receives the empty string so that its
/// <c>ToString</c> output takes the form:
///     ParseException: &lt;result of getMessage&gt;
/// </summary>
public ParseException(Token currentTokenVal, int[][] expectedTokenSequencesVal, System.String[] tokenImageVal)
    : base("")
{
    currentToken = currentTokenVal;
    expectedTokenSequences = expectedTokenSequencesVal;
    tokenImage = tokenImageVal;
    specialConstructor = true;
}
/// <summary>
/// Builds a tokenizer reading from the given character stream.
/// Creates a fresh token manager over the stream and resets all
/// parser bookkeeping (lookahead kind, generation counter, and the
/// single-entry LA(1) table).
/// </summary>
public StandardTokenizer(CharStream stream)
{
    token_source = new StandardTokenizerTokenManager(stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // jj_la1 has exactly one entry; -1 means "never taken".
    jj_la1[0] = -1;
}
/// <summary>
/// Advances to and returns the next token. Tokens already linked via
/// <c>token.next</c> (e.g. from backtracking) are reused; otherwise a
/// fresh token is pulled from the token manager and appended to the
/// chain. Resets the cached lookahead kind and bumps the generation
/// counter.
/// </summary>
public Token GetNextToken()
{
    if (token.next == null)
    {
        token.next = token_source.GetNextToken();
    }
    token = token.next;
    jj_ntk = -1;
    jj_gen++;
    return token;
}
/// <summary>
/// Consumes the next token if its kind matches <paramref name="kind"/>.
/// On a match the token cursor advances, the generation counter is
/// bumped, and the consumed token is returned. On a mismatch the
/// cursor is restored to its previous position, the expected kind is
/// recorded in <c>jj_kind</c>, and a ParseException is thrown.
/// </summary>
private Token Jj_consume_token(int kind)
{
    Token previous = token;
    // Fetch a fresh token only when the chain has not been extended yet.
    if (previous.next == null)
    {
        previous.next = token_source.GetNextToken();
    }
    token = previous.next;
    jj_ntk = -1;
    if (token.kind == kind)
    {
        jj_gen++;
        return token;
    }
    // Mismatch: back up the cursor and report the expected kind.
    token = previous;
    jj_kind = kind;
    throw GenerateParseException();
}
/// <summary>
/// Reinitializes the tokenizer with an existing token manager,
/// resetting all parser bookkeeping (current token, cached lookahead
/// kind, generation counter, and the single-entry LA(1) table).
/// </summary>
public virtual void ReInit(StandardTokenizerTokenManager tm)
{
    token_source = tm;
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // jj_la1 has exactly one entry; -1 means "never taken".
    jj_la1[0] = -1;
}
/// <summary>
/// Reinitializes the tokenizer over a new character stream, reusing the
/// existing token manager. Resets all parser bookkeeping (current
/// token, cached lookahead kind, generation counter, and the
/// single-entry LA(1) table).
/// </summary>
public virtual void ReInit(CharStream stream)
{
    token_source.ReInit(stream);
    token = new Token();
    jj_ntk = -1;
    jj_gen = 0;
    // jj_la1 has exactly one entry; -1 means "never taken".
    jj_la1[0] = -1;
}
/// <summary>
/// Returns the next token in the stream, or null at end-of-stream.
/// The returned token's type is set to an element of
/// <see cref="Lucene.Net.Analysis.Standard.StandardTokenizerConstants.tokenImage"/>.
/// </summary>
/// <returns>the next analysis token, or null once EOF is reached</returns>
/// <exception cref="ParseException">if the lookahead token kind is not one
/// of the accepted kinds (thrown from Jj_consume_token)</exception>
public override Lucene.Net.Analysis.Token Next()
{
    // Generated-code cleanup: the original had one case per constant, each
    // calling Jj_consume_token with that same constant. Since the matched
    // kind is exactly the switch operand, the cases are grouped and the
    // already-computed kind is passed through once. The constant-condition
    // `if (true) return ...` wrappers and the unreachable trailing throw
    // were also removed; runtime behavior is unchanged.
    Token matchedToken;
    int kind = (jj_ntk == -1) ? Jj_ntk() : jj_ntk;
    switch (kind)
    {
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.ALPHANUM:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.APOSTROPHE:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.ACRONYM:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.COMPANY:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.EMAIL:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.HOST:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.NUM:
        case Lucene.Net.Analysis.Standard.StandardTokenizerConstants.CJ:
        case 0: // <EOF>
            matchedToken = Jj_consume_token(kind);
            break;
        default:
            jj_la1[0] = jj_gen;
            // -1 never matches a real token kind, so this always throws the
            // generated ParseException with full expected-token diagnostics.
            Jj_consume_token(-1);
            throw new ParseException();
    }
    if (matchedToken.kind == Lucene.Net.Analysis.Standard.StandardTokenizerConstants.EOF)
    {
        return null;
    }
    return new Lucene.Net.Analysis.Token(matchedToken.image, matchedToken.beginColumn, matchedToken.endColumn,
        Lucene.Net.Analysis.Standard.StandardTokenizerConstants.tokenImage[matchedToken.kind]);
}