antlr.IToken CreateToken(antlr.IToken prototype, int newTokenType, string newTokenText)
{
    // Synthesized tokens inherit their position from the prototype token,
    // offset by the prototype's text length so they point just past it.
    return new BooToken(newTokenType, newTokenText,
        prototype.getFilename(),
        prototype.getLine(),
        prototype.getColumn() + SafeGetLength(prototype.getText()));
}

void ProcessNextTokens()
{
    ResetBuffer();

    // Accumulate whitespace until the next significant token arrives, then
    // translate the buffered whitespace into INDENT/DEDENT/EOL tokens.
    antlr.IToken token = BufferUntilNextNonWhiteSpaceToken();
    FlushBuffer(token);
    CheckForEOF(token);
    ProcessNextNonWhiteSpaceToken(token);
}

void CheckForEOF(antlr.IToken token)
{
    if (antlr.Token.EOF_TYPE != token.Type) return;

    // At end of file, close the current logical line and unwind any
    // remaining indentation levels with matching DEDENT tokens.
    EnqueueEOS(token);
    while (CurrentIndentLevel > 0)
    {
        EnqueueDedent();
        _indentStack.Pop();
    }
}

public static double ParseDouble(antlr.IToken token, string s, bool isSingle)
{
    try
    {
        return TryParseDouble(isSingle, s);
    }
    catch (Exception x)
    {
        LexicalInfo sourceLocation = ToLexicalInfo(token);
        GenericParserError(sourceLocation, x);
        // let the parser continue
        return double.NaN;
    }
}

public static TimeSpan ParseTimeSpan(antlr.IToken token, string text)
{
    try
    {
        return TryParseTimeSpan(token, text);
    }
    catch (System.OverflowException x)
    {
        LexicalInfo sourceLocation = ToLexicalInfo(token);
        GenericParserError(sourceLocation, x);
        // let the parser continue
        return TimeSpan.Zero;
    }
}

public static IntegerLiteralExpression ParseIntegerLiteralExpression(antlr.IToken token, string text, bool asLong)
{
    try
    {
        return TryParseIntegerLiteralExpression(token, text, asLong);
    }
    catch (System.OverflowException x)
    {
        LexicalInfo sourceLocation = ToLexicalInfo(token);
        GenericParserError(sourceLocation, x);
        // let the parser continue
        return new IntegerLiteralExpression(sourceLocation);
    }
}

public static void Main(string[] args)
{
    try
    {
        // Drive the generated lexer over standard input, printing each token
        // until EOF is reached.
        T lexer = new T(new CharBuffer(Console.In));
        bool done = false;
        while (!done)
        {
            IToken tok = lexer.nextToken();
            Console.Out.WriteLine("Token: " + tok);
            if (tok.Type == Token.EOF_TYPE)
            {
                done = true;
            }
        }
        Console.Out.WriteLine("done lexing...");
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("exception: " + e);
    }
}

antlr.IToken BufferUntilNextNonWhiteSpaceToken()
{
    // Discard SKIP tokens, accumulate whitespace token text in the buffer,
    // and return the first token that is neither.
    antlr.IToken token = null;
    while (true)
    {
        token = _istream.nextToken();

        int ttype = token.Type;
        if (antlr.Token.SKIP == ttype) continue;

        if (_wsTokenType == ttype)
        {
            _buffer.Append(token.getText());
            continue;
        }

        break;
    }
    return token;
}

private static TimeSpan TryParseTimeSpan(antlr.IToken token, string text)
{
    // "ms" must be checked before the single-character suffixes so it is not
    // mistaken for a minutes or seconds literal.
    if (text.EndsWith("ms"))
    {
        return TimeSpan.FromMilliseconds(
            ParseDouble(token, text.Substring(0, text.Length - 2)));
    }

    char last = text[text.Length - 1];
    double value = ParseDouble(token, text.Substring(0, text.Length - 1));
    switch (last)
    {
        case 's': return TimeSpan.FromSeconds(value);
        case 'h': return TimeSpan.FromHours(value);
        case 'm': return TimeSpan.FromMinutes(value);
        case 'd': return TimeSpan.FromDays(value);
    }
    throw new ArgumentException(text, "text");
}

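// Usage sketch (an assumption, not part of the original source): what the
// suffix mapping above produces for a few representative duration literals.
// This standalone demo only exercises the System.TimeSpan factories that
// TryParseTimeSpan delegates to; it does not call into the parser itself.
static class TimeSpanSuffixDemo
{
    static void Main()
    {
        System.Console.WriteLine(System.TimeSpan.FromMilliseconds(250)); // "250ms" -> 00:00:00.2500000
        System.Console.WriteLine(System.TimeSpan.FromHours(1.5));        // "1.5h"  -> 01:30:00
        System.Console.WriteLine(System.TimeSpan.FromDays(2));           // "2d"    -> 2.00:00:00
    }
}
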
public static double ParseDouble(antlr.IToken token, string s)
{
    return ParseDouble(token, s, false);
}

public static int ParseInt(antlr.IToken token)
{
    return (int)ParseIntegerLiteralExpression(token, token.getText(), false).Value;
}

void ProcessNextNonWhiteSpaceToken(antlr.IToken token)
{
    _lastNonWsToken = token;
    Enqueue(token);
}

void FlushBuffer(antlr.IToken token)
{
    if (0 == _buffer.Length) return;

    string text = _buffer.ToString();
    string[] lines = text.Split(NewLineCharArray);
    if (lines.Length > 1)
    {
        string lastLine = lines[lines.Length - 1];

        // Protect against mixed indentation issues
        /*
        if (String.Empty != lastLine)
        {
            if (null == _expectedIndent)
            {
                _expectedIndent = lastLine.Substring(0, 1);
            }

            if (String.Empty != lastLine.Replace(_expectedIndent, String.Empty))
            {
                string literal = _expectedIndent == "\t"
                    ? "tabs"
                    : _expectedIndent == "\f"
                        ? "form feeds" // The lexer allows them :p
                        : "spaces";

                throw new TokenStreamRecognitionException(
                    new RecognitionException(
                        "Mixed indentation, expected the use of " + literal,
                        token.getFilename(),
                        token.getLine(),
                        // Point exactly to the first invalid char
                        lastLine.Length - lastLine.TrimStart(_expectedIndent[0]).Length + 1));
            }
        }
        */

        if (lastLine.Length > CurrentIndentLevel)
        {
            // Deeper indentation than the enclosing block: open a new level.
            EnqueueIndent(token);
            _indentStack.Push(lastLine.Length);
        }
        else if (lastLine.Length < CurrentIndentLevel)
        {
            // Shallower indentation: close the logical line, then pop levels
            // until the stack matches the new indentation width.
            EnqueueEOS(token);
            do
            {
                EnqueueDedent();
                _indentStack.Pop();
            } while (lastLine.Length < CurrentIndentLevel);
        }
        else
        {
            // Same indentation: just terminate the logical line.
            EnqueueEOS(token);
        }
    }
}

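// Minimal standalone sketch (an assumption, not from the original source) of
// the indent-stack discipline FlushBuffer applies: a growing indentation width
// yields <INDENT>, a shrinking width yields <EOL> plus one <DEDENT> per closed
// level, and an unchanged width yields a plain <EOL>.
using System;
using System.Collections.Generic;

static class IndentStackDemo
{
    static void Main()
    {
        var stack = new Stack<int>();
        stack.Push(0); // base level, like an empty _indentStack with CurrentIndentLevel == 0

        foreach (int width in new[] { 0, 4, 8, 4, 0 })
        {
            if (width > stack.Peek())
            {
                Console.WriteLine("<INDENT>");
                stack.Push(width);
            }
            else if (width < stack.Peek())
            {
                Console.WriteLine("<EOL>");
                do
                {
                    Console.WriteLine("<DEDENT>");
                    stack.Pop();
                } while (width < stack.Peek());
            }
            else
            {
                Console.WriteLine("<EOL>");
            }
        }
    }
}
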
void EnqueueEOS(antlr.IToken prototype)
{
    _pendingTokens.Enqueue(CreateToken(prototype, _eosTokenType, "<EOL>"));
}

void EnqueueIndent(antlr.IToken prototype)
{
    _pendingTokens.Enqueue(CreateToken(prototype, _indentTokenType, "<INDENT>"));
}

void Enqueue(antlr.IToken token)
{
    _pendingTokens.Enqueue(token);
}