// Builds a new token of the requested type and text, positioned immediately
// after the prototype token (same file and line, column advanced past the
// prototype's text).
antlr.IToken CreateToken(antlr.IToken prototype, int newTokenType, string newTokenText)
{
    int columnAfterPrototype = prototype.getColumn() + SafeGetLength(prototype.getText());
    return new BooToken(
        newTokenType,
        newTokenText,
        prototype.getFilename(),
        prototype.getLine(),
        columnAfterPrototype);
}
// Entry point: lexes the embedded sample program and prints each token's
// text and classified type to the console until the lexer reports EOF.
static void Main(string[] args)
{
    var lexer = new CSharpLexer(new StringReader(program));
    for (antlr.IToken token = lexer.nextToken();
         token.Type != CSharpLexer.EOF;
         token = lexer.nextToken())
    {
        Console.WriteLine(
            "Token: '{0}', Type: {1}.",
            token.getText(),
            TokenClassification.Instance.getTokenType(token.Type));
    }
}
// Maps a (possibly multi-line) token back to editor coordinates:
// records any whitespace gap between the previous token's end and this
// token's start, walks the token text line by line to locate its end,
// and appends a MappedToken covering the token's span.
//
//   token           - lexer token being mapped (1-based line/column).
//   endLine/endIndex - end position (0-based) of the previously mapped token.
//   delimiterLength - length of the token's opening/closing delimiter,
//                     which is not part of token.getText().
//
// Returns the (endLine, endIndex) of this token, to feed into the next call.
// Throws ArgumentException if tokens are not mapped in increasing order.
private Tuple<int, int> CalculateEndpoint(antlr.IToken token, int endLine, int endIndex, int delimiterLength)
{
    var startIndex = positionMap[token.getLine() - 1][token.getColumn() - 1];
    var startLine = token.getLine() - 1;

    // Anything between the previous token's end and this token's start is whitespace.
    if (startLine > endLine || startLine == endLine && startIndex > endIndex)
    {
        whitespaces.Add(new TextSpan
        {
            iStartLine = endLine,
            iStartIndex = endIndex,
            iEndLine = startLine,
            iEndIndex = startIndex
        });
    }

    // Walk the token text one line at a time to find where the token ends.
    // Only the first line starts at the token's column (plus the opening
    // delimiter); continuation lines start at column 0.
    endLine = startLine - 1;
    endIndex = 0;
    var runningIndex = startIndex + delimiterLength;
    foreach (var part in token.getText().Split(new[] { "\r\n" }, StringSplitOptions.None))
    {
        endLine++;
        endIndex = runningIndex + part.Length;
        runningIndex = 0;
    }
    endIndex += delimiterLength; // account for the closing delimiter

    var cluster = new MappedToken(
        startLine * lineSize + startIndex,
        endIndex - startIndex);

    // Tokens must be appended in strictly increasing index order.
    // (Was tokenMap.Count() - 1: the LINQ extension is redundant and slower
    // than the List.Count property used in the same condition.)
    if (tokenMap.Count > 0 && tokenMap[tokenMap.Count - 1].Index >= cluster.Index)
    {
        throw new ArgumentException("Token Mapping order");
    }
    tokenMap.Add(cluster);
    return new Tuple<int, int>(endLine, endIndex);
}
// Pulls tokens from the underlying stream, discarding SKIP tokens and
// accumulating the text of whitespace tokens into _buffer, until the first
// token that is neither; that token is returned.
antlr.IToken BufferUntilNextNonWhiteSpaceToken()
{
    while (true)
    {
        antlr.IToken candidate = _istream.nextToken();
        int candidateType = candidate.Type;

        if (candidateType == antlr.Token.SKIP)
        {
            continue;
        }

        if (candidateType != _wsTokenType)
        {
            return candidate;
        }

        _buffer.Append(candidate.getText());
    }
}
// Builds a MemberReferenceExpression for "target.memberName", taking the
// lexical position from the member-name token.
private MemberReferenceExpression MemberReferenceForToken(Expression target, IToken memberName)
{
    return new MemberReferenceExpression(ToLexicalInfo(memberName))
    {
        Target = target,
        Name = memberName.getText()
    };
}
// Delegates to the wrapped ANTLR token's text.
public string getText()
{
    string text = iToken.getText();
    return text;
}
// Returns the source location of the token's last character. A null token
// text is treated as empty, so a zero-length token maps to the column just
// before its start.
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
    var text = token.getText() ?? "";
    var endColumn = token.getColumn() + text.Length - 1;
    return new SourceLocation(token.getLine(), endColumn);
}
// Returns the source location of the token's last character.
// Fix: guard against a null token text, which previously threw
// NullReferenceException on token.getText().Length; this matches the
// null-safe sibling overload elsewhere in the codebase (text ?? "").
public static SourceLocation ToEndSourceLocation(antlr.IToken token)
{
    string text = token.getText() ?? "";
    return new SourceLocation(token.getLine(), token.getColumn() + text.Length - 1);
}
// Reads from the underlying token stream, buffering whitespace text, and
// translates indentation changes into synthetic INDENT / DEDENT / EOS
// tokens (Python-style offside rule). The significant token that ended the
// whitespace run is recorded as _lastNonWsToken and enqueued last.
void ProcessNextTokens()
{
    _buffer.Length = 0;

    // Skip SKIP tokens and accumulate whitespace text until the next
    // significant token.
    antlr.IToken token = null;
    while (true)
    {
        token = _istream.nextToken();
        int ttype = token.Type;
        if (antlr.Token.SKIP == ttype)
        {
            continue;
        }
        if (_wsTokenType == ttype)
        {
            _buffer.Append(token.getText());
            continue;
        }
        break;
    }

    if (0 != _buffer.Length)
    {
        string text = _buffer.ToString();
        string[] lines = text.Split(NewLineCharArray);
        // Only whitespace containing a newline can change the indent level;
        // the length of the text after the last newline is the new indentation.
        if (lines.Length > 1)
        {
            string lastLine = lines[lines.Length - 1];
            if (lastLine.Length > CurrentIndentLevel)
            {
                // Deeper indentation: open a block and remember its level.
                EnqueueIndent(token);
                _indentStack.Push(lastLine.Length);
            }
            else if (lastLine.Length < CurrentIndentLevel)
            {
                // Shallower indentation: end the statement, then close
                // blocks until the stack drops to the new level.
                EnqueueEOS(token);
                do
                {
                    EnqueueDedent();
                    _indentStack.Pop();
                }while (lastLine.Length < CurrentIndentLevel);
            }
            else
            {
                // Same indentation: just terminate the current statement.
                EnqueueEOS(token);
            }
        }
    }

    // At end of input, terminate the last statement and close every block
    // still open on the indent stack.
    if (antlr.Token.EOF_TYPE == token.Type)
    {
        EnqueueEOS(token);
        while (CurrentIndentLevel > 0)
        {
            EnqueueDedent();
            _indentStack.Pop();
        }
    }

    _lastNonWsToken = token;
    Enqueue(token);
}