/// <summary>
/// Creates a tokenizer over a single source file. The cursor starts at
/// line 1, column 1, a reader is opened on the file, and tokenizer state
/// is prepared via <c>Initialize()</c>.
/// </summary>
/// <param name="sourceFile">The source file to tokenize.</param>
/// <param name="report">Destination for diagnostics produced while tokenizing.</param>
public Tokenizer(SourceFile sourceFile, Report report)
{
    this.sourceFile = sourceFile;
    Report = report;
    Col = 1;
    Ln = 1;
    // Open the reader before Initialize(), which presumably consumes it — TODO confirm.
    textReader = sourceFile.GetReader();
    Initialize();
}
/// <summary>
/// Creates a tokenizer over several source files by bundling them into a
/// single <see cref="SourceUnit"/> and delegating to the single-unit constructor.
/// </summary>
/// <param name="files">The source files to tokenize, in order.</param>
/// <param name="report">Destination for diagnostics produced while tokenizing.</param>
public MultiFileTokenizer(SourceFile[] files, Report report)
    : this(makeCU(files), report)
{
}
/// <summary>
/// Bundles the given source files into one <see cref="SourceUnit"/>,
/// preserving their order.
/// </summary>
/// <param name="files">The files to add to the unit.</param>
/// <returns>A new <see cref="SourceUnit"/> containing every file.</returns>
private static SourceUnit makeCU(SourceFile[] files)
{
    var compilationUnit = new SourceUnit();
    for (int i = 0; i < files.Length; i++)
    {
        compilationUnit.Add(files[i]);
    }
    return compilationUnit;
}
/// <summary>
/// Creates event arguments carrying the tokenizer that raised the event
/// and the source file it was processing.
/// </summary>
/// <param name="tokenizer">The tokenizer raising the event.</param>
/// <param name="sourceFile">The file being tokenized when the event fired.</param>
public TokenizerEventArgs(ITokenizer tokenizer, SourceFile sourceFile)
{
    Tokenizer = tokenizer;
    SourceFile = sourceFile;
}
/// <summary>
/// Builds a <see cref="Token.BAD_TOKEN"/> token carrying the offending text
/// and its source position.
/// </summary>
private static TokenInfo badToken(string value, SourceFile sourceFile, int ln, int col)
{
    return new TokenInfo(Token.BAD_TOKEN, value, sourceFile, ln, col);
}
/// <summary>
/// Builds a <see cref="Token.SPACE"/> token at the given source position,
/// using the canonical space text from <see cref="TokenStringConstants"/>.
/// </summary>
private static TokenInfo space(SourceFile sourceFile, int ln, int col)
{
    return new TokenInfo(Token.SPACE, TokenStringConstants.SPACE, sourceFile, ln, col);
}
/// <summary>
/// Builds an end-of-file (<see cref="Token.EOF"/>) token at the given
/// source position.
/// </summary>
private static TokenInfo eof(SourceFile sourceFile, int ln, int col)
{
    return new TokenInfo(Token.EOF, TokenStringConstants.EOF, sourceFile, ln, col);
}
/// <summary>
/// Builds a <see cref="Token.Number"/> token from the digits accumulated
/// in <paramref name="sb"/>.
/// </summary>
private static TokenInfo number(StringBuilder sb, SourceFile sourceFile, int ln, int col)
{
    string lexeme = sb.ToString();
    return new TokenInfo(Token.Number, lexeme, sourceFile, ln, col);
}
/// <summary>
/// Builds a token for a reserved word. The word's text is mapped to its
/// <see cref="Token"/> enum member by name.
/// </summary>
private static TokenInfo reservedWord(string value, SourceFile sourceFile, int ln, int col)
{
    // Case-insensitive parse: the Token enum member name must match the word.
    // Throws if the word has no matching enum member — callers are expected
    // to pass only recognized reserved words.
    var token = (Token) Enum.Parse(typeof(Token), value, true);
    return new TokenInfo(token, value, sourceFile, ln, col);
}