/// <summary>
/// Tokenizes the given stream into a <see cref="TokenList"/>, bracketed by
/// start-of-file and end-of-file tokens. Tokenization stops early when the
/// supplied context signals cancellation.
/// </summary>
/// <param name="stream">Text stream to read characters from.</param>
/// <param name="context">Lexing context used to observe cancellation.</param>
/// <returns>The list of tokens produced from the stream.</returns>
public TokenList Tokenize(ITextStream stream, ILexingContext context)
{
    var watch = Stopwatch.StartNew();

    var tokens = new TokenList();
    tokens.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

    while (!context.IsCancellationRequested)
    {
        if (stream.Position >= stream.Length)
        {
            break;
        }

        // Remember where we were so a stalled iteration can be detected below.
        var positionBefore = stream.Position;

        if (ConsumeComment(stream, tokens))
        {
            continue;
        }

        if (ConsumeNewLine(stream, tokens))
        {
            continue;
        }

        if (ConsumeWhitespace(stream))
        {
            continue;
        }

        if (ConsumeInterpolation(stream, tokens))
        {
            continue;
        }

        Token token;
        if (TryCreateToken(stream, out token))
        {
            tokens.Add(token);
        }

        // Guard against an infinite loop: if nothing was consumed and no token
        // creation advanced the stream, the next iteration would be identical.
        // NOTE(review): assumes a well-behaved TryCreateToken always advances
        // on success — confirm against its implementation.
        if (stream.Position == positionBefore)
        {
            break;
        }
    }

    // Close the stream with an end-of-file token.
    tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

    watch.Stop();
    LastTokenizationDuration = watch.Elapsed;
    return tokens;
}
/// <summary>
/// Produces a <see cref="TokenList"/> for the given stream. The list always
/// begins with a start-of-file token and ends with an end-of-file token; the
/// scan loop honors cancellation requested via the lexing context.
/// </summary>
/// <param name="stream">Text stream supplying the characters to lex.</param>
/// <param name="context">Lexing context providing the cancellation signal.</param>
/// <returns>The tokens read from the stream.</returns>
public TokenList Tokenize(ITextStream stream, ILexingContext context)
{
    var stopwatch = Stopwatch.StartNew();

    var result = new TokenList();
    result.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

    while (!context.IsCancellationRequested && stream.Position < stream.Length)
    {
        // Trivia first: comments, newlines, whitespace and interpolations
        // are handled by dedicated consumers; each short-circuits the rest.
        bool consumedTrivia =
            ConsumeComment(stream, result)
            || ConsumeNewLine(stream, result)
            || ConsumeWhitespace(stream)
            || ConsumeInterpolation(stream, result);

        if (consumedTrivia)
        {
            continue;
        }

        Token nextToken;
        if (TryCreateToken(stream, out nextToken))
        {
            result.Add(nextToken);
        }
    }

    // Close the stream with an end-of-file token.
    result.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

    stopwatch.Stop();
    LastTokenizationDuration = stopwatch.Elapsed;
    return result;
}