private ParseItemList Parse(ITextProvider text, IItemFactory itemFactory, ITokenStream stream, IParsingExecutionContext context)
{
    var results = new ParseItemList();

    while (!context.IsCancellationRequested && stream.Current.Type != TokenType.EndOfFile)
    {
        int position = stream.Position;

        ParseItem item;
        if (!itemFactory.TryCreate(null, text, stream, out item))
            break;

        if (item.Parse(itemFactory, text, stream))
            results.Add(item);

        // guard against infinite loop (in case token couldn't be handled)
        if (stream.Position == position)
            stream.Advance();
    }

    // freeze everything
    if (!context.IsCancellationRequested)
        foreach (var item in results)
            item.Freeze();

    return results;
}
private async Task<TokenList> TokenizeAsync(ITextProvider text, IParsingExecutionContext context)
{
    var stream = new TextStream(text);
    var tokens = await Lexer.TokenizeAsync(stream, context);

    LastTokenizationDuration = Lexer.LastTokenizationDuration;

    return tokens;
}
public TokenStream(TokenList tokens, IParsingExecutionContext context)
{
    Context = context;
    Tokens = tokens ?? new TokenList();

    // guarantee at least an end-of-file token so the last-token lookup below is always valid
    if (Tokens.Count == 0)
        Tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));

    END_OF_FILE_TOKEN = Tokens[Tokens.Count - 1];
    CachedIndex = int.MinValue;
}
public async Task<ParseItemList> ParseAsync(ITextProvider text, IParsingExecutionContext context, ISassItemFactory itemFactory)
{
    var tokens = await TokenizeAsync(text, context);

    var watch = Stopwatch.StartNew();
    var stream = CreateTokenStream(tokens, context);
    var results = Parse(text, new ItemFactory(itemFactory), stream, context);
    watch.Stop();

    LastParsingDuration = watch.Elapsed;

    return results;
}
TokenList Tokenize(ITextStream stream, IParsingExecutionContext context)
{
    var watch = Stopwatch.StartNew();

    var tokens = new TokenList();
    tokens.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

    while (!context.IsCancellationRequested)
    {
        if (stream.Position >= stream.Length)
            break;

        if (ConsumeComment(stream, tokens))
            continue;

        if (ConsumeNewLine(stream, tokens))
            continue;

        if (ConsumeWhitespace(stream))
            continue;

        if (ConsumeInterpolation(stream, tokens))
            continue;

        Token token;
        if (TryCreateToken(stream, out token))
            tokens.Add(token);
    }

    // close stream with end of file token
    tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

    watch.Stop();
    LastTokenizationDuration = watch.Elapsed;

    return tokens;
}
public Task<TokenList> TokenizeAsync(ITextStream stream, IParsingExecutionContext context)
{
    return Task.Run(() => Tokenize(stream, context));
}
protected virtual ITokenStream CreateTokenStream(TokenList tokens, IParsingExecutionContext context)
{
    return new TokenStream(tokens, context);
}
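// Usage sketch (not part of the original source): one way a caller might drive the ParseAsync
// entry point above and read back the timing properties it sets. Only ParseAsync,
// LastTokenizationDuration and LastParsingDuration come from the code shown here; SassParser,
// StringTextProvider, ParsingExecutionContext and SassItemFactory are hypothetical names
// standing in for whatever concrete implementations the host application provides.
// Assumes using System, System.Threading and System.Threading.Tasks.
public static async Task ParseAndReportAsync(string source, CancellationToken cancellationToken)
{
    var parser = new SassParser();                                                     // hypothetical concrete parser exposing ParseAsync
    ITextProvider text = new StringTextProvider(source);                               // hypothetical ITextProvider over an in-memory string
    IParsingExecutionContext context = new ParsingExecutionContext(cancellationToken); // hypothetical context wrapping the cancellation token
    ISassItemFactory itemFactory = new SassItemFactory();                              // hypothetical item factory

    ParseItemList items = await parser.ParseAsync(text, context, itemFactory);

    Console.WriteLine($"tokenize: {parser.LastTokenizationDuration}, parse: {parser.LastParsingDuration}, items: {items.Count}");
}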