Пример #1
0
        /// <summary>
        /// Parses the token stream into a flat list of top-level items, stopping at
        /// end-of-file or when <paramref name="context"/> signals cancellation.
        /// </summary>
        private ParseItemList Parse(ITextProvider text, IItemFactory itemFactory, ITokenStream stream, IParsingExecutionContext context)
        {
            var items = new ParseItemList();

            while (!context.IsCancellationRequested && stream.Current.Type != TokenType.EndOfFile)
            {
                int startPosition = stream.Position;

                ParseItem parsed;
                if (!itemFactory.TryCreate(null, text, stream, out parsed))
                {
                    break;
                }

                if (parsed.Parse(itemFactory, text, stream))
                {
                    items.Add(parsed);
                }

                // If the stream did not move, skip the token so an unhandled
                // token can never spin the loop forever.
                if (stream.Position == startPosition)
                {
                    stream.Advance();
                }
            }

            // Freeze results only when parsing ran to completion.
            if (!context.IsCancellationRequested)
            {
                foreach (var parsed in items)
                {
                    parsed.Freeze();
                }
            }

            return items;
        }
Пример #2
0
        /// <summary>
        /// Runs the lexer over <paramref name="text"/> asynchronously and records
        /// the lexer's last tokenization duration on this instance.
        /// </summary>
        private async Task<TokenList> TokenizeAsync(ITextProvider text, IParsingExecutionContext context)
        {
            var tokens = await Lexer.TokenizeAsync(new TextStream(text), context);

            LastTokenizationDuration = Lexer.LastTokenizationDuration;
            return tokens;
        }
Пример #3
0
        /// <summary>
        /// Tokenizes <paramref name="text"/> asynchronously and captures the lexer's timing.
        /// </summary>
        /// <param name="text">Source text to tokenize.</param>
        /// <param name="context">Context used to observe cancellation during lexing.</param>
        /// <returns>The token list produced by the lexer.</returns>
        private async Task<TokenList> TokenizeAsync(ITextProvider text, IParsingExecutionContext context)
        {
            var stream = new TextStream(text);
            var tokens = await Lexer.TokenizeAsync(stream, context);

            LastTokenizationDuration = Lexer.LastTokenizationDuration;

            return tokens;
        }
Пример #4
0
        /// <summary>
        /// Wraps <paramref name="tokens"/> for sequential consumption; a null or empty
        /// list is normalized so the stream always contains at least an end-of-file token.
        /// </summary>
        public TokenStream(TokenList tokens, IParsingExecutionContext context)
        {
            Context = context;
            Tokens = tokens ?? new TokenList();

            // Guarantee a non-empty list terminated by EOF.
            if (Tokens.Count == 0)
            {
                Tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));
            }

            END_OF_FILE_TOKEN = Tokens[Tokens.Count - 1];

            // Sentinel marking the index cache as not yet populated.
            CachedIndex = int.MinValue;
        }
Пример #5
0
        /// <summary>
        /// Builds a stream over <paramref name="tokens"/>; a null or empty list is
        /// replaced by a single end-of-file token so the stream is never empty.
        /// </summary>
        public TokenStream(TokenList tokens, IParsingExecutionContext context)
        {
            Context = context;
            Tokens = tokens ?? new TokenList();

            if (Tokens.Count == 0)
                Tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));

            // The last token is the EOF terminator (guaranteed above).
            END_OF_FILE_TOKEN = Tokens[Tokens.Count - 1];

            // int.MinValue marks the index cache as unset.
            CachedIndex = int.MinValue;
        }
Пример #6
0
        /// <summary>
        /// Tokenizes the text, then parses the resulting token stream.
        /// Only the parse phase is timed here; tokenization timing is recorded
        /// separately by <c>TokenizeAsync</c>.
        /// </summary>
        public async Task<ParseItemList> ParseAsync(ITextProvider text, IParsingExecutionContext context, ISassItemFactory itemFactory)
        {
            var tokens = await TokenizeAsync(text, context);

            var parseTimer = Stopwatch.StartNew();
            var results = Parse(text, new ItemFactory(itemFactory), CreateTokenStream(tokens, context), context);
            parseTimer.Stop();

            LastParsingDuration = parseTimer.Elapsed;
            return results;
        }
Пример #7
0
        /// <summary>
        /// Tokenizes <paramref name="text"/>, then parses the token stream into items.
        /// </summary>
        /// <param name="text">Source text to parse.</param>
        /// <param name="context">Context used to observe cancellation.</param>
        /// <param name="itemFactory">Factory for language-specific parse items.</param>
        /// <returns>The parsed item list.</returns>
        public async Task<ParseItemList> ParseAsync(ITextProvider text, IParsingExecutionContext context, ISassItemFactory itemFactory)
        {
            var tokens = await TokenizeAsync(text, context);

            // Time only the parse phase; tokenization records its own duration.
            var watch = Stopwatch.StartNew();

            var stream = CreateTokenStream(tokens, context);
            var results = Parse(text, new ItemFactory(itemFactory), stream, context);

            watch.Stop();
            LastParsingDuration = watch.Elapsed;
            return results;
        }
Пример #8
0
        /// <summary>
        /// Scans <paramref name="stream"/> into a token list, bracketed by
        /// start-of-file and end-of-file tokens, and records the elapsed time
        /// in <c>LastTokenizationDuration</c>.
        /// </summary>
        /// <param name="stream">Text stream to scan; consumed to its end unless cancelled.</param>
        /// <param name="context">Context used to observe cancellation.</param>
        /// <returns>The tokens found, always ending with an end-of-file token.</returns>
        TokenList Tokenize(ITextStream stream, IParsingExecutionContext context)
        {
            var watch = Stopwatch.StartNew();
            var tokens = new TokenList();

            tokens.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

            while (!context.IsCancellationRequested)
            {
                if (stream.Position >= stream.Length)
                {
                    break;
                }

                // Each Consume* helper advances the stream when it matches, so a
                // successful match restarts the loop at the new position.
                if (ConsumeComment(stream, tokens))
                {
                    continue;
                }

                if (ConsumeNewLine(stream, tokens))
                {
                    continue;
                }

                if (ConsumeWhitespace(stream))
                {
                    continue;
                }

                if (ConsumeInterpolation(stream, tokens))
                {
                    continue;
                }

                Token token;
                if (TryCreateToken(stream, out token))
                {
                    tokens.Add(token);
                }
            }

            // close stream with end of file token
            tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

            watch.Stop();
            LastTokenizationDuration = watch.Elapsed;
            return tokens;
        }
Пример #9
0
        /// <summary>
        /// Scans the stream into a token list, bracketed by start-of-file and
        /// end-of-file tokens; the elapsed time is stored in <c>LastTokenizationDuration</c>.
        /// </summary>
        TokenList Tokenize(ITextStream stream, IParsingExecutionContext context)
        {
            var timer = Stopwatch.StartNew();

            var tokens = new TokenList();
            tokens.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

            while (!context.IsCancellationRequested && stream.Position < stream.Length)
            {
                // Comments, newlines, whitespace and interpolations each consume
                // their own input; when any of them matches, rescan from the new position.
                bool consumed = ConsumeComment(stream, tokens)
                    || ConsumeNewLine(stream, tokens)
                    || ConsumeWhitespace(stream)
                    || ConsumeInterpolation(stream, tokens);

                if (consumed)
                    continue;

                Token token;
                if (TryCreateToken(stream, out token))
                    tokens.Add(token);
            }

            // close stream with end of file token
            tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

            timer.Stop();
            LastTokenizationDuration = timer.Elapsed;
            return tokens;
        }
Пример #10
0
 /// <summary>
 /// Runs <c>Tokenize</c> on a thread-pool thread.
 /// </summary>
 public Task<TokenList> TokenizeAsync(ITextStream stream, IParsingExecutionContext context)
     => Task.Run(() => Tokenize(stream, context));
Пример #11
0
 /// <summary>
 /// Runs <c>Tokenize</c> on a thread-pool thread.
 /// </summary>
 /// <param name="stream">Text stream to tokenize.</param>
 /// <param name="context">Context used to observe cancellation.</param>
 /// <returns>A task producing the token list.</returns>
 public Task<TokenList> TokenizeAsync(ITextStream stream, IParsingExecutionContext context)
 {
     return Task.Run(() => Tokenize(stream, context));
 }
Пример #12
0
 /// <summary>
 /// Factory hook so derived parsers can supply a custom token stream implementation.
 /// </summary>
 protected virtual ITokenStream CreateTokenStream(TokenList tokens, IParsingExecutionContext context)
     => new TokenStream(tokens, context);
Пример #13
0
 /// <summary>
 /// Factory hook so derived parsers can supply a custom token stream implementation.
 /// </summary>
 /// <param name="tokens">Tokens the stream will iterate over.</param>
 /// <param name="context">Context used to observe cancellation.</param>
 /// <returns>A new <see cref="TokenStream"/> over <paramref name="tokens"/>.</returns>
 protected virtual ITokenStream CreateTokenStream(TokenList tokens, IParsingExecutionContext context)
 {
     return new TokenStream(tokens, context);
 }
Пример #14
0
        /// <summary>
        /// Parses the token stream into a flat list of top-level items, stopping at
        /// end-of-file or when <paramref name="context"/> signals cancellation.
        /// </summary>
        /// <param name="text">Source text backing the tokens.</param>
        /// <param name="itemFactory">Factory that creates parse items from the stream.</param>
        /// <param name="stream">Token stream positioned at the first token to parse.</param>
        /// <param name="context">Context used to observe cancellation.</param>
        /// <returns>The parsed items; frozen only if parsing was not cancelled.</returns>
        private ParseItemList Parse(ITextProvider text, IItemFactory itemFactory, ITokenStream stream, IParsingExecutionContext context)
        {
            var results = new ParseItemList();

            while (!context.IsCancellationRequested && stream.Current.Type != TokenType.EndOfFile)
            {
                int position = stream.Position;

                ParseItem item;
                if (!itemFactory.TryCreate(null, text, stream, out item))
                {
                    break;
                }

                if (item.Parse(itemFactory, text, stream))
                {
                    results.Add(item);
                }

                // guard against infinite loop (in case token couldn't be handled)
                if (stream.Position == position)
                {
                    stream.Advance();
                }
            }

            // freeze everything
            if (!context.IsCancellationRequested)
            {
                foreach (var item in results)
                {
                    item.Freeze();
                }
            }

            return results;
        }