/// <summary>
/// Scans the stream from its current position into a token list bracketed by
/// StartOfFile / EndOfFile sentinel tokens, and records the elapsed time in
/// LastTokenizationDuration. Stops early if the context requests cancellation.
/// </summary>
public TokenList Tokenize(ITextStream stream, ILexingContext context)
{
    var timer = Stopwatch.StartNew();
    var result = new TokenList();
    result.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

    while (!context.IsCancellationRequested && stream.Position < stream.Length)
    {
        // Each Consume* helper returns true when it advanced the stream.
        bool consumed =
            ConsumeComment(stream, result) ||
            ConsumeNewLine(stream, result) ||
            ConsumeWhitespace(stream) ||
            ConsumeInterpolation(stream, result);
        if (consumed)
        {
            continue;
        }

        Token token;
        if (TryCreateToken(stream, out token))
        {
            result.Add(token);
        }
    }

    // close stream with end of file token
    result.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

    timer.Stop();
    LastTokenizationDuration = timer.Elapsed;
    return result;
}
/// <summary>
/// Consumes a "#{ ... }" interpolation, emitting open/close interpolation
/// tokens around the inner tokens. Returns true when an interpolation was
/// consumed. The inner scan stops at '}', at a newline, or — fix — at the end
/// of the stream; the original loop had no end-of-stream guard, so an
/// unterminated interpolation at EOF could loop forever if TryCreateToken
/// stopped advancing.
/// </summary>
private bool ConsumeInterpolation(ITextStream stream, TokenList tokens)
{
    if (stream.Current == '#' && stream.Peek(1) == '{')
    {
        tokens.Add(Token.Create(TokenType.OpenInterpolation, stream.Position, 2));
        stream.Advance(2);
        // Stop at stream end as well, so an unterminated "#{" cannot spin.
        while (stream.Position < stream.Length && stream.Current != '}' && !IsNewLine(stream.Current))
        {
            Token token;
            if (TryCreateToken(stream, out token))
            {
                tokens.Add(token);
            }
        }
        if (stream.Current == '}')
        {
            tokens.Add(Token.Create(TokenType.CloseInterpolation, stream.Position, 1));
            stream.Advance();
        }
        return(true);
    }
    return(false);
}
/// <summary>
/// Verifies that TokenList keeps tokens ordered by position regardless of
/// insertion order, and exercises lookup, binary search, removal and clearing.
/// </summary>
public void TokenList_Simple()
{
    TokenList tokens = new TokenList();
    Assert.AreEqual(0, tokens.Count);

    CssToken[] expect = new CssToken[]
    {
        new CssToken(CssTokenType.Asterisk, 10, 1),
        new CssToken(CssTokenType.Asterisk, 20, 1),
        new CssToken(CssTokenType.Asterisk, 30, 1),
        new CssToken(CssTokenType.Asterisk, 40, 1),
        new CssToken(CssTokenType.Asterisk, 50, 1),
        new CssToken(CssTokenType.Asterisk, 60, 1),
    };

    // The token collection is supposed to automatically sort its contents,
    // so add the tokens in a weird order.
    tokens.Add(expect[3]);
    tokens.Add(expect[0]);
    tokens.Add(expect[1]);
    tokens.Add(expect[4]);
    tokens.Add(expect[2]);
    tokens.Insert(tokens.Count, expect[5]);

    Assert.AreEqual(expect.Length, tokens.Count);
    for (int index = 0; index < expect.Length; index++)
    {
        Assert.AreEqual(expect[index], tokens[index]);
        Assert.IsTrue(tokens.Contains(expect[index]));
        Assert.AreEqual(index, tokens.IndexOf(expect[index]));
    }

    // Test the binary search for the token collection.
    Assert.AreEqual(0, tokens.FindInsertIndex(0, beforeExisting: true));
    Assert.AreEqual(0, tokens.FindInsertIndex(10, beforeExisting: true));
    Assert.AreEqual(1, tokens.FindInsertIndex(10, beforeExisting: false));
    Assert.AreEqual(3, tokens.FindInsertIndex(35, beforeExisting: true));
    Assert.AreEqual(3, tokens.FindInsertIndex(35, beforeExisting: false));
    Assert.AreEqual(4, tokens.FindInsertIndex(50, beforeExisting: true));
    Assert.AreEqual(5, tokens.FindInsertIndex(50, beforeExisting: false));
    Assert.AreEqual(6, tokens.FindInsertIndex(61, beforeExisting: true));
    Assert.AreEqual(6, tokens.FindInsertIndex(61, beforeExisting: false));
    Assert.AreEqual(6, tokens.FindInsertIndex(100, beforeExisting: true));
    Assert.AreEqual(6, tokens.FindInsertIndex(100, beforeExisting: false));

    Assert.IsTrue(tokens.Remove(expect[2]));
    Assert.AreEqual(expect.Length - 1, tokens.Count);
    Assert.AreEqual(expect[3], tokens[2]);

    tokens.Clear();
    Assert.AreEqual(0, tokens.Count);
}
/// <summary>
/// Extracts the token body enclosed between a matching pair of braces { }.
/// The outermost opening and closing braces themselves are excluded.
/// </summary>
/// <param name="tokens">Tokens starting at (or before) the opening brace.</param>
/// <param name="counter">Receives <paramref name="index2"/> plus the number of tokens consumed.</param>
/// <param name="index2">Base offset added to the consumed-token count.</param>
/// <param name="errors">When true, an empty body raises EmptyBodyException.</param>
private static TokenList GetBody(TokenList tokens, out int counter, int index2, bool errors = true)
{
    TokenList list = new TokenList();
    int index = 0;          // number of tokens consumed so far
    int beginsCount = 0;    // current brace nesting depth
    bool isStart = true;    // true until the first opening brace is seen
    foreach (Token token in tokens)
    {
        if (token.TypeIs(TokenType.BeginBrace))
        {
            // The outermost opening brace is not part of the body.
            if (!isStart)
            {
                list.Add(token);
            }
            else
            {
                isStart = false;
            }
            beginsCount++;
        }
        else if (token.TypeIs(TokenType.EndBrace))
        {
            beginsCount--;
            // The outermost closing brace is likewise excluded.
            if (beginsCount != 0)
            {
                list.Add(token);
            }
        }
        else if (beginsCount > 0)
        {
            list.Add(token);
        }
        index++;
        // Depth back to zero after the body started => body complete.
        if (beginsCount == 0 && !isStart)
        {
            break;
        }
    }
    counter = index2 + index;
    if (beginsCount != 0)
    {
        // Message: "Missing closing curly brace"
        throw new ParseException("Отсутствует закрывающая фигурная скобка", lineIndex);
    }
    if (errors && list.Count == 0)
    {
        // Message: "Construct with an empty body"
        throw new EmptyBodyException("Конструкция с пустым телом", lineIndex);
    }
    return(list);
}
/// <summary>
/// Registers the account's GUID in the session token list, or refreshes its
/// expiration time when the GUID is already tracked.
/// </summary>
public static void AddUserToSesion(Account account)
{
    bool alreadyTracked = _tokenList.Contains(account.Guid);
    if (!alreadyTracked)
    {
        _tokenList.Add(account.Guid);
    }
    else
    {
        _tokenList.RenowExpirationTime(account.Guid);
    }
}
// Reads one token (or bracketed token list) from the input and appends the
// result to parentList. Returns false when a terminator was reached or a line
// ended; true when more tokens may follow at the same level.
public bool ReadToken(TokenList parentList, List <TokenKind> terminators)
{
    Advance();
    if (terminators.Contains(CurrentToken.Kind))
    {
        return(false);
    }
    //else
    switch (CurrentToken.Kind)
    {
        case TokenKind.Name:
            parentList.Add(ReadComplexName(TokenKind.Name));
            break;
        case TokenKind.Dollar:
            // '$name' — read the following name as a variable reference.
            Advance();
            parentList.Add(ReadComplexName(TokenKind.Variable));
            break;
        case TokenKind.LeftRound:
            ReadTokenList(parentList, TokenKind.RoundList, RoundListTerminators);
            break;
        case TokenKind.LeftSquare:
            ReadTokenList(parentList, TokenKind.SquareList, SquareListTerminators);
            break;
        case TokenKind.LeftAngle:
            // NOTE(review): this passes TokenKind.SquareList together with the
            // angle-list terminators — looks like a copy/paste slip (an
            // AngleList kind would be expected here, if one exists). Confirm
            // before changing.
            ReadTokenList(parentList, TokenKind.SquareList, AngleListTerminators);
            break;
        case TokenKind.LineEnd:
            if (NextToken.Kind == TokenKind.Indent)
            {
                // Indented continuation: recurse into nested token lists.
                Advance();
                ReadTokenLists(parentList);
                return(false);
            }
            return(false);
        //break;
        case TokenKind.Dedent:
            // Dedent unwinds the recursion via a control-flow exception.
            throw new Escape();
        default:
            parentList.Add(CurrentToken);
            break;
    }
    return(true);
}
/// <summary>
/// Consumes a comment starting at the current position: "///" (XML doc),
/// "//" (line comment, followed by its text), or "/* ... */" (block comment
/// with open/text/close tokens). Returns true when a comment was consumed.
/// </summary>
private bool ConsumeComment(ITextStream stream, TokenList tokens)
{
    if (stream.Current != '/')
    {
        return(false);
    }

    int start = stream.Position;
    var next = stream.Peek(1);

    if (next == '/' && stream.Peek(2) == '/')
    {
        // "///" — XML documentation comment marker.
        stream.Advance(3);
        tokens.Add(Token.Create(TokenType.XmlDocumentationComment, start, 3));
        return(true);
    }

    if (next == '/')
    {
        // "//" — single-line comment; body runs until the newline.
        stream.Advance(2);
        tokens.Add(Token.Create(TokenType.CppComment, start, 2));
        if (!IsNewLine(stream.Current))
        {
            ConsumeCommentText(stream, tokens, s => IsNewLine(s.Current));
        }
        return(true);
    }

    if (next == '*')
    {
        // "/*" — block comment; body runs until "*/" (if present).
        stream.Advance(2);
        tokens.Add(Token.Create(TokenType.OpenCssComment, start, 2));
        ConsumeCommentText(stream, tokens, s => s.Current == '*' && s.Peek(1) == '/');
        if (stream.Current == '*' && stream.Peek(1) == '/')
        {
            int closeStart = stream.Position;
            stream.Advance(2);
            tokens.Add(Token.Create(TokenType.CloseCssComment, closeStart, 2));
        }
        return(true);
    }

    return(false);
}
/// <summary>
/// Returns the tokens between a matching pair of parentheses ( ), excluding
/// the outermost parentheses themselves.
/// </summary>
/// <param name="lineTokens">Tokens starting at (or before) the opening parenthesis.</param>
/// <param name="errors">When true, an empty expression raises EmptyExpressionException.</param>
private static TokenList GetExpressionInParenthesis(TokenList lineTokens, bool errors = true)
{
    TokenList list = new TokenList();
    bool isStart = true;    // true until the first opening parenthesis is seen
    int beginsCount = 0;    // current parenthesis nesting depth
    foreach (Token token in lineTokens)
    {
        if (token.TypeIs(TokenType.BeginParenthesis))
        {
            // Only nested opening parentheses become part of the result.
            if (beginsCount > 0)
            {
                list.Add(token);
            }
            beginsCount++;
            isStart = false;
        }
        else if (token.TypeIs(TokenType.EndParenthesis))
        {
            beginsCount--;
            // The outermost closing parenthesis is excluded.
            if (beginsCount > 0)
            {
                list.Add(token);
            }
        }
        else if (beginsCount > 0)
        {
            list.Add(token);
        }
        // Depth back to zero after the expression started => done.
        if (beginsCount == 0 && !isStart)
        {
            break;
        }
    }
    if (beginsCount != 0)
    {
        // Message: "Missing closing parenthesis in construct"
        throw new ParseException("Отсутствует закрывающая скобка в конструкции", lineIndex);
    }
    if (errors && list.Count == 0)
    {
        // Message: "Empty expression"
        throw new EmptyExpressionException("Пустое выражение", lineIndex);
    }
    return(list);
}
/// <summary>
/// Tokenizes the stream into a list bounded by StartOfFile and EndOfFile
/// sentinels, honoring cancellation and timing the run.
/// </summary>
TokenList Tokenize(ITextStream stream, IParsingExecutionContext context)
{
    var stopwatch = Stopwatch.StartNew();
    var list = new TokenList();
    list.Add(Token.CreateEmpty(TokenType.StartOfFile, stream.Position));

    while (!context.IsCancellationRequested)
    {
        if (stream.Position >= stream.Length)
        {
            break;
        }

        // Try each specialized consumer in priority order; any hit restarts the loop.
        if (ConsumeComment(stream, list) ||
            ConsumeNewLine(stream, list) ||
            ConsumeWhitespace(stream) ||
            ConsumeInterpolation(stream, list))
        {
            continue;
        }

        Token next;
        if (TryCreateToken(stream, out next))
        {
            list.Add(next);
        }
    }

    // close stream with end of file token
    list.Add(Token.CreateEmpty(TokenType.EndOfFile, stream.Length));

    stopwatch.Stop();
    LastTokenizationDuration = stopwatch.Elapsed;
    return(list);
}
/// <summary>
/// Reads the stream line by line, tokenizing each line and appending a final
/// FileEnd token. The input stream is always closed, even on error.
/// Fix: the original never disposed the StreamReader; wrapping it in a
/// using-statement disposes the reader, which also closes the underlying
/// stream (preserving the original finally { str.Close(); } behavior).
/// </summary>
public TokenList Tokenize(Stream str)
{
    TokenList tokenList = new TokenList();
    using (StreamReader sr = new StreamReader(str))
    {
        String line;
        while ((line = sr.ReadLine()) != null)
        {
            TokenList tokList = TokenizeLine(line);
            if (tokList != null)
            {
                tokenList.AddRange(tokList);
            }
        }
    }
    // As in the original, the FileEnd token is appended after the stream closes.
    tokenList.Add(new Token(TokenKind.FileEnd, 0));
    return(tokenList);
}
/// <summary>
/// Emits one Dedent token per indentation level popped from the stack, until
/// the level on top of the stack equals <paramref name="indentLevel"/>.
/// </summary>
void Dedent(int indentLevel, TokenList tokenList)
{
    while (true)
    {
        if (IndentStack.Peek() == indentLevel)
        {
            break;
        }
        tokenList.Add(new Token(TokenKind.Dedent, IndentStack.Pop()));
    }
}
// Appends the token that ends at currentIdx_ (spanning tokenLen characters) to
// the flat token list and to the per-line token matrix. Comment tokens are
// skipped entirely.
private void addToken(int tokenLen, TokenType type)
{
    if (type == TokenType.Comment)
    {
        return;
    }
    //Console.WriteLine("idx[" + currentIdx_ + "] ch[" + "] tok len[" + tokenLen + "]" );
    // currentIdx_ points at the token's last character, so the substring
    // starts tokenLen - 1 characters earlier.
    var token = CodeToken.Build(type, SourceCode.Substring(currentIdx_ - tokenLen + 1, tokenLen));
    token.LineNumber = lineCount_;
    token.ColumeNumber = tokenLeft_ - currentLineIdx_;
    TokenList.Add(token);
    // Start a new CodeLine when this token is the first one on a new source line.
    if (TokenMatrix.Count == 0 || TokenMatrix.ElementAt(TokenMatrix.Count - 1).LineNum != lineCount_)
    {
        var lastLine = new CodeLine(lineCount_);
        lastLine.Add(token);
        TokenMatrix.Add(lastLine);
        // Console.WriteLine("NEW " + lastLine.ToString());
    }
    else
    {
        // Same line as the previous token: append to the current CodeLine.
        TokenMatrix.ElementAt(TokenMatrix.Count - 1).Add(token);
        // Console.WriteLine(tokenFile_.ElementAt(tokenFile_.Count - 1).ToString());
    }
    this.TokenCount++;
}
/// <summary>
/// Collects every token that is an opening brace with a matched closing brace.
/// </summary>
public static TokenList GetOpeningBraces(TokenList tokens)
{
    TokenList braces = new TokenList();
    foreach (Token token in tokens)
    {
        // Skip anything that is not a paired opening brace.
        if (!token.Term.IsSet(TermOptions.IsOpenBrace) || token.OtherBrace == null)
        {
            continue;
        }
        braces.Add(token);
    }
    return braces;
}
/// <summary>
/// Flushes the pending digit buffer as a single Literal token, if non-empty.
/// </summary>
private void emptyNumberBufferAsLiteral()
{
    if (numberBuffer.Count == 0)
    {
        return;
    }
    string literal = string.Join("", numberBuffer.ToArray());
    TokenList.Add(new Token("Literal", literal));
    numberBuffer.Clear();
}
/// <summary>
/// Flushes the pending digit buffer as a Literal token carrying the given
/// SymbolEnd flag, if the buffer is non-empty.
/// </summary>
public void emptyNumberBufferAsLiteral(bool SymbolEnd = false)
{
    if (numberBuffer.Count == 0)
    {
        return;
    }
    var literal = new Token("Literal", string.Join("", numberBuffer.ToArray()));
    literal.SymbolEnd = SymbolEnd;
    TokenList.Add(literal);
    numberBuffer.Clear();
}
/// <summary>
/// Wraps a token list for sequential parsing. A null or empty list is replaced
/// by one containing a single EndOfFile token; the final token is cached as
/// the end-of-file sentinel.
/// </summary>
public TokenStream(TokenList tokens, IParsingExecutionContext context)
{
    CachedIndex = int.MinValue;
    Context = context;
    Tokens = tokens ?? new TokenList();
    if (Tokens.Count == 0)
    {
        Tokens.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));
    }
    END_OF_FILE_TOKEN = Tokens[Tokens.Count - 1];
}
/// <summary>
/// Reads tokens until the reader is exhausted (Read returns null) and returns
/// them all as a list.
/// </summary>
/// <returns>The array of tokens read.</returns>
public TokenList ReadAll()
{
    var tokens = new TokenList();
    var token = Read();
    while (token != null)
    {
        tokens.Add(token);
        token = Read();
    }
    return(tokens);
}
/// <summary>
/// Creates a stream over the given tokens, guaranteeing at least one token
/// (EndOfFile) and remembering the terminal token for fast access.
/// </summary>
public TokenStream(TokenList tokens, IParsingExecutionContext context)
{
    var source = tokens ?? new TokenList();
    if (source.Count == 0)
    {
        source.Add(Token.CreateEmpty(TokenType.EndOfFile, 0));
    }
    Context = context;
    Tokens = source;
    END_OF_FILE_TOKEN = source[source.Count - 1];
    CachedIndex = int.MinValue;
}
/// <summary>
/// Finishes the current lexeme as a token of the given type, appends it to the
/// list, and resets the lexeme buffer for the next iteration.
/// </summary>
private void BuildToken(TokenTypes type)
{
    var token = new Token();
    token.token = currentToken.ToString();
    token.lineNumber = lineNum;
    token.tokenType = type;
    TokenList.Add(token);
    currentToken = ""; // start a fresh token next iteration
}
// Attempts to parse a statement identifier at the current stream position.
// Only proceeds when the current scope is Statement (which is popped) or Block
// (where the preceding character must not be '$' or alphanumeric). Emits an
// Identifier token and pushes a nested scope depending on the identifier kind.
// Returns true when an identifier token was emitted.
private bool ParseStatement(TemplateStream stream, TokenList tokens, ContextStack context, ScopeStack scope)
{
    if (scope.Current == Scope.Statement)
    {
        scope.Pop();
    }
    else if (scope.Current == Scope.Block)
    {
        // Inside a block the identifier must start a fresh word; a preceding
        // '$' or letter/digit means we are mid-token.
        var previous = stream.Peek(-1);
        if (previous == '$' || char.IsLetterOrDigit(previous))
        {
            return(false);
        }
    }
    else
    {
        return(false);
    }
    var name = stream.PeekWord();
    var identifier = context.Current.GetIdentifier(name);
    if (identifier != null)
    {
        tokens.Add(new Token(stream.Position, name.Length, stream.Line, TokenType.Identifier, context.Current, identifier.QuickInfo));
        stream.Advance(name.Length);
        // The identifier kind decides which nested scope (if any) follows.
        if (identifier.Type == IdentifierType.Indexed)
        {
            if (stream.Current == '(')
            {
                scope.Push(Scope.Filter);
            }
            if (stream.Current == '[')
            {
                // Indexed identifiers open a nested template context.
                scope.Push(Scope.Template);
                context.Push(name, stream.Position);
            }
        }
        else if (identifier.Type == IdentifierType.Boolean)
        {
            if (stream.Current == '[')
            {
                scope.Push(Scope.True);
            }
        }
        return(true);
    }
    return(false);
}
/// <summary>
/// Returns all tokens that are opening braces with a matching closing brace.
/// </summary>
public static TokenList GetOpeningBraces(TokenList tokens)
{
    var braces = new TokenList();
    foreach (Token token in tokens)
    {
        bool isPairedOpenBrace =
            token.Term.IsSet(TermOptions.IsOpenBrace) && token.OtherBrace != null;
        if (isPairedOpenBrace)
        {
            braces.Add(token);
        }
    }
    return braces;
}
/// <summary>
/// Scans tokens one at a time until (and including) the EOF token, returning
/// them all as a list.
/// </summary>
public TokenList ScanAll()
{
    TokenList tokenList = new TokenList();
    while (true)
    {
        Token tok = new Token();
        this.ScanOne(tok);
        tokenList.Add(tok);
        if (tok.type == TokenType.EOF)
        {
            break;
        }
    }
    return(tokenList);
}
/// <summary>
/// Finishes the current lexeme as a constant token of the given type and
/// constant kind, then resets the lexeme buffer.
/// </summary>
private void BuildToken(TokenTypes type, ConstTypes constType)
{
    var token = new Token();
    token.token = currentToken.ToString();
    token.lineNumber = lineNum;
    token.tokenType = type;
    token.constType = constType;
    TokenList.Add(token);
    currentToken = ""; // reset for the next token
}
/// <summary>Adding two items stores them in insertion order.</summary>
public void Add()
{
    // Act
    _target.Add("A");
    _target.Add("B");

    // Assert
    Assert.AreEqual(2, _target.Count);
    Assert.AreEqual("A", _target[0]);
    Assert.AreEqual("B", _target[1]);
}
/// <summary>
/// Scans every token up to and including EOF into a list.
/// </summary>
public TokenList ScanAll()
{
    var tokens = new TokenList();
    Token tok;
    do
    {
        tok = new Token();
        ScanOne(tok);
        tokens.Add(tok);
    } while (tok.type != TokenType.EOF);
    return tokens;
}
/// <summary>
/// Scans tokens until EOF (inclusive) and returns them as a list.
/// </summary>
public TokenList ScanAll()
{
    TokenList list = new TokenList();
    Token tok;
    do
    {
        tok = new Token();
        this.ScanOne(tok);
        list.Add(tok);
    } while (tok.type != UnityEditor.iOS.Xcode.PBX.TokenType.EOF);
    return list;
}
/// <summary>
/// Collects all tokens from the scanner, stopping after the EOF token.
/// </summary>
public TokenList ScanAll()
{
    TokenList list = new TokenList();
    for (Token tok = new Token(); ; tok = new Token())
    {
        this.ScanOne(tok);
        list.Add(tok);
        if (tok.type == UnityEditor.iOS.Xcode.PBX.TokenType.EOF)
        {
            break;
        }
    }
    return(list);
}
/// <summary>
/// Consumes a run of consecutive newline characters as a single NewLine token.
/// Returns true when at least one newline was consumed.
/// </summary>
private bool ConsumeNewLine(ITextStream stream, TokenList tokens)
{
    if (!IsNewLine(stream.Current))
    {
        return(false);
    }
    int start = stream.Position;
    do
    {
        stream.Advance();
    } while (IsNewLine(stream.Current));
    tokens.Add(Token.Create(TokenType.NewLine, start, stream.Position - start));
    return(true);
}
/// <summary>
/// Flushes the letter buffer as Variable tokens, inserting an implicit
/// multiplication Operator token between adjacent variables. The SymbolEnd
/// flag is applied to every emitted Variable token.
/// </summary>
public void emptyLetterBufferAsVariables(bool SymbolEnd = false)
{
    int count = letterBuffer.Count;
    for (int i = 0; i < count; i++)
    {
        var variable = new Token("Variable", letterBuffer[i]);
        variable.SymbolEnd = SymbolEnd;
        TokenList.Add(variable);
        bool hasMoreVariables = i < count - 1;
        if (hasMoreVariables)
        {
            // Implicit multiplication between adjacent variables.
            TokenList.Add(new Token("Operator", "*"));
        }
    }
    letterBuffer.Clear();
}
/// <summary>
/// Serializes this node list as tokens wrapped in parentheses.
/// Fix: the original called tokens.Concat(node.Tokenize()) — LINQ's Concat is
/// a pure function that returns a new sequence, and its result was discarded,
/// so child tokens were never actually added to the output. Append them for
/// real instead.
/// </summary>
public ITokenList Tokenize()
{
    var tokens = new TokenList()
    {
        new Token(TokenType.Parenthesis, "(")
    };
    foreach (var node in this)
    {
        // Actually append each child's tokens (Concat would discard them).
        foreach (var token in node.Tokenize())
        {
            tokens.Add(token);
        }
    }
    tokens.Add(new Token(TokenType.Parenthesis, ")"));
    return(tokens);
}
// Moves every token overlapping [from, until] out of the given list into the
// returned list.
// NOTE(review): the source list is scanned from the end and matches are
// appended in that order, so the returned list is in REVERSE positional
// order — confirm callers rely on (or tolerate) this before changing it.
public TokenList ExtractTokensInRange(TokenList tokens, int from, int until)
{
    TokenList result = new TokenList();
    // Iterate backwards so RemoveAt does not shift indices still to be visited.
    for (int i = tokens.Count - 1; i >= 0; i--)
    {
        var tkn = tokens[i];
        // Skip tokens entirely after `until` or entirely before `from`.
        if (tkn.Location.Position > until || (tkn.Location.Position + tkn.Length < from))
        {
            continue;
        }
        result.Add(tkn);
        tokens.RemoveAt(i);
    }
    return(result);
}
/// <summary>
/// Builds the mapped token sub-list: MappedTokenLength tokens copied from
/// MappedTotalTokenList starting at MappedTokenStartIndex. Returns an empty
/// list when there is no source list or the mapped length is zero.
/// </summary>
public TokenList <TEnumTokenType> GetMappedTokenList()
{
    var result = new TokenList <TEnumTokenType>();
    bool hasSource = MappedTotalTokenList != null && MappedTotalTokenList.Count > 0;
    if (hasSource && MappedTokenLength > 0)
    {
        int end = MappedTokenStartIndex + MappedTokenLength;
        for (int j = MappedTokenStartIndex; j < end; j++)
        {
            result.Add(MappedTotalTokenList[j]);
        }
    }
    return(result);
}
// Builds MaxLines lines of tab-padded ASCII test data, MaxColumns cells each.
// iColumnMode is interpreted as a base-3 number whose digits select each
// column's justification mode (0 = left, 1 = right, 2 = exact-width right).
// Each rendered line is appended to LineList and its trimmed cell values to
// TokenList.
public void BuildAsciiText()
{
    for (int line = 1; line <= MaxLines; ++line)
    {
        var lineBuilder = new StringBuilder();
        int currentPosition = 0;
        var globColType = iColumnMode;
        var tokens = new List <string>();
        TokenList.Add(tokens);
        for (int c = 1; c <= MaxColumns; ++c) // 4 columns
        {
            // Peel off the next base-3 digit to pick this column's mode.
            var locColType = globColType % 3;
            globColType /= 3;
            string token;
            long value;
            switch (locColType)
            {
                case 0: // leftJustified
                    value = line + c + GetLongOfWidth(1 + (line + c) % (fieldWidth - 1));
                    token = ToTabFilledLeftAdjusted(lineBuilder, ref currentPosition, value, Culture, fieldWidth, tabSize);
                    tokens.Add(token.Trim());
                    break;
                case 1: // rightJustified
                    value = line + c + GetLongOfWidth(1 + (line + c) % (fieldWidth - 1));
                    token = ToTabFilledRightAdjusted(lineBuilder, ref currentPosition, value, Culture, fieldWidth, tabSize);
                    tokens.Add(token.Trim());
                    break;
                case 2: // just fitting exactly in our cell
                    value = line + c + GetLongOfWidth(fieldWidth - 1);
                    token = ToTabFilledRightAdjusted(lineBuilder, ref currentPosition, value, Culture, fieldWidth, tabSize);
                    tokens.Add(token.Trim());
                    break;
                default:
                    // Unreachable: locColType is always 0..2.
                    throw new InvalidProgramException();
            }
        }
        LineList.Add(lineBuilder.ToString());
    }
}
/// <summary>
/// Loads every token image (*.png) in the token directory together with its
/// sibling .json metadata file, creating the directory on first use.
/// Fixes: the original tested the extension with Substring(Length - 4) (which
/// throws on paths shorter than four characters), and built the JSON path with
/// Replace("png", "json"), which rewrites EVERY occurrence of "png" in the
/// path (e.g. a parent folder named "png"). EndsWith and Path.ChangeExtension
/// only touch the actual extension.
/// </summary>
private void LoadTokens()
{
    if (!Directory.Exists(_tokenDir))
    {
        Directory.CreateDirectory(_tokenDir);
    }
    string[] files = Directory.GetFiles(_tokenDir);
    foreach (var imageFilePath in files)
    {
        // Same case-sensitive ".png" match as the original, without the
        // short-path crash.
        if (imageFilePath.EndsWith(".png", StringComparison.Ordinal))
        {
            // Swap only the extension, never directory-name occurrences of "png".
            string jsonFilePath = Path.ChangeExtension(imageFilePath, ".json");
            TokenList.Add(new ModelToken(this, imageFilePath, jsonFilePath));
        }
    }
}
/// <summary>
/// Reads the header of an @media rule (everything before the opening block),
/// builds the MediaRule from its raw text, then parses the block body.
/// </summary>
public CssRule ReadMediaRule()
{
    // e.g. @media print {
    var headerTokens = new TokenList();
    while (!isEnd && current.Kind != TokenKind.BlockStart)
    {
        headerTokens.Add(Read());
    }
    var rule = new MediaRule(headerTokens.RawText.Trim());
    ReadBlock(rule);
    return rule;
}
//Note: we don't actually parse in current version, only scan. Will implement full parsing in the future,
// to support all intellisense operations
private void ParseSource(String newText)
{
    TokenList newTokens = new TokenList();
    // An empty document produces an empty token list; the scanner is only
    // invoked when there is actual text to scan.
    if (newText != string.Empty)
    {
        SourceFile srcFile = new SourceFile(newText, "source");
        _compiler.Scanner.Prepare(_context, srcFile);
        IEnumerable<Token> tokenStream = _compiler.Scanner.BeginScan();
        foreach (Token token in tokenStream)
        {
            newTokens.Add(token);
        }
        //newTokens.AddRange(tokenStream);
    }
    // Swap in the freshly parsed contents, then notify every attached view.
    _parsedSource = new ParsedSource(newText, newTokens, null);
    foreach (var view in GetViews())
    {
        view.UpdateParsedSource(_parsedSource);
    }
}
/// <summary>Parse next token from currently parsed line, starting at given position and
/// add the retrieved token at end of given token list.</summary>
/// <param name="aList">The token list where to add the newly recognized token.</param>
/// <param name="lineNumber">Line number for diagnostics and debugging purpose.</param>
/// <param name="rPos">The index in current source code line of the first not yet consumed
/// character. On return this parameter will be updated to account for characters that would
/// have been consumed.</param>
protected void NewToken(TokenList aList, int lineNumber, ref int rPos)
{
    #region Pattern Notes
    // All patterns start with _, this makes them reserved. User can use too, but at own risk of conflict.
    //
    // Wildcards
    // -_REG or ??X
    // -_REG8 or ?H,?L
    // -_REG16 or ?X
    // -_REG32 or E?X
    // - ? based ones are ugly and less clear
    // -_Keyword
    // -_ABC
    //
    //
    // Multiple Options (All caps only) - Registers only
    // Used to suport EAX,EBX - ie lists. But found out wasnt really needed. May add again later.
    //
    // -AX/AL - Conflict if we ever use /
    // -AX|AL - Conflict if we ever use |
    // -AX,AL - , is unlikely to ever be used as an operator and is logical as a separator. Method calls might use, but likely better to use a space
    // since we will only allow simple arguments, not compound.
    // -_REG:AX|AL - End terminator issue
    // -_REG[AX|AL] - Conflict with existing indirect access. Is indirect access always numeric? I think x86 has some register based ones too.
    //
    //
    // Specific: Register, Keyword, AlphaNum
    // -EAX
    #endregion

    string xString = null;
    char xChar1 = mData[mStart];
    var xToken = new Token(lineNumber);

    // Recognize comments and literal assembler code.
    if (mAllWhitespace && "/!".Contains(xChar1))
    {
        rPos = mData.Length; // This will account for the dummy whitespace at the end.
        xString = mData.Substring(mStart + 1, rPos - mStart - 1).Trim();
        // So ToString/Format wont generate error
        xString = xString.Replace("{", "{{");
        xString = xString.Replace("}", "}}");
        // Fix issue #15662 with string length check.
        // Fix issue #15663 with comparing from mData and not from xString anymore.
        if (('/' == xChar1) && (2 <= xString.Length) && ('/' == mData[mStart + 1]))
        {
            xString = xString.Substring(1);
            xToken.Type = TokenType.Comment;
        }
        else if (xChar1 == '!')
        {
            // Literal assembler code.
            xToken.Type = TokenType.LiteralAsm;
        }
    }
    else
    {
        xString = mData.Substring(mStart, rPos - mStart);
        if (string.IsNullOrWhiteSpace(xString) && xString.Length > 0)
        {
            xToken.Type = TokenType.WhiteSpace;
        }
        else if (xChar1 == '\'')
        {
            // Quoted string literal: strip the surrounding quotes.
            xToken.Type = TokenType.ValueString;
            xString = xString.Substring(1, xString.Length - 2);
        }
        else if (char.IsDigit(xChar1))
        {
            xToken.Type = TokenType.ValueInt;
            if (xString.StartsWith("0x"))
            {
                xToken.SetIntValue(Convert.ToUInt32(xString, 16));
            }
            else
            {
                xToken.SetIntValue(uint.Parse(xString));
            }
        }
        else if (xChar1 == '$')
        {
            xToken.Type = TokenType.ValueInt;
            // Remove surrounding '
            // '$' prefix denotes hex: rewrite as 0x and parse.
            xString = "0x" + xString.Substring(1);
            if (xString.StartsWith("0x"))
            {
                xToken.SetIntValue(Convert.ToUInt32(xString, 16));
            }
            else
            {
                xToken.SetIntValue(uint.Parse(xString));
            }
        }
        else if (IsAlphaNum(xChar1))
        {
            // This must be after check for ValueInt
            string xUpper = xString.ToUpper();

            // Special parsing when in pattern mode. We recognize some special strings
            // which would otherwise be considered as simple AlphaNum token otherwise.
            if (mAllowPatterns)
            {
                if (RegisterPatterns.Contains(xUpper))
                {
                    xToken.Type = TokenType.Register;
                }
                else if (xUpper == "_KEYWORD")
                {
                    xToken.Type = TokenType.Keyword;
                    xString = null;
                }
                else if (xUpper == "_ABC")
                {
                    xToken.Type = TokenType.AlphaNum;
                    xString = null;
                }
                else if (xUpper == "_PCALL")
                {
                    xString = null;
                    xToken.Type = TokenType.Call;
                }
            }

            // Not a pattern: classify against known registers/keywords/calls.
            if (xToken.Type == TokenType.Unknown)
            {
                Register xRegister;
                if (Registers.TryGetValue(xUpper, out xRegister))
                {
                    xToken.Type = TokenType.Register;
                    xToken.SetRegister(xRegister);
                }
                else if (mKeywords.Contains(xUpper))
                {
                    xToken.Type = TokenType.Keyword;
                }
                else if (xString.Contains("(") && xString.Contains(")") && IsAlphaNum(xChar1))
                {
                    xToken.Type = TokenType.Call;
                }
                else
                {
                    xToken.Type = TokenType.AlphaNum;
                }
            }
        }
        else if (Delimiters.Contains(xString))
        {
            xToken.Type = TokenType.Delimiter;
        }
        else if (Operators.Contains(xString))
        {
            xToken.Type = TokenType.Operator;
        }
    }

    xToken.RawValue = xString;
    xToken.SrcPosStart = mStart;
    xToken.SrcPosEnd = xToken.Type == TokenType.Call ? rPos : rPos - 1;
    if (mAllWhitespace && (xToken.Type != TokenType.WhiteSpace))
    {
        mAllWhitespace = false;
    }
    mStart = xToken.Type == TokenType.Call ? rPos + 1 : rPos;

    if (mIncludeWhiteSpace || (xToken.Type != TokenType.WhiteSpace))
    {
        aList.Add(xToken);
    }
}
/// <summary>
/// Reads the selector tokens preceding a block start (e.g. "#id.hello") and
/// wraps them in a CssSelector.
/// </summary>
public CssSelector ReadSelector()
{
    var span = new TokenList();
    while (!isEnd && current.Kind != TokenKind.BlockStart)
    {
        // TODO: consider handling multi-selectors (comma-separated) explicitly.
        var token = tokenizer.Read();
        span.Add(token);
    }
    return new CssSelector(span);
}
/// <summary>
/// Reads tokens until just after a span terminator (colon, block start/end,
/// or semicolon) or the end of input, then skips trailing trivia.
/// </summary>
public TokenList ReadSpan()
{
    var span = new TokenList();
    while (!isEnd)
    {
        span.Add(Read());
        bool atTerminator =
            current.Kind == TokenKind.Colon ||
            current.Kind == TokenKind.BlockStart ||
            current.Kind == TokenKind.BlockEnd ||
            current.Kind == TokenKind.Semicolon;
        if (atTerminator)
        {
            break;
        }
    }
    ReadTrivia(); // Trailing trivia
    return span;
}
/// <summary>
/// Reads the header of an @keyframes rule up to the opening block, builds the
/// KeyframesRule from the raw header text, then parses the block body.
/// (The original comment showed an @media example, which was misleading.)
/// </summary>
public CssRule ReadKeyframesRule()
{
    var header = new TokenList();
    while (!isEnd && current.Kind != TokenKind.BlockStart)
    {
        header.Add(Read());
    }
    var rule = new KeyframesRule(header.RawText.Trim());
    ReadBlock(rule);
    return rule;
}
/// <summary>
/// Collapses a run of newline characters into one NewLine token; returns true
/// when anything was consumed.
/// </summary>
private bool ConsumeNewLine(ITextStream stream, TokenList tokens)
{
    if (!IsNewLine(stream.Current))
    {
        return false;
    }
    int startPos = stream.Position;
    while (IsNewLine(stream.Current))
    {
        stream.Advance();
    }
    int length = stream.Position - startPos;
    tokens.Add(Token.Create(TokenType.NewLine, startPos, length));
    return true;
}
/// <summary>
/// Consumes a "#{ ... }" interpolation, emitting OpenInterpolation, the inner
/// tokens, and CloseInterpolation (when the '}' is present). Returns true when
/// an interpolation was consumed.
/// Fix: the inner loop now also stops at end of stream — the original had no
/// such guard, so an unterminated interpolation at EOF could loop forever if
/// TryCreateToken stopped advancing.
/// </summary>
private bool ConsumeInterpolation(ITextStream stream, TokenList tokens)
{
    if (stream.Current == '#' && stream.Peek(1) == '{')
    {
        tokens.Add(Token.Create(TokenType.OpenInterpolation, stream.Position, 2));
        stream.Advance(2);
        // End-of-stream guard prevents spinning on an unterminated "#{".
        while (stream.Position < stream.Length && stream.Current != '}' && !IsNewLine(stream.Current))
        {
            Token token;
            if (TryCreateToken(stream, out token))
                tokens.Add(token);
        }
        if (stream.Current == '}')
        {
            tokens.Add(Token.Create(TokenType.CloseInterpolation, stream.Position, 1));
            stream.Advance();
        }
        return true;
    }
    return false;
}
/// <summary>
/// Recognizes and consumes "///" (XML doc), "//" (line comment plus its text),
/// or "/* ... */" (open/text/close tokens). Returns true on any match.
/// </summary>
private bool ConsumeComment(ITextStream stream, TokenList tokens)
{
    if (stream.Current != '/')
    {
        return false;
    }

    int start = stream.Position;
    var next = stream.Peek(1);

    if (next == '/')
    {
        if (stream.Peek(2) == '/')
        {
            // XML documentation comment: "///"
            stream.Advance(3);
            tokens.Add(Token.Create(TokenType.XmlDocumentationComment, start, 3));
        }
        else
        {
            // Line comment: "//", body runs to the newline.
            stream.Advance(2);
            tokens.Add(Token.Create(TokenType.CppComment, start, 2));
            if (!IsNewLine(stream.Current))
                ConsumeCommentText(stream, tokens, s => IsNewLine(s.Current));
        }
        return true;
    }

    if (next != '*')
    {
        return false;
    }

    // Block comment: "/* ... */"
    stream.Advance(2);
    tokens.Add(Token.Create(TokenType.OpenCssComment, start, 2));
    ConsumeCommentText(stream, tokens, s => s.Current == '*' && s.Peek(1) == '/');
    if (stream.Current == '*' && stream.Peek(1) == '/')
    {
        int closeStart = stream.Position;
        stream.Advance(2);
        tokens.Add(Token.Create(TokenType.CloseCssComment, closeStart, 2));
    }
    return true;
}
// Consumes characters until the predicate matches or the stream ends, then
// emits a single CommentText token covering the consumed span. Returns true
// when at least one character was consumed.
private bool ConsumeCommentText(ITextStream stream, TokenList tokens, Func<ITextStream, bool> predicate)
{
    int start = stream.Position;
    while (stream.Position < stream.Length)
    {
        if (predicate(stream))
            break;
        stream.Advance();
    }
    if (start != stream.Position)
    {
        // NOTE(review): Reverse(1) immediately followed by Advance() looks like
        // a net no-op on the position; presumably it forces the stream to
        // re-sync some internal state — confirm intent before removing.
        stream.Reverse(1);
        stream.Advance();
        tokens.Add(Token.Create(TokenType.CommentText, start, stream.Position - start));
        return true;
    }
    return false;
}