public static void Run(TreeElement node, ILexer lexer, ITokenOffsetProvider offsetProvider, ShaderLabPreProcessor preProcessor, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Only composite roots can receive filtered tokens.
    if (!(node is CompositeElement root))
        return;

    // Temporarily append an EOF sentinel so filtered tokens can be inserted
    // right up to the end of the file.
    var eofSentinel = new EofToken(lexer.Buffer.Length);
    root.AppendNewChild(eofSentinel);

    // Restart the lexer, then walk the tree so the inserter can process each leaf.
    var inserter = new ShaderLabMissingTokensInserter(lexer, offsetProvider, preProcessor, interruptChecker, intern);
    lexer.Start();
    inserter.Run(root);

    // The sentinel was only needed during insertion - remove it again.
    root.DeleteChildRange(eofSentinel, eofSentinel);
}
public static void Run(TreeElement node, ILexer lexer, ITokenOffsetProvider offsetProvider, bool trimTokens, SeldomInterruptChecker interruptChecker)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Only composite roots can receive missing tokens.
    if (!(node is CompositeElement root))
        return;

    var inserter = new PsiMissingTokensInserter(lexer, offsetProvider, interruptChecker);
    lexer.Start();

    if (trimTokens)
    {
        // Trimming mode: run against a disposable dummy container wrapping the root.
        using (var container = new DummyContainer(root))
        {
            inserter.Run(container);
        }
        return;
    }

    // Non-trimming mode: append a temporary EOF terminator so tokens are
    // inserted up to the end of the buffer, then remove it afterwards.
    var terminator = new EofToken(lexer.Buffer);
    root.AppendNewChild(terminator);
    inserter.Run(root);
    root.DeleteChildRange(terminator, terminator);
}
public override Token ExtractToken()
{
    SkipWhitespace();

    // The current character (plus one char of lookahead for "w:" elements)
    // decides which token type to construct.
    var ch = CurrentChar;
    if (ch.IsEOF())
        return new EofToken(Source);
    if (ch == 'w' && Source.PeekChar == ':')
        return new ElementToken(Source);
    if (ch.IsLetter())
        return new WordToken(Source);
    if (ch == '"')
        return new StringToken(Source);
    if (ch.IsSymbol())
        return new SymbolToken(Source);
    return new Token(Source);
}
public static void Run(
    TreeElement node,
    ILexer lexer,
    ITokenOffsetProvider offsetProvider,
    bool trimTokens,
    SeldomInterruptChecker interruptChecker,
    ITokenIntern intern)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Missing tokens can only be attached to a composite root.
    if (!(node is CompositeElement root))
        return;

    var inserter = new NTriplesMissingTokensInserter(lexer, offsetProvider, interruptChecker, intern);
    lexer.Start();

    if (trimTokens)
    {
        // Trimming mode: the dummy container scopes the run and is disposed afterwards.
        using (var container = new DummyContainer(root))
        {
            inserter.Run(container);
        }
        return;
    }

    // Non-trimming mode: add a temporary EOF terminator so insertion covers
    // the whole buffer, then delete it once the run is complete.
    var terminator = new EofToken(lexer.Buffer);
    root.AppendNewChild(terminator);
    inserter.Run(root);
    root.DeleteChildRange(terminator, terminator);
}
// Extract and return the next Pascal token from the source.
protected override Token extractToken()
{
    skipWhiteSpace();

    // The current character decides which token type gets constructed.
    char ch = currentChar();

    if (ch == Source.EOF)
        return new EofToken(source);
    if (char.IsLetter(ch))
        return new PascalWordToken(source);
    if (char.IsDigit(ch))
        return new PascalNumberToken(source);
    if (ch == '\'')
        return new PascalStringToken(source);
    if (PascalTokenType.SPECIAL_SYMBOLS.ContainsKey(Char.ToString(ch)))
        return new PascalSpecialSymbolToken(source);

    // Anything else is invalid: build an error token, then consume the
    // offending character so scanning can continue.
    var errorToken = new PascalErrorToken(source, PascalErrorCode.INVALID_CHARACTER, Char.ToString(ch));
    nextChar(); // consume character
    return errorToken;
}
protected override Token ExtractToken()
{
    SkipWhitespace();

    var currentChar = CurrentChar();

    // The current character determines the token type. Numbers, special
    // symbols and error tokens are not implemented yet (TODO); anything that
    // is not EOF, a letter or the start of a string currently falls back to
    // an EofToken, matching the previous behavior.
    Token token;
    if (currentChar == Source.Eof)
    {
        token = new EofToken(Source);
    }
    else if (char.IsLetter(currentChar))
    {
        token = new PascalWordToken(Source);
    }
    else if (currentChar == '\'')
    {
        token = new PascalStringToken(Source);
    }
    else
    {
        // Fallback for not-yet-supported characters (digits, special symbols,
        // invalid input). Previously an EofToken was pre-allocated for every
        // call and overwritten in most branches; now it is built only here.
        token = new EofToken(Source);
    }

    return token;
}
public void Extract_DoesNothing()
{
    // Arrange: a strict source mock - any interaction other than LineNumber
    // would make the mock throw.
    var sourceMock = new Mock<ISource>(MockBehavior.Strict);
    sourceMock.Setup(s => s.LineNumber).Returns(0);

    // Act: extracting an EOF token must not advance or read the source;
    // the strict mock asserts this implicitly.
    var token = new EofToken(sourceMock.Object);
    token.Extract();
}
protected override Token ExtractToken()
{
    // The current character determines the token type: end-of-source yields
    // an EofToken, anything else a plain Token.
    var ch = CurrentChar();
    return ch == Source.EOF
        ? (Token)new EofToken(source)
        : new Token(source);
}
protected override Token ExtractToken()
{
    // The current character determines the token type: end-of-input becomes
    // an EofToken, every other character becomes a CharToken.
    return CurrentChar() == Constants.EOF
        ? (Token)new EofToken(Source)
        : new CharToken(Source);
}
// Extract and return the next Pascal token from the source.
protected override Token extractToken()
{
    skipWhiteSpace();

    char lookahead = currentChar();

    // Dispatch on the current character to pick the token type:
    // EOF, identifier/keyword, number, string, or special symbol.
    if (lookahead == Source.EOF)
    {
        return new EofToken(source);
    }
    if (char.IsLetter(lookahead))
    {
        return new PascalWordToken(source);
    }
    if (char.IsDigit(lookahead))
    {
        return new PascalNumberToken(source);
    }
    if (lookahead == '\'')
    {
        return new PascalStringToken(source);
    }
    if (PascalTokenType.SPECIAL_SYMBOLS.ContainsKey(Char.ToString(lookahead)))
    {
        return new PascalSpecialSymbolToken(source);
    }

    // No rule matched: produce an error token and consume the bad character
    // so the scanner does not loop on it.
    var invalid = new PascalErrorToken(source, PascalErrorCode.INVALID_CHARACTER, Char.ToString(lookahead));
    nextChar(); // consume character
    return invalid;
}
public static Token Create(string value)
{
    // Classify the raw text, then build the matching concrete token.
    TokenType ttype = GetTokenType(value);
    switch (ttype)
    {
        // All arithmetic, logical, comparison and function types share one
        // operator token, parameterized by the type.
        case TokenType.SUM:
        case TokenType.SUB:
        case TokenType.MULT:
        case TokenType.DIV:
        case TokenType.POW:
        case TokenType.REM:
        case TokenType.AND:
        case TokenType.OR:
        case TokenType.NOT:
        case TokenType.EQUALS:
        case TokenType.NOTEQUALS:
        case TokenType.GREATER:
        case TokenType.GREATEROREQUALS:
        case TokenType.LOWER:
        case TokenType.LOWEROREQUALS:
        case TokenType.EXP:
        case TokenType.LN:
        case TokenType.COS:
        case TokenType.COSH:
        case TokenType.SIN:
        case TokenType.SINH:
        case TokenType.TAN:
        case TokenType.TANH:
        case TokenType.SQRT:
        case TokenType.ABS:
        case TokenType.LOG10:
            return new OperatorToken(ttype);
        case TokenType.STARTPAR:
            return new StartParToken();
        case TokenType.ENDPAR:
            return new EndParToken();
        case TokenType.PAR:
            return new ParToken(value);
        case TokenType.NUM:
            return new NumToken(value, InferNumericType(value));
        case TokenType.BOOL:
            return new BoolToken(value);
        case TokenType.PI:
        case TokenType.E:
            return new ConstantToken(ttype, value);
        case TokenType.EOF:
            return new EofToken();
        default:
            // Unrecognized token type - preserve the original null result.
            return null;
    }
}