/// <summary>
/// Creates a parser over the given lexer and wires up the rule entry/exit hooks
/// that maintain the parse tree.
/// </summary>
/// <param name="lexer">Token source for the parser.</param>
public MoshParser(ILexer lexer)
{
    m_startRuleToken = new Token { TokenType = TokenType.NEW_RULE };
    m_tree = new Node<string>();
    m_lexer = lexer;
    m_consumed = new Stack<Token>();
    m_err = new Queue<Error>();

    // Pre-hook: entering a rule pushes a child node and returns the node to
    // restore afterwards; the very first rule names the root instead.
    m_rulePreHook = ruleName =>
    {
        if (m_tree.Value == null)
        {
            m_tree.Value = ruleName;
            return m_tree;
        }
        var previousTop = m_tree;
        m_tree = m_tree.AddChild(ruleName);
        return previousTop;
    };

    // Post-hook: leaving a rule restores the saved tree position.
    m_rulePostHook = restored => m_tree = restored;
}
/// <summary>
/// Creates a SPIMI indexer with default block reader/writer and index writer.
/// </summary>
/// <param name="lexer">Lexer used to tokenize documents.</param>
public SpimiIndexer(ILexer lexer)
{
    this.lexer = lexer;
    this.fileIndexWriter = new FileIndexWriter();
    this.blockWriter = new SpimiBlockWriter();
    this.blockReader = new SpimiBlockReader();
}
/// <summary>
/// Initialize code highlighting.
/// </summary>
/// <param name="keywordsXml">XML document describing the lexer's keywords and style definitions.</param>
private void InitializeHighlighting(string keywordsXml)
{
    // Styling is performed on demand via HandleStyleNeeded (container lexer mode, see below).
    StyleNeeded += new EventHandler<StyleNeededEventArgs>(HandleStyleNeeded);

    // Caret and current-line appearance.
    CaretLineBackColor = Theme.WorkspaceHighlight;
    CaretLineVisible = true;
    CaretForeColor = Theme.HighlightForeColor;

    // Configure the default style, then StyleClearAll copies it to every style slot
    // (Scintilla semantics) — so font/size must be set before the clear, colors after.
    StyleResetDefault();
    Styles[Style.Default].Font = "Consolas";
    Styles[Style.Default].Size = 11;
    StyleClearAll();
    Styles[Style.Default].ForeColor = Theme.TextColor;
    Styles[Style.Default].BackColor = Theme.Workspace;
    Styles[Style.LineNumber].ForeColor = Theme.HighlightBackColor;
    Styles[Style.LineNumber].BackColor = Theme.Workspace;
    Styles[Style.CallTip].Font = "Sans";
    Styles[Style.CallTip].ForeColor = Theme.HighlightBackColor;
    Styles[Style.CallTip].BackColor = Theme.BackColor;

    // Lazily create the shared lexer from the keyword file.
    if (FxLexer == null)
        FxLexer = new FxLexer(keywordsXml);

    // set styles as defined in the keyword file
    foreach (var style in FxLexer.Styles)
    {
        Styles[style.id].ForeColor = style.fore;
        Styles[style.id].BackColor = style.back;
    }

    // Container mode: Scintilla delegates lexing/styling back to us.
    Lexer = ScintillaNET.Lexer.Container;
}
/// <summary>
/// Parses a module reference from text. On an unexpected token, a partial
/// result (if any) is attached to a rethrown <see cref="UnexpectedToken"/>.
/// </summary>
/// <param name="text">Source text to parse.</param>
/// <returns>The parsed reference module.</returns>
public ReferenceModule ParseModule(string text)
{
    lexer = new CSharpLexer(new StringBuffer(text));
    Start();

    XmlToken moduleToken;
    try
    {
        moduleToken = ParseModuleName();
    }
    catch (UnexpectedToken ex)
    {
        // If the failed parse still produced a token, wrap it in a module
        // reference and rethrow so callers can use the partial result.
        if (ex.ParsingResult is XmlToken partialToken)
        {
            ReferenceModule partial = CreateReferenceModule(partialToken);
            var wrapped = new UnexpectedToken("Unexpected token");
            wrapped.ParsingResult = partial;
            throw wrapped;
        }
        throw;
    }
    return CreateReferenceModule(moduleToken);
}
/// <summary>
/// Creates a T4 parser bound to an environment, directive metadata, a lexer
/// and an optional source file.
/// </summary>
internal T4Parser([NotNull] T4Environment t4Environment, [NotNull] DirectiveInfoManager directiveInfoManager, [NotNull] ILexer lexer, [CanBeNull] IPsiSourceFile sourceFile)
{
    _sourceFile = sourceFile;
    _lexer = lexer;
    _directiveInfoManager = directiveInfoManager;
    _t4Environment = t4Environment;
}
/// <summary>
/// Applies the given configuration action, then picks up any components
/// (lexer, graph builder, compiler) the caller replaced. Unset components
/// keep their current values.
/// </summary>
/// <param name="configMethod">Callback that mutates the parsing configuration.</param>
public void Configure(Action<ParsingConfiguration> configMethod)
{
    configMethod.Invoke(_parsingContext.Configuration);
    var configuration = _parsingContext.Configuration;
    _lexer = configuration.Lexer ?? _lexer;
    _graphBuilder = configuration.GraphBuilder ?? _graphBuilder;
    _compiler = configuration.ExpressionCompiler ?? _compiler;
}
/// <summary>
/// Inserts missing (filtered) tokens into the PSI tree rooted at <paramref name="node"/>.
/// The node must be a detached root (no parent) and a composite element.
/// </summary>
public static void Run(TreeElement node, ILexer lexer, ITokenOffsetProvider offsetProvider, bool trimTokens, SeldomInterruptChecker interruptChecker)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Only composite elements can receive children; anything else is a no-op.
    if (!(node is CompositeElement root))
        return;

    var inserter = new PsiMissingTokensInserter(lexer, offsetProvider, interruptChecker);
    lexer.Start();

    if (!trimTokens)
    {
        // Append a temporary EOF terminator so trailing tokens get a right
        // boundary, then remove it again afterwards.
        var terminator = new EofToken(lexer.Buffer);
        root.AppendNewChild(terminator);
        inserter.Run(root);
        root.DeleteChildRange(terminator, terminator);
    }
    else
    {
        using (var container = new DummyContainer(root))
        {
            inserter.Run(container);
        }
    }
}
/// <summary>
/// Creates an N-Triples parser; the supplied lexer is wrapped in a filtering
/// lexer before use.
/// </summary>
public NTriplesParser(ILexer lexer, CommonIdentifierIntern commonIdentifierIntern)
{
    this.myCheckForInterrupt = new SeldomInterruptChecker();
    this.originalLexer = lexer;
    this.commonIdentifierIntern = commonIdentifierIntern;
    this.setLexer(new NTriplesFilteringLexer(lexer));
}
/// <summary>
/// Creates a PSI parser over a filtering wrapper of the given lexer and
/// starts lexing immediately.
/// </summary>
public PsiParser(ILexer lexer)
{
    myOriginalLexer = lexer;
    myCheckForInterrupt = new SeldomInterruptChecker();
    myLexer = new FilteringPsiLexer(lexer);
    myLexer.Start();
}
/// <summary>
/// Kaleidoscope-style REPL driver: dispatches on the current token until EOF.
/// </summary>
private static void MainLoop(ILexer lexer, IParser parser)
{
    // top ::= definition | external | expression | ';'
    while (true)
    {
        Console.Write("ready> ");
        int current = lexer.CurrentToken;
        if (current == (int)Token.EOF)
            return;
        if (current == ';')
            lexer.GetNextToken(); // bare semicolon: just consume it
        else if (current == (int)Token.DEF)
            parser.HandleDefinition();
        else if (current == (int)Token.EXTERN)
            parser.HandleExtern();
        else
            parser.HandleTopLevelExpression();
    }
}
/// <summary>
/// Yields every lexer position matching <paramref name="ruleName"/>.
/// Directly-matching rules are tried first; left-recursive rules (those whose
/// first element embeds the rule itself) are then applied to each direct match.
/// </summary>
public IEnumerable<ILexer> GetMatches(ILexer lexer, string ruleName)
{
    var leftRecursiveRules = new List<GrammarRule>();
    var directMatches = new List<ILexer>();

    foreach (var rule in _table.Get(ruleName))
    {
        if (rule.Elements[0] is GrammarRuleElementEmbed embed && embed.RuleName == ruleName)
        {
            // Left-recursive alternative; only useful if it has a tail to match.
            if (rule.Elements.Length > 1)
                leftRecursiveRules.Add(rule);
        }
        else
        {
            foreach (var match in GetMatches(lexer, rule))
            {
                yield return match;
                directMatches.Add(match);
            }
        }
    }

    // Grow each direct match with the left-recursive wrap rules.
    foreach (var start in directMatches)
        foreach (var match in MatchesRecursive(start, leftRecursiveRules))
            yield return match;
}
/// <summary>
/// Returns a ready-to-use EC# lexer: creates one on first use, otherwise
/// resets the existing instance to the new source and position.
/// </summary>
protected override ILexer<Token> PrepareLexer(ILexer<Token> lexer, ICharSource file, int position)
{
    if (lexer == null)
        return new EcsLexer(file, "?", MessageSink.Trace, position);

    // Reuse (and intentionally hard-cast) the previously created lexer.
    var ecsLexer = (EcsLexer)lexer;
    ecsLexer.Reset(file, "?", position);
    return ecsLexer;
}
/// <inheritdoc />
public ILexer<Repetition> Create(ILexer lexer)
{
    if (lexer is null)
        throw new ArgumentNullException(nameof(lexer));

    return new OptionLexer(lexer);
}
/// <summary>
/// Prints the query followed by each token the lexer produces for it.
/// </summary>
static void RunQuery(string query, ILexer lexer)
{
    Console.WriteLine(query);
    var tokens = lexer.Tokenize(query);
    foreach (var t in tokens)
    {
        Console.WriteLine(t);
    }
}
/// <summary>
/// Creates a parser and primes a two-token lookahead from the lexer.
/// </summary>
public Parser(ILexer lexer)
{
    this.lexer = lexer;
    // prime the lookahead
    currentToken = lexer.GetToken();
    nextToken = lexer.GetToken();
}
/// <summary>
/// Configures a lexer to skip whitespace and use this instance's token definitions.
/// </summary>
/// <param name="lexer">Lexer to configure; must not be null.</param>
public void SetupLexer(ILexer lexer)
{
    if (lexer is null)
        throw new ArgumentNullException(nameof(lexer));

    lexer.IgnoreWhitespace = true;
    lexer.TokenDefinitions = _tokenDefinitions;
}
/// <summary>
/// Initializes a new instance of the <see cref="T:Parser"/> class.
/// </summary>
/// <param name="reader">The reader.</param>
public Parser(TextReader reader)
{
    if (reader is null)
        throw new ArgumentNullException(nameof(reader));

    this.lexer = CreateLexer(reader);
    // Load the first token so parsing can begin immediately.
    this.current = this.lexer.NextToken();
}
/// <summary>
/// Creates an edge parser with empty assembly/namespace/object registries.
/// </summary>
public EdgeParser(ILexer lexer)
{
    this.lexer = lexer;
    objects = new Dictionary<string, ObjectNode>();
    namespaces = new HashSet<string>();
    assemblies = new HashSet<string>();
}
/// <summary>
/// Parses a type name or attribute value from the given text.
/// </summary>
/// <param name="text">Source text to parse.</param>
/// <returns>The parsed reference name.</returns>
protected ReferenceName ParseTypeNameOrAttributeValue(string text)
{
    lexer = new CSharpLexer(new StringBuffer(text));
    Start();
    TreeElement leadingIdentifier = ParseIdentifier();
    return (ReferenceName)ParseReferencedName(leadingIdentifier);
}
/// <summary>
/// Creates a SPIMI indexer that writes term blocks to <paramref name="indexStream"/>
/// and collection metadata to <paramref name="metadata"/>.
/// </summary>
public SpimiIndexer(ILexer lexer, IParser parser, Stream indexStream, Stream metadata)
{
    this.parser = parser;
    this.lexer = lexer;
    this.indexStream = indexStream;
    this.metadataWriter = new CollectionMetadataWriter(metadata);
    this.termIndexBlockWriter = new SpimiBlockWriter();
}
/// <summary>
/// Finds the VB.NET expression at <paramref name="offset"/> by feeding lexer tokens
/// into an ExpressionFinder parser up to the target position, then inspecting the
/// parser state to determine the expression's start and its context.
/// </summary>
/// <param name="text">Full source text to scan.</param>
/// <param name="offset">Caret offset at which to resolve the expression.</param>
/// <returns>The expression result, or an empty-expression result with context when no start can be found.</returns>
public ExpressionResult FindExpression(string text, int offset) { Init(text, offset); ExpressionFinder p = new ExpressionFinder(); lexer = ParserFactory.CreateLexer(SupportedLanguage.VBNet, new StringReader(text)); Token t = lexer.NextToken(); // put all tokens in front of targetPosition into the EF-Parser while (t.EndLocation < targetPosition) { p.InformToken(t); t = lexer.NextToken(); } // put current token into EF-Parser if it cannot be continued (is simple operator) if (t.EndLocation == targetPosition && ((t.Kind <= Tokens.ColonAssign && t.Kind > Tokens.Identifier) || t.Kind == Tokens.EOL)) { p.InformToken(t); t = lexer.NextToken(); } // make sure semantic actions are executed p.Advance(); // remember current state, we'll use it to determine the context var block = p.CurrentBlock; ExpressionContext context = p.IsIdentifierExpected && !p.IsMissingModifier ? ExpressionContext.IdentifierExpected : GetContext(block); BitArray expectedSet; try { expectedSet = p.GetExpectedSet(); } catch (InvalidOperationException) { expectedSet = null; } // put current token into EF-Parser if (t.Location < targetPosition) { p.InformToken(t); } if (p.Errors.Any()) { foreach (var e in p.Errors) LoggingService.Warn("not expected: " + e); } if (p.NextTokenIsPotentialStartOfExpression) return new ExpressionResult("", new DomRegion(targetPosition.Line, targetPosition.Column), context, expectedSet); int lastExpressionStartOffset = LocationToOffset(p.CurrentBlock.lastExpressionStart); if (lastExpressionStartOffset < 0) return new ExpressionResult("", new DomRegion(targetPosition.Line, targetPosition.Column), context, expectedSet); return MakeResult(text, lastExpressionStartOffset, offset, context, expectedSet); }
/// <summary>
/// Creates an assembler parser; installs a peeking, whitespace-ignoring
/// token-consumption strategy on the lexer.
/// </summary>
public Parser(ILexer lexer, IOperandFactory directOperandFactory, IOperandFactory indirectOperandFactory)
{
    this.lexer = lexer;
    this.directOperandFactory = directOperandFactory;
    this.indirectOperandFactory = indirectOperandFactory;
    lexer.ConsumeTokenStrategy = new PeekTokenStrategy(new IgnoreWhiteSpaceTokenStrategy());
    this.statments = new List<Statment>();
}
/// <summary>
/// Creates a Pratt-style parser with empty parselet registries.
/// </summary>
/// <param name="lexer">Token source; must not be null.</param>
protected Parser(ILexer lexer)
{
    _lexer = lexer ?? throw new ArgumentNullException("lexer");
    _tokenQueue = new List<Token>();
    _prefixParselets = new Dictionary<string, IPrefixParselet>();
    _infixParselets = new Dictionary<string, IInfixParselet>();
    _statementParselets = new Dictionary<string, StatementParselet>();
}
/// <summary>
/// Parses EC# token input into a node list. When comments are preserved, the
/// preprocessor's collected trivia is re-injected into the parsed results.
/// </summary>
public IListSource<LNode> Parse(ILexer<Token> input, IMessageSink msgs, ParsingMode inputType = null, bool preserveComments = true)
{
    var preprocessed = new EcsPreprocessor(input, preserveComments);
    var treeified = new TokensToTree(preprocessed, false);
    var results = Parse(treeified.Buffered(), input.SourceFile, msgs, inputType);
    if (!preserveComments)
        return results;

    // Re-attach comments/newlines captured during preprocessing.
    var injector = new EcsTriviaInjector(preprocessed.TriviaList, input.SourceFile, (int)TokenType.Newline, "/*", "*/", "//");
    return injector.Run(results.GetEnumerator()).Buffered();
}
/// <summary>
/// Yields a cloned, advanced lexer for each token type the current token matches
/// (optionally constrained to <c>TokenValue</c>).
/// </summary>
public IEnumerable<ILexer> Match(GrammarProcessor grammar, ILexer lexer)
{
    foreach (var tokenType in TokenTypes)
    {
        bool matches = TokenValue == null
            ? lexer.Current.Is(tokenType)
            : lexer.Current.Is(tokenType, TokenValue);
        if (!matches)
            continue;

        // Clone so each alternative continues from its own position.
        var advanced = (ILexer)lexer.Clone();
        advanced.MoveNext();
        yield return advanced;
    }
}
/// <summary>
/// Parses token input into a node list. When comments are preserved, trivia is
/// saved during lexing and injected back into the results; otherwise whitespace
/// is simply filtered out.
/// </summary>
public IListSource<LNode> Parse(ILexer<Token> input, IMessageSink msgs, ParsingMode inputType = null, bool preserveComments = true)
{
    if (!preserveComments)
    {
        var filtered = new WhitespaceFilter(input);
        return Parse(filtered.Buffered(), input.SourceFile, msgs, inputType);
    }

    var saver = new TriviaSaver(input, (int)TokenType.Newline);
    var results = Parse(saver.Buffered(), input.SourceFile, msgs, inputType);
    var injector = new StandardTriviaInjector(saver.TriviaList, saver.SourceFile, (int)TokenType.Newline, "/*", "*/", "//");
    return injector.Run(results.GetEnumerator()).Buffered();
}
/// <summary>
/// Returns the name of the first rule that matches the lexer's current input,
/// optionally restricted to statement rules; null when nothing matches.
/// </summary>
public string GetMatch(ILexer lexer, bool onlyStatements = false)
{
    return _table
        .Where(rule => !onlyStatements || rule.IsStatement)
        .Select(r => r.Name)
        .Distinct()
        .FirstOrDefault(ruleName => IsMatch(lexer, ruleName));
}
/// <summary>
/// Tries to create a lexer for a code behind file.
/// </summary>
/// <param name="baseLexer">The base T4 lexer.</param>
/// <returns>A C# lexer for the current code block, or <c>null</c> if none could be created.</returns>
public ILexer TryCreateCodeBehindLexer(ILexer baseLexer)
{
    if (baseLexer.TokenType != T4TokenNodeTypes.Code)
        return null;

    LanguageService service = _codeBehindLanguage.LanguageService();
    if (service == null)
        return null;

    // Project just the current code block out of the mixed buffer.
    var codeRange = new TextRange(_mixedLexer.PrimaryLexer.TokenStart, _mixedLexer.PrimaryLexer.AdvanceWhile(T4TokenNodeTypes.Code));
    var buffer = new ProjectedBuffer(_mixedLexer.Buffer, codeRange);
    ILexer lexer = service.GetPrimaryLexerFactory().CreateLexer(buffer);
    lexer.Start();
    return lexer;
}
/// <summary>
/// Creates a statement parser and registers all statement-level parselets
/// plus the right-associative assignment operator.
/// </summary>
public StatementParser(ILexer lexer) : base(lexer)
{
    // Blocks and control flow.
    RegisterParselet("LEFTBRACE", new BlockParselet("RIGHTBRACE"));
    RegisterParselet("WHILE", new WhileParselet());
    RegisterParselet("IF", new IfParselet());

    // Definitions.
    RegisterParselet("FUNCTION", new FunctionDefinitionParselet());
    RegisterParselet("CLASS", new ClassDefinitionParselet());
    RegisterParselet("RETURN", new ReturnStatementParselet());

    // Declarations: VAL is immutable, VAR is mutable.
    RegisterParselet("VAL", new VariableDeclarationParselet(true));
    RegisterParselet("VAR", new VariableDeclarationParselet(false));

    RegisterParselet("LEFTPAREN", new FunctionCallParselet());

    // Assignment binds right-to-left.
    InfixRight<AssignmentExpr>("ASSIGNMENT", Precedence.Assignment);
}
/// <summary>
/// Parses a member identifier from text under the given qualifier. If the lexer
/// has leftover tokens afterwards, an <see cref="UnexpectedToken"/> is thrown
/// carrying the partial result.
/// </summary>
public ReferenceName ParseMemberIdentifier(string text, IQualifier qualifier)
{
    lexer = new CSharpLexer(new StringBuffer(text));
    Start();
    TreeElement firstIdentifier = ParseIdentifier();
    ReferenceName referenceName = CreateMemeberIdentifier(firstIdentifier, qualifier);

    // All input must be consumed; trailing tokens mean the text wasn't a plain member identifier.
    if (lexer.TokenType == null)
        return referenceName;

    var error = new UnexpectedToken("Unexpected token");
    error.ParsingResult = referenceName;
    throw error;
}
/// <summary>
/// Refreshes the method/indexer insight window when a comma is typed inside a call.
/// Re-lexes the current method up to the caret, tracking nested (), [] and {} pairs to
/// find the innermost call, then shows the matching insight data provider.
/// </summary>
/// <param name="editor">The editor whose caret position determines the call context.</param>
/// <param name="ch">The character that triggered the refresh.</param>
/// <returns><c>true</c> when an insight window was shown; otherwise <c>false</c>.</returns>
/// <remarks>
/// NOTE(review): the offset derived from the tracked call start is overwritten by
/// <c>docText.LastIndexOf('(')</c> below — presumably a workaround for the commented-out
/// point-to-offset conversion; verify this still targets the intended call site.
/// </remarks>
protected bool InsightRefreshOnComma(CodeEditorControl editor, char ch) { // Show MethodInsightWindow or IndexerInsightWindow NRefactoryResolver r = new NRefactoryResolver(ScriptControl.Parser.ProjectParser.CurrentProjectContent, _languageProperties); Location cursorLocation = new Location(editor.ActiveViewControl.Caret.Position.X + 1, editor.ActiveViewControl.Caret.Position.Y + 1); if (r.Initialize(editor.ActiveViewControl.FileName, cursorLocation.Y, cursorLocation.X)) { TextReader currentMethod = r.ExtractCurrentMethod(ScriptControl.Parser.ProjectParser.GetFileContents(editor.FileName)); if (currentMethod != null) { ILexer lexer = ParserFactory.CreateLexer(_language, currentMethod); Token token; InspectedCall call = new InspectedCall(Location.Empty, null); call.parent = call; while ((token = lexer.NextToken()) != null && token.kind != _eofToken && token.Location < cursorLocation) { if (token.kind == _commaToken) { call.commas.Add(token.Location); } else if (token.kind == _openParensToken || token.kind == _openBracketToken || token.kind == _openBracesToken) { call = new InspectedCall(token.Location, call); } else if (token.kind == _closeParensToken || token.kind == _closeBracketToken || token.kind == _closeBracesToken) { call = call.parent; } } int offset = LocationToOffset(editor, call.start); string docText = ScriptControl.Parser.ProjectParser.GetFileContents(editor.FileName); offset = docText.LastIndexOf('('); //int offset = editor.ActiveViewControl.Document.PointToIntPos(new TextPoint(call.start.X,call.start.Y));//, call.start); if (offset >= 0 && offset < docText.Length) { char c = (char)docText.Substring(offset, 1).ToCharArray(0, 1)[0]; if (c == '(') { ShowInsight(editor, new MethodInsightDataProvider(offset, true), ResolveCallParameters(editor, call), ch); return(true); } else if (c == '[') { ShowInsight(editor, new IndexerInsightDataProvider(offset, true), ResolveCallParameters(editor, call), ch); return(true); } else { //LoggingService.Warn("Expected '(' 
or '[' at start position"); } } } } return(false); }
public void TestDescending()
{
    // "descending" must lex to a single Descending keyword token.
    var lexer = GenerateLexer(new StringReader("descending"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Descending, kind);
}
public void TestInto()
{
    // "into" must lex to a single Into keyword token.
    var lexer = GenerateLexer(new StringReader("into"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Into, kind);
}
/// <summary>Creates a parser reading from the given lexer.</summary>
public Parser(ILexer lexer) => this.lexer = lexer;
/// <summary>
/// Creates the parser base, sharing the lexer's error collection and routing
/// syntax errors through this parser's SynErr handler.
/// </summary>
internal AbstractParser(ILexer lexer)
{
    this.lexer = lexer;
    this.errors = lexer.Errors;
    this.errors.SynErr = new ErrorCodeProc(SynErr);
}
public void TestYield()
{
    // "yield" must lex to a single Yield keyword token.
    var lexer = GenerateLexer(new StringReader("yield"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Yield, kind);
}
/// <summary>No filtering for this language: the lexer is returned unchanged.</summary>
public override ILexer CreateFilteringLexer(ILexer lexer) => lexer;
public void TestLogicalOr()
{
    // "||" must lex to a single LogicalOr operator token.
    var lexer = GenerateLexer(new StringReader("||"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.LogicalOr, kind);
}
public void TestBitwiseComplement()
{
    // "~" must lex to a single BitwiseComplement operator token.
    var lexer = GenerateLexer(new StringReader("~"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.BitwiseComplement, kind);
}
public void TestLessThan()
{
    // "<" must lex to a single LessThan operator token.
    var lexer = GenerateLexer(new StringReader("<"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.LessThan, kind);
}
public void TestNot()
{
    // "!" must lex to a single Not operator token.
    var lexer = GenerateLexer(new StringReader("!"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Not, kind);
}
public void TestCloseParenthesis()
{
    // ")" must lex to a single CloseParenthesis token.
    var lexer = GenerateLexer(new StringReader(")"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.CloseParenthesis, kind);
}
public void TestCloseSquareBracket()
{
    // "]" must lex to a single CloseSquareBracket token.
    var lexer = GenerateLexer(new StringReader("]"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.CloseSquareBracket, kind);
}
public void TestCloseCurlyBrace()
{
    // "}" must lex to a single CloseCurlyBrace token.
    var lexer = GenerateLexer(new StringReader("}"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.CloseCurlyBrace, kind);
}
public void TestGroup()
{
    // "group" must lex to a single Group keyword token.
    var lexer = GenerateLexer(new StringReader("group"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Group, kind);
}
public void TestFrom()
{
    // "from" must lex to a single From keyword token.
    var lexer = GenerateLexer(new StringReader("from"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.From, kind);
}
public void TestSelect()
{
    // "select" must lex to a single Select keyword token.
    var lexer = GenerateLexer(new StringReader("select"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Select, kind);
}
public void TestDoubleQuestion()
{
    // "??" must lex to a single DoubleQuestion operator token.
    var lexer = GenerateLexer(new StringReader("??"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.DoubleQuestion, kind);
}
/// <summary>Creates the Spring language parser for the given lexer.</summary>
public override IParser CreateParser(ILexer lexer, IPsiModule module, IPsiSourceFile sourceFile) => new SpringParser(lexer);
public void TestIncrement()
{
    // "++" must lex to a single Increment operator token.
    var lexer = GenerateLexer(new StringReader("++"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Increment, kind);
}
/// <summary>
/// Creates a filtering lexer for ShaderLab, optionally backed by a preprocessor.
/// </summary>
public ShaderLabFilteringLexer(ILexer lexer, [CanBeNull] ShaderLabPreProcessor preProcessor)
    : base(lexer)
{
    myPreProcessor = preProcessor;
}
public void TestPartial()
{
    // "partial" must lex to a single Partial keyword token.
    var lexer = GenerateLexer(new StringReader("partial"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Partial, kind);
}
/// <summary>
/// This method follows the calculation chain to get the order of the calculation.
/// Goto (!) is used internally to prevent stackoverflow on extremely large dependency trees (that is, many recursive formulas).
/// </summary>
/// <param name="depChain">The dependency chain object</param>
/// <param name="lexer">The formula tokenizer</param>
/// <param name="wb">The workbook where the formula comes from</param>
/// <param name="ws">The worksheet where the formula comes from</param>
/// <param name="f">The cell function object</param>
/// <param name="options">Calculation options; controls whether circular references are tolerated.</param>
/// <exception cref="CircularReferenceException">Thrown when a formula refers to itself or to a cell already on the chain (unless allowed by <paramref name="options"/>).</exception>
private static void FollowChain(DependencyChain depChain, ILexer lexer, ExcelWorkbook wb, ExcelWorksheet ws, FormulaCell f, ExcelCalculationOption options) { Stack <FormulaCell> stack = new Stack <FormulaCell>(); iterateToken: while (f.tokenIx < f.Tokens.Count) { var t = f.Tokens[f.tokenIx]; if (t.TokenType == TokenType.ExcelAddress) { var adr = new ExcelFormulaAddress(t.Value); if (adr.Table != null) { adr.SetRCFromTable(ws._package, new ExcelAddressBase(f.Row, f.Column, f.Row, f.Column)); } if (adr.WorkSheet == null && adr.Collide(new ExcelAddressBase(f.Row, f.Column, f.Row, f.Column)) != ExcelAddressBase.eAddressCollition.No) { throw (new CircularReferenceException(string.Format("Circular Reference in cell {0}", ExcelAddressBase.GetAddress(f.Row, f.Column)))); } if (adr._fromRow > 0 && adr._fromCol > 0) { if (string.IsNullOrEmpty(adr.WorkSheet)) { if (f.ws == null) { f.ws = ws; } else if (f.ws.SheetID != f.SheetID) { f.ws = wb.Worksheets.GetBySheetID(f.SheetID); } } else { f.ws = wb.Worksheets[adr.WorkSheet]; } if (f.ws != null) { f.iterator = new CellsStoreEnumerator <object>(f.ws._formulas, adr.Start.Row, adr.Start.Column, adr.End.Row, adr.End.Column); goto iterateCells; } } } else if (t.TokenType == TokenType.NameValue) { string adrWb, adrWs, adrName; ExcelNamedRange name; ExcelAddressBase.SplitAddress(t.Value, out adrWb, out adrWs, out adrName, f.ws == null ? 
"" : f.ws.Name); if (!string.IsNullOrEmpty(adrWs)) { if (f.ws == null) { f.ws = wb.Worksheets[adrWs]; } if (f.ws.Names.ContainsKey(t.Value)) { name = f.ws.Names[adrName]; } else if (wb.Names.ContainsKey(adrName)) { name = wb.Names[adrName]; } else { name = null; } if (name != null) { f.ws = name.Worksheet; } } else if (wb.Names.ContainsKey(adrName)) { name = wb.Names[t.Value]; if (string.IsNullOrEmpty(adrWs)) { f.ws = name.Worksheet; } } else { name = null; } if (name != null) { if (string.IsNullOrEmpty(name.NameFormula)) { f.iterator = new CellsStoreEnumerator <object>(f.ws._formulas, name.Start.Row, name.Start.Column, name.End.Row, name.End.Column); goto iterateCells; } else { var id = ExcelAddressBase.GetCellID(name.LocalSheetId, name.Index, 0); if (!depChain.index.ContainsKey(id)) { var rf = new FormulaCell() { SheetID = name.LocalSheetId, Row = name.Index, Column = 0 }; rf.Formula = name.NameFormula; rf.Tokens = lexer.Tokenize(rf.Formula).ToList(); depChain.Add(rf); stack.Push(f); f = rf; goto iterateToken; } else { if (stack.Count > 0) { //Check for circular references foreach (var par in stack) { if (ExcelAddressBase.GetCellID(par.SheetID, par.Row, par.Column) == id) { throw (new CircularReferenceException(string.Format("Circular Reference in name {0}", name.Name))); } } } } } } } f.tokenIx++; } depChain.CalcOrder.Add(f.Index); if (stack.Count > 0) { f = stack.Pop(); goto iterateCells; } return; iterateCells: while (f.iterator.Next()) { var v = f.iterator.Value; if (v == null || v.ToString().Trim() == "") { continue; } var id = ExcelAddressBase.GetCellID(f.ws.SheetID, f.iterator.Row, f.iterator.Column); if (!depChain.index.ContainsKey(id)) { var rf = new FormulaCell() { SheetID = f.ws.SheetID, Row = f.iterator.Row, Column = f.iterator.Column }; if (f.iterator.Value is int) { rf.Formula = f.ws._sharedFormulas[(int)v].GetFormula(f.iterator.Row, f.iterator.Column); } else { rf.Formula = v.ToString(); } rf.ws = f.ws; rf.Tokens = 
lexer.Tokenize(rf.Formula).ToList(); ws._formulaTokens.SetValue(rf.Row, rf.Column, rf.Tokens); depChain.Add(rf); stack.Push(f); f = rf; goto iterateToken; } else { if (stack.Count > 0) { //Check for circular references foreach (var par in stack) { if (ExcelAddressBase.GetCellID(par.ws.SheetID, par.iterator.Row, par.iterator.Column) == id) { if (options.AllowCirculareReferences == false) { throw (new CircularReferenceException(string.Format("Circular Reference in cell {0}!{1}", par.ws.Name, ExcelAddress.GetAddress(f.Row, f.Column)))); } else { f = stack.Pop(); goto iterateCells; } } } } } } f.tokenIx++; goto iterateToken; }
/// <summary>Creates a parser over the given lexer (use the factory methods to instantiate).</summary>
private Parser(ILexer lexer) => _lexer = lexer;
public void TestBitwiseOr()
{
    // "|" must lex to a single BitwiseOr operator token.
    var lexer = GenerateLexer(new StringReader("|"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.BitwiseOr, kind);
}
public void TestWhere()
{
    // "where" must lex to a single Where keyword token.
    var lexer = GenerateLexer(new StringReader("where"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Where, kind);
}
public void TestXor()
{
    // "^" must lex to a single Xor operator token.
    var lexer = GenerateLexer(new StringReader("^"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Xor, kind);
}
public void TestRemove()
{
    // "remove" must lex to a single Remove keyword token.
    var lexer = GenerateLexer(new StringReader("remove"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Remove, kind);
}
public void TestOrderby()
{
    // "orderby" must lex to a single Orderby keyword token.
    var lexer = GenerateLexer(new StringReader("orderby"));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Orderby, kind);
}
public void TestComma()
{
    // "," must lex to a single Comma token.
    var lexer = GenerateLexer(new StringReader(","));
    var kind = lexer.NextToken().Kind;
    Assert.AreEqual(Tokens.Comma, kind);
}