/// <summary>
/// Walks a freshly parsed tree and re-inserts the tokens that the parser's
/// filtering lexer skipped (presumably whitespace/comments — the inserter
/// class decides exactly which).
/// </summary>
/// <param name="node">Root of the parsed tree; must be detached (no parent).</param>
/// <param name="lexer">The original, unfiltered lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="trimTokens">
/// When true, the inserter runs over a temporary <c>DummyContainer</c> so
/// leading/trailing filtered tokens stay out of the tree; otherwise a
/// temporary EOF token is appended so tokens are inserted up to end-of-file.
/// </param>
/// <param name="interruptChecker">Allows the tree walk to be interrupted.</param>
/// <param name="intern">Interner used to share token text instances.</param>
public static void Run(TreeElement node, ILexer lexer, ITokenOffsetProvider offsetProvider, bool trimTokens, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Only a composite root can receive child tokens; the type pattern
    // replaces the original "as" cast followed by a null check.
    if (!(node is CompositeElement root))
    {
        return;
    }

    var inserter = new NTriplesMissingTokensInserter(lexer, offsetProvider, interruptChecker, intern);
    lexer.Start();

    if (trimTokens)
    {
        using (var container = new DummyContainer(root))
        {
            inserter.Run(container);
        }
    }
    else
    {
        // Append a temporary EOF token so filtered tokens are inserted
        // right up to the end of the file, then remove it again.
        var terminator = new EofToken(lexer.Buffer);
        root.AppendNewChild(terminator);
        inserter.Run(root);
        root.DeleteChildRange(terminator, terminator);
    }
}
/// <summary>
/// Parses the whole buffer into a <c>LexFile</c>, interning identifier
/// tokens for the duration of the parse and re-inserting the tokens the
/// filtering lexer skipped.
/// </summary>
/// <returns>The parsed file with missing tokens inserted.</returns>
public IFile ParseFile()
{
    return myCommonIdentifierIntern.DoWithIdentifierIntern(intern =>
    {
        myTokenIntern = intern;
        try
        {
            var file = (LexFile)parseLexFile();
            InsertMissingTokens(file, false, intern);
            return file;
        }
        finally
        {
            // Clear the intern even if parsing throws, so a later parse
            // can never observe a stale interner from a failed run.
            myTokenIntern = null;
        }
    });
}
/// <summary>
/// Parses the whole buffer into an <c>NTriplesFile</c>, interning
/// identifier tokens for the duration of the parse and re-inserting the
/// tokens the filtering lexer skipped.
/// </summary>
/// <returns>The parsed file with missing tokens inserted.</returns>
public IFile ParseFile()
{
    return commonIdentifierIntern.DoWithIdentifierIntern(intern =>
    {
        myTokenIntern = intern;
        try
        {
            var file = (NTriplesFile)parseNTriplesFile();
            InsertMissingTokens(file, false, intern);
            return file;
        }
        finally
        {
            // Clear the intern even if parsing throws, so a later parse
            // can never observe a stale interner from a failed run.
            myTokenIntern = null;
        }
    });
}
public TokenFactory(ITokenIntern identifierIntern) =>
/// <summary>
/// Creates an inserter bound to the original (unfiltered) lexer; offset
/// provider, interrupt checker and token intern are handled by the base.
/// </summary>
/// <param name="lexer">The original lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="interruptChecker">Allows the insertion walk to be interrupted.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private LexMissingTokensInserter(ILexer lexer, ITokenOffsetProvider offsetProvider, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
    : base(offsetProvider, interruptChecker, intern)
{
    myLexer = lexer;
}
/// <summary>
/// Re-runs the original lexer over the parsed tree to restore the tokens
/// the filtering lexer dropped, using a fresh interrupt checker.
/// </summary>
/// <param name="root">Root of the parsed tree.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private void InsertMissingTokens(TreeElement root, ITokenIntern intern)
{
    // NOTE(review): this call passes no pre-processor, unlike the
    // six-argument Run overload seen elsewhere — confirm the intended
    // overload is resolved here.
    ShaderLabMissingTokensInserter.Run(root, myOriginalLexer, this, new SeldomInterruptChecker(), intern);
}
/// <summary>
/// Walks a freshly parsed tree and re-inserts the tokens that the parser's
/// filtering lexer skipped, consulting the pre-processor during insertion.
/// </summary>
/// <param name="node">Root of the parsed tree; must be detached (no parent).</param>
/// <param name="lexer">The original, unfiltered lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="preProcessor">Pre-processor state gathered during parsing.</param>
/// <param name="interruptChecker">Allows the tree walk to be interrupted.</param>
/// <param name="intern">Interner used to share token text instances.</param>
public static void Run(TreeElement node, ILexer lexer, ITokenOffsetProvider offsetProvider, ShaderLabPreProcessor preProcessor, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
{
    Assertion.Assert(node.parent == null, "node.parent == null");

    // Only a composite root can receive child tokens; the type pattern
    // replaces the original "as" cast followed by a null check.
    if (!(node is CompositeElement root))
    {
        return;
    }

    // Append an EOF token so we insert filtered tokens right up to
    // the end of the file
    var eof = new EofToken(lexer.Buffer.Length);
    root.AppendNewChild(eof);

    var inserter = new ShaderLabMissingTokensInserter(lexer, offsetProvider, preProcessor, interruptChecker, intern);

    // Reset the lexer, walk the tree and call ProcessLeafElement on each leaf element
    lexer.Start();
    inserter.Run(root);

    // The EOF token was only a sentinel for the walk — remove it again.
    root.DeleteChildRange(eof, eof);
}
/// <summary>
/// Creates an inserter bound to the original (unfiltered) lexer, keeping a
/// reference to the pre-processor for use during insertion.
/// </summary>
/// <param name="lexer">The original lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="preProcessor">Pre-processor state gathered during parsing.</param>
/// <param name="interruptChecker">Allows the insertion walk to be interrupted.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private ShaderLabMissingTokensInserter(ILexer lexer, ITokenOffsetProvider offsetProvider, ShaderLabPreProcessor preProcessor, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
    : base(offsetProvider, interruptChecker, intern)
{
    myPreProcessor = preProcessor;
    myLexer = lexer;
}
/// <summary>
/// Creates an inserter that reads tokens from the given original lexer;
/// offset provider and intern are handled by the base class.
/// </summary>
/// <param name="lexer">The original lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private T4MissingTokenInserter(ILexer lexer, ITokenOffsetProvider offsetProvider, ITokenIntern intern)
    : base(offsetProvider, intern)
{
    myLexer = lexer;
}
/// <summary>
/// Delegates to <c>LexMissingTokensInserter.Run</c> to put the tokens the
/// filtering lexer skipped back into the parsed tree.
/// </summary>
/// <param name="result">Root of the parsed tree.</param>
/// <param name="trimMissingTokens">Whether leading/trailing filtered tokens are kept out of the tree.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private void InsertMissingTokens(TreeElement result, bool trimMissingTokens, ITokenIntern intern)
{
    LexMissingTokensInserter.Run(
        result,
        myOriginalLexer,
        this,
        trimMissingTokens,
        myCheckForInterrupt,
        intern);
}
/// <summary>
/// Post-processes a freshly parsed element by re-inserting missing tokens,
/// then hands the same element back to the caller.
/// </summary>
/// <param name="compositeElement">The element to prepare.</param>
/// <param name="trimMissingTokens">Whether leading/trailing filtered tokens are kept out of the tree.</param>
/// <param name="intern">Interner used to share token text instances.</param>
/// <returns>The same <paramref name="compositeElement"/> instance.</returns>
private TreeElement PrepareElement(TreeElement compositeElement, bool trimMissingTokens, ITokenIntern intern)
{
    InsertMissingTokens(compositeElement, trimMissingTokens, intern);
    return compositeElement;
}
/// <summary>
/// Delegates to <c>NTriplesMissingTokensInserter.Run</c> to put the tokens
/// the filtering lexer skipped back into the parsed tree.
/// </summary>
/// <param name="result">Root of the parsed tree.</param>
/// <param name="trimMissingTokens">Whether leading/trailing filtered tokens are kept out of the tree.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private void InsertMissingTokens(TreeElement result, bool trimMissingTokens, ITokenIntern intern)
{
    NTriplesMissingTokensInserter.Run(
        result,
        this.originalLexer,
        this,
        trimMissingTokens,
        this.myCheckForInterrupt,
        intern);
}
/// <summary>
/// Creates an inserter bound to the original (unfiltered) lexer; offset
/// provider, interrupt checker and token intern are handled by the base.
/// </summary>
/// <param name="lexer">The original lexer over the file buffer.</param>
/// <param name="offsetProvider">Maps tree nodes to buffer offsets.</param>
/// <param name="interruptChecker">Allows the insertion walk to be interrupted.</param>
/// <param name="intern">Interner used to share token text instances.</param>
private NTriplesMissingTokensInserter(ILexer lexer, ITokenOffsetProvider offsetProvider, SeldomInterruptChecker interruptChecker, ITokenIntern intern)
    : base(offsetProvider, interruptChecker, intern)
{
    this.myLexer = lexer;
}