/// <summary>
/// Test setup: creates a fresh parse engine for the PDL grammar with
/// logging enabled so parse failures are easier to diagnose.
/// </summary>
public void Initialize_PdlTests()
{
    var options = new ParseEngineOptions(loggingEnabled: true);
    _parseEngine = new ParseEngine(pdlGrammar, options);
}
/// <summary>
/// Creates a client associated with the given address and port that uses the
/// supplied parse engine and keep-alive buffer.
/// </summary>
/// <param name="ipa">IP address this client is bound to.</param>
/// <param name="port">Port this client is bound to.</param>
/// <param name="parseEngine">Engine used to parse incoming data.</param>
/// <param name="keepAliveBuffer">Buffer reused for keep-alive traffic.</param>
public Client(IPAddress ipa, int port, IParseEngine parseEngine, byte[] keepAliveBuffer)
{
    IpAddress = ipa;
    Port = port;
    _parseEngine = parseEngine;
    _keepAliveBuffer = keepAliveBuffer;
}
/// <summary>
/// Test setup: creates a fresh parse engine for the EBNF grammar with
/// logging enabled.
/// </summary>
public void Initialize_EbnfTests()
{
    var options = new ParseEngineOptions(loggingEnabled: true);
    this.parseEngine = new ParseEngine(ebnfGrammar, options);
}
/// <summary>
/// Lexes the 10000.json fixture with <see cref="JsonLexer"/> and pulses every
/// non-whitespace token into the parser, failing the test on the first
/// rejected token or when the final parse is not accepted.
/// </summary>
/// <param name="parser">Engine that receives the lexed tokens.</param>
private static void RunParseWithCustomLexer(IParseEngine parser)
{
    var jsonLexer = new JsonLexer();
    var path = Path.Combine(Directory.GetCurrentDirectory(), "10000.json");

    using (var stream = File.OpenRead(path))
    using (var reader = new StreamReader(stream))
    {
        foreach (var token in jsonLexer.Lex(reader))
        {
            // Whitespace tokens are produced by the lexer but never parsed.
            if (token.TokenType == JsonLexer.Whitespace)
            {
                continue;
            }

            if (!parser.Pulse(token))
            {
                Assert.Fail($"unable to parse token {token.TokenType} at {token.Position}");
            }
        }
    }

    if (!parser.IsAccepted())
    {
        Assert.Fail("Parse was not accepted");
    }
}
/// <summary>
/// Re-targets this lexeme at a new grammar rule: clears the captured input
/// and builds a fresh parse engine from the rule's grammar.
/// </summary>
/// <param name="newGrammarRule">Rule whose grammar drives the new engine.</param>
public void Reset(IGrammarLexerRule newGrammarRule)
{
    LexerRule = newGrammarRule;
    _capture.Clear();
    _parseEngine = new ParseEngine(newGrammarRule.Grammar);
}
/// <summary>
/// Builds the parse engine for the given grammar. When no options are
/// supplied, defaults to right-recursion optimization with logging enabled.
/// </summary>
/// <param name="grammar">Grammar the engine will parse.</param>
/// <param name="options">Optional engine options; null selects the defaults above.</param>
protected void Initialize(IGrammar grammar, ParseEngineOptions options = null)
{
    var effectiveOptions = options
        ?? new ParseEngineOptions(optimizeRightRecursion: true, loggingEnabled: true);
    _parseEngine = new ParseEngine(grammar, effectiveOptions);
}
/// <summary>
/// Stores the CSV configuration and selects the parse engine.
/// </summary>
/// <param name="lineDelimiter">Character separating rows.</param>
/// <param name="delimiter">Character separating fields.</param>
/// <param name="quote">Character used to quote fields.</param>
/// <param name="hasHeaderRow">Whether the first row is a header.</param>
private void InitProperties(char lineDelimiter, char delimiter, char quote, bool hasHeaderRow)
{
    LineDelimiter = lineDelimiter;
    Delimiter = delimiter;
    Quote = quote;
    HasHeaderRow = hasHeaderRow;

    // The Excel parse engine is the default implementation.
    ParseEngine = new ParseEngines.ExcelParseEngine();
}
/// <summary>
/// Creates a runner that feeds characters from the reader into the engine,
/// starting at position 0, with the default lexeme factories registered.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="reader">Source of input characters.</param>
public ParseRunner(IParseEngine parseEngine, TextReader reader)
{
    ParseEngine = parseEngine;
    _reader = reader;
    Position = 0;

    // Lexeme working sets start out empty.
    _existingLexemes = new List<ILexeme>();
    _ignoreLexemes = new List<ILexeme>();

    _lexemeFactoryRegistry = new LexemeFactoryRegistry();
    RegisterDefaultLexemeFactories(_lexemeFactoryRegistry);
}
/// <summary>
/// Pulses each token into the engine, stopping at the first rejection.
/// </summary>
/// <param name="parseEngine">Engine that consumes the tokens.</param>
/// <param name="tokens">Token stream to feed in order.</param>
/// <returns>True when every token was accepted and the parse completed.</returns>
private static bool RunParse(IParseEngine parseEngine, IEnumerable<IToken> tokens)
{
    foreach (var token in tokens)
    {
        if (!parseEngine.Pulse(token))
        {
            return false;
        }
    }

    return parseEngine.IsAccepted();
}
/// <summary>
/// Creates a runner over the given reader with empty lexeme working sets,
/// starting at position 0.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="reader">Source of input characters.</param>
public ParseRunner(IParseEngine parseEngine, TextReader reader)
{
    ParseEngine = parseEngine;
    this.reader = reader;
    Position = 0;

    // All lexeme collections begin empty and are filled as input is read.
    this.tokenLexemes = new List<Lexeme>();
    this.ignoreLexemes = new List<Lexeme>();
    this.triviaLexemes = new List<Lexeme>();
    this.triviaAccumulator = new List<Lexeme>();
    this.previousTokenLexemes = new List<Lexeme>();
}
/// <summary>
/// Creates a parse interface over the given input reader. Position starts
/// at -1; the lexeme factory registry is populated with one factory per
/// supported lexer-rule kind.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="input">Source of input characters.</param>
public ParseInterface(IParseEngine parseEngine, TextReader input)
{
    ParseEngine = parseEngine;
    _textReader = input;
    Position = -1;

    _lexemeFactoryRegistry = new LexemeFactoryRegistry();
    _lexemeFactoryRegistry.Register(new TerminalLexemeFactory());
    _lexemeFactoryRegistry.Register(new ParseEngineLexemeFactory());
    _lexemeFactoryRegistry.Register(new StringLiteralLexemeFactory());
    _lexemeFactoryRegistry.Register(new DfaLexemeFactory());
}
/// <summary>
/// Creates a runner over the given input reader with the default lexeme
/// factories registered and empty lexeme working sets, starting at position 0.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="input">Source of input characters.</param>
public ParseRunner(IParseEngine parseEngine, TextReader input)
{
    ParseEngine = parseEngine;
    _textReader = input;
    Position = 0;

    _ignoreLexemes = new List<ILexeme>();
    _existingLexemes = new List<ILexeme>();

    _lexemeFactoryRegistry = new LexemeFactoryRegistry();
    RegisterDefaultLexemeFactories(_lexemeFactoryRegistry);
}
/// <summary>
/// Creates a runner over the given reader. Position starts at -1; lexeme
/// working sets begin empty, a StringBuilder-backed capture collects input
/// characters, and the default lexeme factories are registered.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="reader">Source of input characters.</param>
public ParseRunner(IParseEngine parseEngine, TextReader reader)
{
    ParseEngine = parseEngine;
    _reader = reader;
    Position = -1;

    _tokenLexemes = new List<ILexeme>();
    _ignoreLexemes = new List<ILexeme>();
    _triviaLexemes = new List<ILexeme>();
    _triviaAccumulator = new List<ILexeme>();

    // The capture wraps the builder so lexemes can slice the accumulated input.
    _builder = new StringBuilder();
    _capture = new StringBuilderCapture(_builder);

    _lexemeFactoryRegistry = new LexemeFactoryRegistry();
    RegisterDefaultLexemeFactories(_lexemeFactoryRegistry);
}
/// <summary>
/// Lexes the whole 10000.json fixture as a string with <see cref="JsonLexer"/>
/// and pulses every non-whitespace token into the parser, failing the test on
/// the first rejected token or when the final parse is not accepted.
/// </summary>
/// <param name="parser">Engine that receives the lexed tokens.</param>
private static void RunParseWithCustomLexer(IParseEngine parser)
{
    var jsonLexer = new JsonLexer();
    var path = Path.Combine(Directory.GetCurrentDirectory(), "10000.json");

    foreach (var token in jsonLexer.Lex(File.ReadAllText(path)))
    {
        // Whitespace tokens are produced by the lexer but never parsed.
        if (Equals(token.TokenName, JsonLexer.Whitespace))
        {
            continue;
        }

        if (!parser.Pulse(token))
        {
            Assert.Fail($"unable to parse token {token.TokenName} at {token.Position}");
        }
    }

    if (!parser.IsAccepted())
    {
        Assert.Fail("Parse was not accepted");
    }
}
/// <summary>
/// Runs the given input string through a fresh <see cref="ParseRunner"/>.
/// </summary>
/// <param name="parseEngine">Engine that consumes the lexed tokens.</param>
/// <param name="input">Text to parse.</param>
/// <returns>The result of <c>RunToEnd</c> on the runner.</returns>
private bool RunParse(IParseEngine parseEngine, string input)
{
    var runner = new ParseRunner(parseEngine, input);
    return runner.RunToEnd();
}
/// <summary>
/// Convenience overload: wraps the input string in a <see cref="StringReader"/>
/// and delegates to the TextReader-based constructor.
/// </summary>
public ParseRunner(IParseEngine parseEngine, string input) : this(parseEngine, new StringReader(input)) { }
/// <summary>
/// Test setup: creates a fresh parse engine for the regex grammar.
/// </summary>
public void Initialize_Regex_Tests()
{
    _parseEngine = new ParseEngine(_regexGrammar);
}
/// <summary>
/// Test setup: creates a fresh parse engine for the EBNF grammar.
/// </summary>
public void Initialize_EbnfTests()
{
    _parseEngine = new ParseEngine(ebnfGrammar);
}
/// <summary>
/// Feeds the token list through a <see cref="ParseTester"/> bound to the
/// given engine.
/// </summary>
/// <param name="parseEngine">Engine under test.</param>
/// <param name="tokens">Tokens to parse in order.</param>
private static void ParseInput(IParseEngine parseEngine, IReadOnlyList<IToken> tokens)
{
    var tester = new ParseTester(parseEngine);
    tester.RunParse(tokens);
}
/// <summary>
/// Resets this lexeme: clears the captured input and rebuilds the parse
/// engine from the concrete lexer rule's grammar.
/// </summary>
public override void Reset()
{
    _capture.Clear();
    _parseEngine = new ParseEngine(ConcreteLexerRule.Grammar);
}
/// <summary>
/// Creates a tester around an existing engine, exposing the engine's
/// grammar for convenience.
/// </summary>
/// <param name="parseEngine">Engine to drive during tests.</param>
public ParseTester(IParseEngine parseEngine)
{
    ParseEngine = parseEngine;
    Grammar = parseEngine.Grammar;
}
/// <summary>
/// Resets this lexeme by rebuilding the parse engine from the concrete
/// lexer rule's grammar.
/// </summary>
public override void Reset()
{
    _parseEngine = new ParseEngine(ConcreteLexerRule.Grammar);
}
/// <summary>
/// Creates a lexeme over the given capture segment and builds a parse
/// engine from the lexer rule's grammar.
/// </summary>
/// <param name="lexerRule">Grammar-backed rule this lexeme matches.</param>
/// <param name="segment">Captured input characters.</param>
/// <param name="offset">Offset of this lexeme within the capture.</param>
public ParseEngineLexeme(IGrammarLexerRule lexerRule, ICapture<char> segment, int offset)
    : base(lexerRule, segment, offset)
{
    _parseEngine = new ParseEngine(lexerRule.Grammar);
}
/// <summary>
/// Test setup: creates a fresh parse engine for the regex grammar.
/// </summary>
public void Initialize_Regex_Tests()
{
    this.parseEngine = new ParseEngine(this.regexGrammar);
}
/// <summary>
/// Convenience overload: wraps the input string in a <see cref="StringReader"/>
/// and delegates to the TextReader-based constructor.
/// </summary>
public ParseInterface(IParseEngine parseEngine, string input) : this(parseEngine, new StringReader(input)) { }
/// <summary>
/// Creates a lexeme for the given grammar-backed rule with an empty capture
/// buffer and a parse engine built from the rule's grammar.
/// </summary>
/// <param name="lexerRule">Grammar-backed rule this lexeme matches.</param>
public ParseEngineLexeme(IGrammarLexerRule lexerRule)
{
    LexerRule = lexerRule;
    _capture = new StringBuilder();
    _parseEngine = new ParseEngine(lexerRule.Grammar);
}
/// <summary>
/// Creates a lexeme that reuses an existing parse engine and tags its
/// output with the given token type; the capture buffer starts empty.
/// </summary>
/// <param name="parseEngine">Engine that recognizes this lexeme's input.</param>
/// <param name="tokenType">Token type emitted for matched input.</param>
public ParseEngineLexeme(IParseEngine parseEngine, TokenType tokenType)
{
    TokenType = tokenType;
    _parseEngine = parseEngine;
    _capture = new StringBuilder();
}