/// <summary>
/// Verifies the engine accepts a grammar that is vulnerable to the
/// Aycock-Horspool nullable-rule problem:
///   S' -> S ; S -> S | A A A A ; A -> 'a' | E ; E -> (empty)
/// </summary>
public void AycockHorspoolAlgorithmShouldAcceptVulnerableGrammar()
{
    // Arrange: a lexer rule for the single terminal 'a' and the grammar.
    var aTerminal = new TerminalLexerRule(
        new CharacterTerminal('a'),
        new TokenType("a"));

    ProductionExpression
        SPrime = "S'",
        S = "S",
        A = "A",
        E = "E";
    SPrime.Rule = S;
    S.Rule = (Expr)S | A + A + A + A;
    A.Rule = (Expr)"a" | E;

    var grammar = new GrammarExpression(
        SPrime,
        new[] { SPrime, S, A, E })
        .ToGrammar();

    // Act: pulse the single token "a" through a fresh engine.
    var engine = new ParseEngine(grammar);
    engine.Pulse(new Token("a", 0, aTerminal.TokenType));

    // Assert: peek at the private chart via reflection, then check acceptance.
    var chart = new PrivateObject(engine).GetField("_chart") as Chart;
    Assert.IsNotNull(chart);
    Assert.AreEqual(2, chart.Count);
    Assert.IsTrue(engine.IsAccepted());
}
/// <summary>
/// Parses the given regular expression text into a <c>Regex</c> AST.
/// </summary>
/// <param name="regularExpression">The regular expression source text.</param>
/// <returns>The root of the parsed regular expression model.</returns>
/// <exception cref="Exception">
/// Thrown when a character cannot be consumed, when the input is not
/// accepted, or when the parse forest root is not an internal node.
/// </exception>
public Regex Parse(string regularExpression)
{
    var grammar = new RegexGrammar();
    var parseEngine = new ParseEngine(
        grammar,
        new ParseEngineOptions(optimizeRightRecursion: true));
    var parseRunner = new ParseRunner(parseEngine, regularExpression);

    // Feed the input one character at a time; fail fast on the first
    // character the engine cannot consume.
    while (!parseRunner.EndOfStream())
    {
        if (!parseRunner.Read())
            throw new Exception(
                $"Unable to parse regular expression. Error at position {parseRunner.Position}.");
    }

    if (!parseEngine.IsAccepted())
        // Trailing period added for consistency with the message above.
        throw new Exception(
            $"Error parsing regular expression. Error at position {parseRunner.Position}.");

    // Guard the downcast: letting a null slip into InternalTreeNode would
    // surface later as an opaque NullReferenceException during the walk.
    var parseForest = parseEngine.GetParseForestRootNode() as IInternalForestNode;
    if (parseForest == null)
        throw new Exception(
            "Unable to parse regular expression. The parse forest root is not an internal node.");

    // Walk the (disambiguated) parse tree and build the regex model.
    var parseTree = new InternalTreeNode(
        parseForest,
        new SelectFirstChildDisambiguationAlgorithm());
    var regexVisitor = new RegexVisitor();
    parseTree.Accept(regexVisitor);
    return regexVisitor.Regex;
}
#pragma warning disable CC0091 // Use static method
/// <summary>
/// Parses the given EBNF text into an <c>EbnfDefinition</c> model.
/// </summary>
/// <param name="ebnf">The EBNF source text.</param>
/// <returns>The parsed grammar definition.</returns>
/// <exception cref="Exception">
/// Thrown when a character cannot be consumed, when the input is not
/// accepted, or when the parse forest root is not an internal node.
/// </exception>
public EbnfDefinition Parse(string ebnf)
{
    var grammar = new EbnfGrammar();
    var parseEngine = new ParseEngine(
        grammar,
        new ParseEngineOptions(optimizeRightRecursion: true));
    var parseRunner = new ParseRunner(parseEngine, ebnf);

    // Feed the input one character at a time; fail fast on the first
    // character the engine cannot consume.
    while (!parseRunner.EndOfStream())
    {
        if (!parseRunner.Read())
            throw new Exception(
                $"Unable to parse Ebnf. Error at position {parseRunner.Position}.");
    }

    if (!parseEngine.IsAccepted())
        // Trailing period added for consistency with the message above.
        throw new Exception(
            $"Unable to parse Ebnf. Error at position {parseRunner.Position}.");

    // Guard the downcast: letting a null slip into InternalTreeNode would
    // surface later as an opaque NullReferenceException during the walk.
    var parseForest = parseEngine.GetParseForestRootNode() as IInternalForestNode;
    if (parseForest == null)
        throw new Exception(
            "Unable to parse Ebnf. The parse forest root is not an internal node.");

    // Walk the (disambiguated) parse tree and build the definition model.
    var parseTree = new InternalTreeNode(
        parseForest,
        new SelectFirstChildDisambiguationAlgorithm());
    var ebnfVisitor = new EbnfVisitor();
    parseTree.Accept(ebnfVisitor);
    return ebnfVisitor.Definition;
}
/// <summary>
/// Runs the sample BNF input through a fresh parse engine and reports
/// whether the input was accepted.
/// </summary>
/// <returns>True when the full sample parsed and was accepted.</returns>
public bool Parse()
{
    var grammar = new BnfGrammar();
    var runner = new ParseRunner(new ParseEngine(grammar), sampleBnf);

    // Consume characters until the stream ends or a read is rejected.
    while (!runner.EndOfStream())
    {
        if (!runner.Read())
            break;
    }

    return runner.ParseEngine.IsAccepted();
}
/// <summary>
/// Reads input until the end of the stream; returns false as soon as a
/// read fails, otherwise whether the parse engine accepted the input.
/// </summary>
public bool RunToEnd()
{
    while (!EndOfStream())
    {
        if (!Read())
            return false;
    }

    return ParseEngine.IsAccepted();
}
/// <summary>
/// Discards the current parse state by starting a fresh engine over the
/// same grammar.
/// </summary>
public void Reset() => ParseEngine = new ParseEngine(Grammar);
/// <summary>
/// Creates a tester for the supplied grammar with a ready-to-use engine.
/// </summary>
/// <param name="grammar">The grammar the tester will parse against.</param>
public ParseTester(IGrammar grammar)
{
    Grammar = grammar;
    ParseEngine = new ParseEngine(Grammar);
}
/// <summary>
/// Attempts to complete the current token from the in-flight lexemes:
/// keeps only accepted lexemes, pulses them through the parse engine,
/// attaches buffered trivia, and resets state for the next token.
/// Returns false when no lexeme is accepted or the engine rejects them.
/// </summary>
private bool TryParseExistingToken()
{
    // Nothing in flight for the current token: nothing to parse.
    var anyLexemes = this.tokenLexemes.Count > 0;
    if (!anyLexemes)
    {
        return(false);
    }

    // Partition in place: accepted lexemes stay in [0, size); rejected
    // ones are swapped to the tail and logically dropped by shrinking
    // `size`. Relative order of the survivors is not preserved.
    var i = 0;
    var size = this.tokenLexemes.Count;
    while (i < size)
    {
        var lexeme = this.tokenLexemes[i];
        if (lexeme.IsAccepted())
        {
            i++;
        }
        else
        {
            // Swap the rejected lexeme with the last live element —
            // unless it already is the last live element.
            if (i < size - 1)
            {
                this.tokenLexemes[i] = this.tokenLexemes[size - 1];
                this.tokenLexemes[size - 1] = lexeme;
            }
            size--;
        }
    }

    // Every lexeme was rejected: the token cannot be completed.
    var anyMatches = size > 0;
    if (!anyMatches)
    {
        return(false);
    }

    // Physically remove the rejected tail so only accepted lexemes remain.
    i = this.tokenLexemes.Count - 1;
    while (i >= size)
    {
        this.tokenLexemes.RemoveAt(i);
        i--;
    }

    // Advance the parse with the accepted lexemes.
    // NOTE(review): on a failed pulse the rejected lexemes have already
    // been removed from tokenLexemes — presumably intentional; confirm
    // callers do not rely on the pre-truncation list after a failure.
    if (!ParseEngine.Pulse(this.tokenLexemes))
    {
        return(false);
    }

    // Attach all buffered trivia as leading trivia on every accepted lexeme.
    for (i = 0; i < this.triviaAccumulator.Count; i++)
    {
        foreach (var tokenLexeme in this.tokenLexemes)
        {
            tokenLexeme.AddLeadingTrivia(this.triviaAccumulator[i]);
        }
    }

    this.triviaAccumulator.Clear();

    // Remember this batch as the previous token's lexemes, then clear the
    // working list so accumulation can start for the next token.
    this.previousTokenLexemes.Clear();
    this.previousTokenLexemes.AddRange(this.tokenLexemes);
    this.tokenLexemes.Clear();
    return(true);
}
/// <summary>
/// Checks whether <paramref name="character"/> can begin a new token
/// lexeme for any lexer rule the parse engine currently expects.
/// </summary>
private bool MatchesNewTokenLexemes(char character)
{
    var expectedLexerRules = ParseEngine.GetExpectedLexerRules();
    return MatchLexers(character, expectedLexerRules, this.tokenLexemes);
}