/// <summary>
/// Precomputes, for every production, a dotted rule per dot position, and
/// records for each left-hand side the unique set of symbols appearing on
/// its right-hand sides (its "symbol path").
/// </summary>
private static void CreateStatesSymbolsAndSymbolPaths(
    IGrammar grammar,
    out Dictionary<IProduction, Dictionary<int, DottedRule>> states,
    out Dictionary<ISymbol, UniqueList<ISymbol>> symbolPaths)
{
    states = new Dictionary<IProduction, Dictionary<int, DottedRule>>();
    symbolPaths = new Dictionary<ISymbol, UniqueList<ISymbol>>();

    for (var productionIndex = 0; productionIndex < grammar.Productions.Count; productionIndex++)
    {
        var production = grammar.Productions[productionIndex];
        var dottedRulesByPosition = states.AddOrGetExisting(production);
        var pathForLeftHandSide = symbolPaths.AddOrGetExisting(production.LeftHandSide);
        var rhsCount = production.RightHandSide.Count;

        // One dotted rule per dot position, including the completed
        // position where the dot sits past the last symbol (dot == rhsCount).
        for (var dot = 0; dot <= rhsCount; dot++)
        {
            dottedRulesByPosition.Add(dot, new DottedRule(production, dot));

            // Every post-dot symbol contributes to the LHS symbol path.
            if (dot < rhsCount)
            {
                pathForLeftHandSide.AddUnique(production.RightHandSide[dot]);
            }
        }
    }
}
/// <summary>
/// Asserts that executing the grammar yields no "right" results and
/// exactly one "wrong" result.
/// </summary>
private static void IsWrong(IGrammar grammar)
{
    var counts = grammar.Execute().Counts;
    counts.Rights.ShouldEqual(0);
    counts.Wrongs.ShouldEqual(1);
}
/// <summary>
/// Stores the script method name in the field corresponding to the
/// requested rule event (init / parse / recognition / error).
/// </summary>
void IRule.CreateScript(IGrammar grammar, string rule, string method, RuleMethodScript type)
{
    if (type == RuleMethodScript.onInit)
    {
        _onInit = method;
    }
    else if (type == RuleMethodScript.onParse)
    {
        _onParse = method;
    }
    else if (type == RuleMethodScript.onRecognition)
    {
        _onRecognition = method;
    }
    else if (type == RuleMethodScript.onError)
    {
        _onError = method;
    }
    else
    {
        // unknown method!!!
        System.Diagnostics.Debug.Assert(false);
    }
}
/// <summary>
/// Initializes the LR parser: loads the generic scripts and, when loading
/// succeeds, builds the semantic analyzer, translator and parsing table.
/// On script errors the parser is flagged with <c>_error</c> and no table
/// is constructed.
/// </summary>
public LRParser(ILexer lexer, IGrammar grammar) : base(lexer, grammar)
{
    var scriptLoader = new GenericScriptLoader();
    if (scriptLoader.Assembly == null)
    {
        Console.ForegroundColor = ConsoleColor.Red;
        Console.WriteLine("Errors on scripts, can't continue...");
        Console.ResetColor();
        _error = true;
    }
    else
    {
        _semanticAnalyzer = scriptLoader.GetSemanticAnalyzer();
        _translator = scriptLoader.GetTranslator();
        _parsingTable = new ParsingTable(grammar);
    }

    // BUG FIX: previously this branch ran even when script loading failed,
    // in which case _parsingTable was never assigned and SaveTable() threw
    // a NullReferenceException.
    if (DEBUG_TABLE && _parsingTable != null)
    {
        //_parsingTable.PrintTable();
        _parsingTable.SaveTable();
    }
}
/// <summary>
/// Creates a node in the <see cref="State.Valid"/> state. Delegates to the
/// canonical (grammar, syntax, token, state) constructor so all field
/// initialization lives in one place.
/// </summary>
public SyntaxNode(IGrammar grammar, ISyntax syntax, Token token)
    : this(grammar, syntax, token, State.Valid)
{
}
/// <summary>
/// Wraps the concept's grammar in an <see cref="Optional"/> and returns the
/// same concept to allow fluent chaining.
/// </summary>
public static Concept Optional(this Concept concept, IGrammar grammar)
{
    concept.Grammar = new Optional($"{concept.Name} (optional)", grammar);
    return concept;
}
/// <summary>
/// Renders the parse table as an aligned, pipe-delimited text grid for
/// debugging: one column per symbol (terminals first, then non-terminals,
/// separated by a double bar) and one row per parser state.
/// </summary>
internal static string ToDebugString<T>(this IParseTable<T> table, IGrammar<T> grammar, int numStates)
{
    // One column per symbol, excluding one symbol (the end-of-input marker).
    int numTokens = grammar.AllSymbols.Count() - 1;
    int numTerminals = grammar.AllSymbols.OfType<Terminal<T>>().Count();

    // Build a composite format string: a state column plus an 8-wide column
    // per symbol.
    var formatString = new StringBuilder("{0,8}|");
    for (int i = 0; i < numTokens; ++i)
    {
        if (i == numTerminals)
        {
            formatString.Append("|"); // Extra bar to separate actions and gotos
        }
        formatString.Append("|{" + (i + 1) + ",8}");
    }
    formatString.Append("|\n");
    string format = formatString.ToString();

    var sb = new StringBuilder();
    // Header row: debug names of all symbols.
    sb.Append(string.Format(format, new[] { "STATE" }.Concat(grammar.AllSymbols.Select(f => f.DebugName)).ToArray<object>()));
    for (int i = 0; i < numStates; ++i)
    {
        // Action cells: "acc" = accept, "rN" = reduce by rule N,
        // "sN" = shift to state N, blank = error; goto cells hold the
        // target state number. short.MaxValue/MinValue are the table's
        // accept/error sentinels.
        object[] formatParams = new[] { i.ToString() }.Concat(grammar.AllSymbols.OfType<Terminal<T>>().Select(f =>
        {
            var actionValue = table.Action[i, f.TokenNumber];
            if (actionValue == short.MaxValue)
            {
                return ("acc");
            }
            if (actionValue == short.MinValue)
            {
                return ("");
            }
            if (actionValue < 0)
            {
                // Negative values encode reductions, offset by one.
                return ("r" + -(actionValue + 1));
            }
            return ("s" + actionValue);
        }).Concat(grammar.AllSymbols.OfType<NonTerminal<T>>().Where(f => f.ProductionRules.All(p => p.ResultSymbol != grammar.AcceptSymbol)).Select(f => table.Goto[i, f.TokenNumber - numTerminals] == short.MinValue ? "" : table.Goto[i, f.TokenNumber - numTerminals].ToString()))).ToArray<object>();

        // If formatparams is all empty, we have run out of table to process.
        // This is perhaps not the best way to determine if the table has ended but the grammar
        // has no idea of the number of states, and I'd rather not mess up the interface
        // with methods to get the number of states.
        if (formatParams.Distinct().Count() == 2)
        {
            // All empty strings and one state.
            break;
        }
        sb.Append(string.Format(format, formatParams));
    }
    return (sb.ToString());
}
/// <summary>
/// Processes the top-level grammar element: copies the SRGS header
/// attributes onto the target grammar, parses every rule, appends the
/// grammar-level script, and finalizes parsing.
/// </summary>
private void ProcessGrammarElement(SrgsGrammar source, IGrammar grammar)
{
    // Header attributes.
    grammar.Culture = source.Culture;
    grammar.Mode = source.Mode;
    if (source.Root != null)
    {
        grammar.Root = source.Root.Id;
    }
    grammar.TagFormat = source.TagFormat;
    grammar.XmlBase = source.XmlBase;
    grammar.GlobalTags = source.GlobalTags;
    grammar.PhoneticAlphabet = source.PhoneticAlphabet;

    // Process child elements.
    foreach (SrgsRule srgsRule in source.Rules)
    {
        ParseRule(grammar, srgsRule).PostParse(grammar);
    }

    // Code-generation related attributes.
    grammar.AssemblyReferences = source.AssemblyReferences;
    grammar.CodeBehind = source.CodeBehind;
    grammar.Debug = source.Debug;
    grammar.ImportNamespaces = source.ImportNamespaces;
    grammar.Language = source.Language ?? "C#";
    grammar.Namespace = source.Namespace;

    // Add the grammar-level script content to the generic script buffer.
    _parser.AddScript(grammar, source.Script, null, -1);

    // Finish all initialization - should check for the Root and that all
    // rules are defined.
    grammar.PostParse(null);
}
/// <summary>
/// Factory method: builds a node whose state reflects the token's validity
/// (Valid for a valid token, Error otherwise).
/// </summary>
public static SyntaxNode Create(IGrammar grammar, ISyntax syntax, Token token)
{
    return new SyntaxNode(grammar, syntax, token, token.IsValid ? State.Valid : State.Error);
}
/// <summary>
/// Builds the fixture's grammar set: wires the grammar-addition callback,
/// applies [Hidden]/[Tag] attributes declared on the fixture type, then
/// turns every public instance method declared by this type into a grammar.
/// </summary>
public Fixture()
{
    _grammars.OnAddition = readGrammar;

    // Fixture-level attributes: [Hidden] marks the fixture private,
    // [Tag] copies each tag onto the fixture's policies.
    MethodExtensions.ForAttribute<HiddenAttribute>(GetType(), x => Policies.IsPrivate = true);
    MethodExtensions.ForAttribute<TagAttribute>(GetType(), x => x.Tags.Each(t => Policies.Tag(t)));

    // Each public instance method declared by this fixture becomes a grammar
    // keyed by the method's key.
    GetType().GetMethods(BindingFlags.Public | BindingFlags.Instance).Where(methodFromThis).Each(method =>
    {
        string grammarKey = method.GetKey();
        try
        {
            IGrammar grammar = GrammarBuilder.BuildGrammar(method, this);
            this[grammarKey] = grammar;

            // Method-level attributes mirror the fixture-level handling above.
            MethodExtensions.ForAttribute<HiddenAttribute>(method, x => _policies.HideGrammar(grammarKey));
            MethodExtensions.ForAttribute<TagAttribute>(method, x => x.Tags.Each(t => _policies.Tag(grammarKey, t)));
        }
        catch (Exception e)
        {
            // A broken grammar is recorded as an error instead of failing
            // the entire fixture's construction.
            _errors.Add(new GrammarError
            {
                ErrorText = e.ToString(),
                Message = "Could not create Grammar '{0}' of Fixture '{1}'".ToFormat(grammarKey, GetType().GetFixtureAlias())
            });
        }
    });
}
/// <summary>
/// Creates a node with no associated syntax, in the
/// <see cref="State.Valid"/> state. Delegates to the canonical
/// (grammar, syntax, token, state) constructor for consistency.
/// </summary>
public SyntaxNode(IGrammar grammar, Token token)
    : this(grammar, null, token, State.Valid)
{
}
/// <summary>
/// Builds the static BNF meta-grammar used to parse BNF definitions.
/// Fixes: removed the unused <c>doubleQuoteText</c>/<c>singleQuoteText</c>
/// locals and corrected the <c>notSingleQuuote</c> typo.
/// </summary>
static BnfGrammar()
{
    /*
     * <grammar>    ::= <rule> | <rule> <grammar>
     * <rule>       ::= "<" <rule-name> ">" "::=" <expression>
     * <expression> ::= <list> | <list> "|" <expression>
     * <line-end>   ::= <EOL> | <line-end> <line-end>
     * <list>       ::= <term> | <term> <list>
     * <term>       ::= <literal> | "<" <rule-name> ">"
     * <literal>    ::= '"' <text> '"' | "'" <text> "'"
     */
    var whitespace = CreateWhitespaceLexerRule();
    var ruleName = CreateRuleNameLexerRule();
    var implements = CreateImplementsLexerRule();
    var eol = CreateEndOfLineLexerRule();
    var notDoubleQuote = CreateNotDoubleQuoteLexerRule();
    var notSingleQuote = CreateNotSingleQuoteLexerRule();

    var grammar = new NonTerminal("grammar");
    var rule = new NonTerminal("rule");
    var identifier = new NonTerminal("identifier");
    var expression = new NonTerminal("expression");
    var lineEnd = new NonTerminal("line-end");
    var list = new NonTerminal("list");
    var term = new NonTerminal("term");
    var literal = new NonTerminal("literal");

    var lessThan = new TerminalLexerRule('<');
    var greaterThan = new TerminalLexerRule('>');
    var doubleQuote = new TerminalLexerRule('"');
    var slash = new TerminalLexerRule('\'');
    var pipe = new TerminalLexerRule('|');

    var productions = new[]
    {
        new Production(grammar, rule),
        new Production(grammar, rule, grammar),
        new Production(rule, identifier, implements, expression),
        new Production(expression, list),
        new Production(expression, list, pipe, expression),
        new Production(lineEnd, eol),
        new Production(lineEnd, lineEnd, lineEnd),
        new Production(list, term),
        new Production(list, term, list),
        new Production(term, literal),
        new Production(term, identifier),
        new Production(identifier, lessThan, ruleName, greaterThan),
        new Production(literal, doubleQuote, notDoubleQuote, doubleQuote),
        new Production(literal, slash, notSingleQuote, slash)
    };

    // Whitespace is skipped between tokens.
    var ignore = new[] { whitespace };
    _bnfGrammar = new Grammar(grammar, productions, ignore);
}
/// <summary>
/// Parses a single SRGS rule into an <see cref="IRule"/>: creates the rule,
/// attaches its event handler scripts, registers any inline script text,
/// and processes child elements with tags first (in sorted order).
/// </summary>
private IRule ParseRule(IGrammar grammar, SrgsRule srgsRule)
{
    string id = srgsRule.Id;
    bool hasScript = srgsRule.OnInit != null || srgsRule.OnParse != null || srgsRule.OnError != null || srgsRule.OnRecognition != null;
    RulePublic visibility = (srgsRule.Scope != 0) ? RulePublic.False : RulePublic.True;
    IRule rule = grammar.CreateRule(id, visibility, srgsRule.Dynamic, hasScript);

    // Attach each declared event handler script.
    if (srgsRule.OnInit != null)
    {
        rule.CreateScript(grammar, id, srgsRule.OnInit, RuleMethodScript.onInit);
    }
    if (srgsRule.OnParse != null)
    {
        rule.CreateScript(grammar, id, srgsRule.OnParse, RuleMethodScript.onParse);
    }
    if (srgsRule.OnError != null)
    {
        rule.CreateScript(grammar, id, srgsRule.OnError, RuleMethodScript.onError);
    }
    if (srgsRule.OnRecognition != null)
    {
        rule.CreateScript(grammar, id, srgsRule.OnRecognition, RuleMethodScript.onRecognition);
    }

    // Inline rule script, if any.
    if (srgsRule.Script.Length > 0)
    {
        _parser.AddScript(grammar, id, srgsRule.Script);
    }

    rule.BaseClass = srgsRule.BaseClass;

    foreach (SrgsElement element in GetSortedTagElements(srgsRule.Elements))
    {
        ProcessChildNodes(element, rule, rule);
    }
    return rule;
}
/// <summary>
/// Creates a parse engine over the given grammar with explicit options,
/// then primes the initial chart/state via <see cref="Initialize"/>.
/// </summary>
public ParseEngine(IGrammar grammar, ParseEngineOptions options)
{
    Grammar = grammar;
    Options = options;
    _nodeSet = new ForestNodeSet();
    Initialize();
}
/// <summary>
/// Copies the SRGS grammar header onto the target grammar, parses all its
/// rules, registers the grammar-level script, and finalizes parsing.
/// </summary>
private void ProcessGrammarElement(SrgsGrammar source, IGrammar grammar)
{
    grammar.Culture = source.Culture;
    grammar.Mode = source.Mode;
    if (source.Root != null)
    {
        grammar.Root = source.Root.Id;
    }
    grammar.TagFormat = source.TagFormat;
    grammar.XmlBase = source.XmlBase;
    grammar.GlobalTags = source.GlobalTags;
    grammar.PhoneticAlphabet = source.PhoneticAlphabet;

    // Parse and post-process every child rule.
    foreach (SrgsRule srgsRule in source.Rules)
    {
        IRule parsed = ParseRule(grammar, srgsRule);
        parsed.PostParse(grammar);
    }

    grammar.AssemblyReferences = source.AssemblyReferences;
    grammar.CodeBehind = source.CodeBehind;
    grammar.Debug = source.Debug;
    grammar.ImportNamespaces = source.ImportNamespaces;
    grammar.Language = source.Language ?? "C#";
    grammar.Namespace = source.Namespace;

    // Grammar-level script has no rule id and no source line.
    _parser.AddScript(grammar, source.Script, null, -1);
    grammar.PostParse(null);
}
/// <summary>
/// Builds the "B" relation matrix: entry (x, y) is set when y can appear
/// immediately after x on some rule's right-hand side, including pairs that
/// become adjacent because the grammaticals between them can derive epsilon.
/// </summary>
private static RelationMatrix B_Relation(IGrammar grammar, ISet<GrammaticalSymbol> epsilon_grammaticals)
{
    // Matrix is indexed by the global index over terminals + grammaticals.
    var relation = new RelationMatrix(grammar.Terminals.Count + grammar.Grammaticals.Count);
    foreach (var rule in grammar.Rules)
        for (int i = 1; i < rule.RightHandSide.Count; ++i)
        {
            // Directly adjacent pair on the right-hand side.
            relation[grammar.GlobalIndexOf(rule.RightHandSide[i - 1]), grammar.GlobalIndexOf(rule.RightHandSide[i])] = true;

            // Walk right while the intervening symbols are nullable
            // grammaticals: everything reachable across an epsilon-deriving
            // run is also treated as adjacent. Stop at the first
            // non-nullable symbol.
            for (int j = i + 1; j < rule.RightHandSide.Count; ++j)
                if (rule.RightHandSide[j - 1] is GrammaticalSymbol && epsilon_grammaticals.Contains((GrammaticalSymbol)rule.RightHandSide[j - 1]))
                {
                    relation[grammar.GlobalIndexOf(rule.RightHandSide[i - 1]), grammar.GlobalIndexOf(rule.RightHandSide[j])] = true;
                }
                else
                {
                    break;
                }
        }
    return relation;
}
/// <summary>
/// Installs TextMate support on the editor with the given theme and an
/// optional initial grammar, returning the installation handle.
/// </summary>
public static Installation InstallTextMate(this TextEditor editor, ThemeName theme, IGrammar grammar = null)
    => new Installation(editor, theme, grammar);
/// <summary>
/// Constructs a new <see cref="TerminalReader"/> which can recognize the specified <see cref="IGrammar"/>.
/// </summary>
/// <param name="grammar">The <see cref="IGrammar"/> to be recognized by the <see cref="TerminalReader"/>.</param>
/// <returns>A <see cref="TerminalReaderGeneratorResult"/> containing the <see cref="TerminalReader"/> and information
/// pertaining to the success or failure of the generation process.</returns>
public TerminalReaderGeneratorResult GenerateTerminalReader(IGrammar grammar)
{
    ITerminalReader reader = new TerminalReader(grammar.GetTerminals(), grammar.StopTerminal);
    return new TerminalReaderGeneratorResult(reader);
}
/// <summary>
/// Creates a repetition grammar over <paramref name="item"/>, requiring at
/// least <paramref name="mincount"/> occurrences, with the given whitespace
/// handling between occurrences.
/// </summary>
public Repetition(string name, IGrammar item, ISyntax whitespace, int mincount)
{
    this.Name = name;
    this.item = item;
    this.whitespace = whitespace;
    this.mincount = mincount;
}
/// <summary>
/// This is the method used by Piglets parserfactory to obtain preconfigured lexers.
/// </summary>
/// <param name="grammar">Grammar to generate lexers from</param>
/// <param name="lexerSettings">Additional lexing settings</param>
/// <returns>A lexer compatible with the given grammars tokenizing rules</returns>
internal static ILexer<T> ConfigureFromGrammar(IGrammar<T> grammar, ILexerSettings lexerSettings) =>
    // This works because the grammar tokens will receive the same token number
    // since they are assigned to this list in just the same way. AND BECAUSE the
    // end of input token is LAST. if this is changed it WILL break.
    // This might be considered dodgy later on, since it makes it kinda sorta hard to
    // use other lexers with Piglet. Let's see what happens, if anyone ever wants to write their
    // own lexer for Piglet.
    Configure(c =>
    {
        c.Runtime = lexerSettings.Runtime;
        c.IgnoreCase = lexerSettings.IgnoreCase;

        // Register one lexer token per terminal that carries a regular
        // expression; terminals with RegExp == null are skipped here.
        List<ITerminal<T>> terminals = grammar.AllSymbols.OfType<ITerminal<T>>().ToList();
        foreach (ITerminal<T> terminal in terminals)
        {
            if (terminal.RegExp != null)
            {
                c.Token(terminal.RegExp, terminal.OnParse);
            }
        }

        // The end-of-input token number is the index of the grammar's
        // end-of-input terminal (relies on the ordering guarantee above).
        c.EndOfInputTokenNumber = terminals.FindIndex(f => f == grammar.EndOfInputTerminal);

        // Patterns to skip entirely (e.g. whitespace, comments).
        foreach (string ignored in lexerSettings.Ignore)
        {
            c.Ignore(ignored);
        }
    });
/// <summary>
/// Appends a script fragment to the code generator's script buffer. When a
/// source location is available (line >= 0), the fragment is wrapped in the
/// target language's line-mapping directive so compiler errors point back
/// to the original grammar file.
/// </summary>
void IElementFactory.AddScript(IGrammar grammar, string script, string filename, int line)
{
    if (line >= 0)
    {
        if (_cg._language == "C#")
        {
            // C# line mapping: #line <n> "<file>"
            _cg._script.Append("#line ");
            _cg._script.Append(line.ToString(CultureInfo.InvariantCulture));
            _cg._script.Append(" \"");
            _cg._script.Append(filename);
            _cg._script.Append("\"\n");
            _cg._script.Append(script);
        }
        else
        {
            // VB line mapping: #ExternalSource("<file>", <n>) ... #End ExternalSource
            _cg._script.Append("#ExternalSource (");
            _cg._script.Append(" \"");
            _cg._script.Append(filename);
            _cg._script.Append("\",");
            _cg._script.Append(line.ToString(CultureInfo.InvariantCulture));
            _cg._script.Append(")\n");
            _cg._script.Append(script);
            // BUG FIX: the closing directive in VB is "#End ExternalSource";
            // the previous "#End #ExternalSource" is not valid VB and would
            // fail to compile the generated source.
            _cg._script.Append("#End ExternalSource\n");
        }
    }
    else
    {
        // No source location: append the script verbatim.
        _cg._script.Append(script);
    }
}
/// <summary>
/// Wraps the concept's grammar in a <see cref="Repetition"/> (minimum
/// <paramref name="mincount"/> occurrences) and returns the same concept
/// for fluent chaining.
/// </summary>
public static Concept Repetition(this Concept concept, IGrammar grammar, int mincount = 0)
{
    concept.Grammar = new Repetition(concept.Name + " (repetition)", grammar, concept.Whitespace, mincount);
    return concept;
}
/// <summary>
/// Presents the inner grammar as a table grammar using "Rows" as the leaf
/// collection name and the supplied title as the label.
/// </summary>
public static TableGrammar AsTable(this IGrammar inner, string title)
{
    var tableGrammar = new TableGrammar(inner.As<IGrammarWithCells>(), "Rows");
    tableGrammar.LabelName = title;
    return tableGrammar;
}
/// <summary>
/// Loads grammar rules from a text file of "name=ruletext" lines. Lines
/// with no '=' or with an empty (whitespace-only) name are skipped.
/// </summary>
public void Read(string path, IGrammar grammar)
{
    using (TextReader reader = File.OpenText(path))
    {
        for (string line = reader.ReadLine(); line != null; line = reader.ReadLine())
        {
            int separator = line.IndexOf('=');
            if (separator < 0)
            {
                continue;
            }

            string name = line.Substring(0, separator).Trim();
            if (name.Length == 0)
            {
                continue;
            }

            // Everything after the '=' is the rule text, compiled as-is.
            grammar[name] = Compiler.Compile(line.Substring(separator + 1));
        }
    }
}
/// <summary>
/// Canonical constructor: initializes all four fields of the node from the
/// given grammar, syntax, token and state.
/// </summary>
public SyntaxNode(IGrammar grammar, ISyntax syntax, Token token, State state)
{
    this.Grammar = grammar;
    this.Syntax = syntax;
    this.Token = token;
    this.State = state;
}
/// <summary>
/// Switches the active grammar, propagates it to the colorizing
/// transformer, and forces the editor view to repaint.
/// </summary>
public void SetGrammar(IGrammar grammar)
{
    _grammar = grammar;
    var transformer = GetOrCreateTransformer();
    transformer.SetGrammar(grammar);
    _editor.TextArea.TextView.Redraw();
}
/// <summary>
/// Creates a chatter box over the given grammar with the default set of
/// message processors (like and distance).
/// </summary>
public ChatterBox(IGrammar grammar)
{
    _grammar = grammar;
    _processors = new List<IProcessor>
    {
        new LikeProcessor(),
        new DistanceProcessor()
    };
}
/// <summary>
/// Creates an interlace grammar: occurrences of <paramref name="item"/>
/// separated by <paramref name="glue"/>, at least
/// <paramref name="mincount"/> items, with the given whitespace handling.
/// </summary>
public Interlace(string name, IGrammar glue, IGrammar item, ISyntax whitespace, int mincount)
{
    this.Name = name;
    this.Glue = glue;
    this.Item = item;
    this.whitespace = whitespace;
    this.mincount = mincount;
}
/// <summary>
/// Callback fired when a grammar is added: converts it to a structure,
/// copies its description, and registers it on the fixture graph.
/// </summary>
private void readGrammar(IGrammar grammar, FixtureGraph fixtureGraph, string key)
{
    var structure = grammar.ToStructure(_library);
    structure.Description = grammar.Description;
    fixtureGraph.AddStructure(key, structure);
}
/// <summary>
/// Loads the grammar registered under <paramref name="scopeName"/> from the
/// TextMate registry, applies it to the transformer, and repaints.
/// </summary>
public void SetGrammar(string scopeName)
{
    _grammar = _textMateRegistry.LoadGrammar(scopeName);
    var transformer = GetOrCreateTransformer();
    transformer.SetGrammar(_grammar);
    _editor.TextArea.TextView.Redraw();
}
/// <summary>
/// Registers a synchronous delegate-backed service under the given name,
/// paired with the grammar that parses its input.
/// </summary>
public void Register<TArg>(string name, IGrammar grammar, Func<TArg, string> exec)
{
    var item = new RegistrarItem
    {
        Grammar = grammar,
        Service = new DelegateService<TArg>(name, exec)
    };
    _registrar.Add(name, item);
}
/// <summary>
/// Registers an asynchronous, generator-style service (yielding results via
/// <see cref="IYield{TIn,TOut}"/>) under the given name, paired with the
/// grammar that parses its input.
/// </summary>
public void Register<TArg>(string name, IGrammar grammar, Func<IYield<string, string>, TArg, Task> exec)
{
    var item = new RegistrarItem
    {
        Grammar = grammar,
        Service = new GeneratorService<TArg>(name, exec)
    };
    _registrar.Add(name, item);
}
/// <summary>
/// Lexes the source file and parses it with the grammar, pruning the
/// resulting syntax tree when one was produced.
/// </summary>
/// <returns><c>true</c> when parsing succeeded.</returns>
public static bool Compile(this IGrammar grammar, ISourceFile file, out SyntaxNode ast)
{
    var lexer = new Lexer(file);
    bool success = grammar.Parse(ref lexer, out ast);

    // BUG FIX: a failed parse may leave the out parameter null; pruning
    // unconditionally threw a NullReferenceException in that case.
    ast?.Prune();
    return success;
}
/// <summary>
/// Constructs a new <see cref="TerminalReader"/> which can recognize the specified <see cref="IGrammar"/>.
/// </summary>
/// <param name="grammar">The <see cref="IGrammar"/> to be recognized by the <see cref="TerminalReader"/>.</param>
/// <returns>A <see cref="TerminalReaderGeneratorResult"/> containing the <see cref="TerminalReader"/> and information
/// pertaining to the success or failure of the generation process.</returns>
public TerminalReaderGeneratorResult GenerateTerminalReader(IGrammar grammar)
{
    var reader = new TerminalReader(grammar.GetTerminals(), grammar.StopTerminal);
    return new TerminalReaderGeneratorResult(reader);
}
/// <summary>
/// Lazily resolves the imported grammar the first time it is requested,
/// caching the result for subsequent calls.
/// </summary>
private IGrammar inner(ITestContext context)
{
    return _inner ?? (_inner = _import.FindGrammar(context));
}
/// <summary>
/// Lazily resolves the imported grammar against the fixture source on first
/// use, caching the result for subsequent calls.
/// </summary>
private IGrammar inner()
{
    return _inner ?? (_inner = _import.FindGrammar(_fixtureSource()));
}
/// <summary>
/// Builds the "directions" grammar (source and destination phrases) and
/// registers the directions service with the registrar.
/// </summary>
public void Register(IRegistrar registrar)
{
    var grammarBuilder = new GrammarBuilder<DirectionsArgs>();
    grammarBuilder.AddArgument(a => a.Source, "starting at", "from");
    grammarBuilder.AddArgument(a => a.Destination, "ending at", "to");

    _grammar = grammarBuilder.Grammar;
    registrar.Register<DirectionsArgs>("directions", _grammar, GetDirections);
}
/// <summary>
/// Builds the static BNF meta-grammar used to parse BNF definitions.
/// Fixes: removed the unused <c>doubleQuoteText</c>/<c>singleQuoteText</c>
/// locals and corrected the <c>notSingleQuuote</c> typo.
/// </summary>
static BnfGrammar()
{
    /*
     * Grammar
     * -------
     * <syntax>     ::= <rule> | <rule> <syntax>
     * <rule>       ::= "<" <rule-name> ">" "::=" <expression>
     * <expression> ::= <list> | <list> "|" <expression>
     * <line-end>   ::= <EOL> | <line-end> <line-end>
     * <list>       ::= <term> | <term> <list>
     * <term>       ::= <literal> | "<" <rule-name> ">"
     * <literal>    ::= '"' <text> '"' | "'" <text> "'"
     */
    var whitespace = CreateWhitespaceLexerRule();
    var ruleName = CreateRuleNameLexerRule();
    var implements = CreateImplementsLexerRule();
    var eol = CreateEndOfLineLexerRule();
    var notDoubleQuote = CreateNotDoubleQuoteLexerRule();
    var notSingleQuote = CreateNotSingleQuoteLexerRule();

    var syntax = new NonTerminal("syntax");
    var rule = new NonTerminal("rule");
    var identifier = new NonTerminal("identifier");
    var expression = new NonTerminal("expression");
    var lineEnd = new NonTerminal("line-end");
    var list = new NonTerminal("list");
    var term = new NonTerminal("term");
    var literal = new NonTerminal("literal");

    var productions = new[]
    {
        new Production(syntax, rule),
        new Production(syntax, rule, syntax),
        new Production(rule, identifier, implements, expression),
        new Production(expression, list),
        new Production(expression, list, new TerminalLexerRule('|'), expression),
        new Production(lineEnd, eol),
        new Production(lineEnd, lineEnd, lineEnd),
        new Production(list, term),
        new Production(list, term, list),
        new Production(term, literal),
        new Production(term, identifier),
        new Production(identifier, new TerminalLexerRule('<'), ruleName, new TerminalLexerRule('>')),
        new Production(literal, new TerminalLexerRule('"'), notDoubleQuote, new TerminalLexerRule('"')),
        new Production(literal, new TerminalLexerRule('\''), notSingleQuote, new TerminalLexerRule('\''))
    };

    // Whitespace is skipped between tokens; no extra lexer rules are needed.
    var ignore = new[] { whitespace };
    _bnfGrammar = new Grammar(syntax, productions, new ILexerRule[] { }, ignore);
}
/// <summary>
/// Defines the ambiguous arithmetic grammar E -> E + E | E * E | digit.
/// </summary>
static SimpleExpressionGrammar()
{
    var digitTerminal = new DigitTerminal();

    ProductionExpression expr = "E";
    expr.Rule = expr + "+" + expr | expr + "*" + expr | digitTerminal;

    _innerGrammar = new GrammarExpression(expr, new[] { expr }).ToGrammar();
}
/// <summary>
/// Builds the "F" (first) relation matrix: entry (lhs, x) is set when x is
/// the first symbol on the right-hand side of a rule for lhs.
/// </summary>
internal static RelationMatrix F_Relation(IGrammar grammar)
{
    // Matrix is indexed by the global index over terminals + grammaticals.
    var matrix = new RelationMatrix(grammar.Terminals.Count + grammar.Grammaticals.Count);
    foreach (var rule in grammar.Rules)
    {
        if (rule.RightHandSide.Count == 0)
        {
            continue; // epsilon rules contribute nothing
        }
        matrix[grammar.GlobalIndexOf(rule.LeftHandSide), grammar.GlobalIndexOf(rule.RightHandSide[0])] = true;
    }
    return matrix;
}
/// <summary>
/// Initializes a new instance of the <see cref="TextToSchedule" /> class.
/// </summary>
/// <param name="grammar">The grammar.</param>
/// <param name="helper">The helper.</param>
/// <exception cref="System.ArgumentNullException">
/// <paramref name="grammar"/> or <paramref name="helper"/> is null.
/// </exception>
public TextToSchedule(IGrammar grammar, IGrammarHelper helper)
{
    // nameof keeps the parameter names refactor-safe (file already uses C# 6).
    if (grammar == null)
        throw new ArgumentNullException(nameof(grammar));
    if (helper == null)
        throw new ArgumentNullException(nameof(helper));

    Grammar = grammar;
    GrammarHelper = helper;
}
/// <summary>
/// Defines the JSON grammar (mirroring json.org): values are strings,
/// numbers, objects, arrays or the literals true/false/null, and
/// (Expr)null marks an empty (epsilon) alternative for the repeat rules.
/// </summary>
static JsonGrammar()
{
    ProductionExpression
        Json = "Json",
        Object = "Object",
        Pair = "Pair",
        PairRepeat = "PairRepeat",
        Array = "Array",
        Value = "Value",
        ValueRepeat = "ValueRepeat";

    var number = new NumberLexerRule();
    var @string = String();

    Json.Rule = Value;
    // Objects: zero or more comma-separated pairs.
    Object.Rule = '{' + PairRepeat + '}';
    PairRepeat.Rule =
        Pair
        | Pair + ',' + PairRepeat
        | (Expr)null;
    Pair.Rule = (Expr)@string + ':' + Value;
    // Arrays: zero or more comma-separated values.
    Array.Rule = '[' + ValueRepeat + ']';
    ValueRepeat.Rule =
        Value
        | Value + ',' + ValueRepeat
        | (Expr)null;
    Value.Rule =
        (Expr)@string
        | number
        | Object
        | Array
        | "true"
        | "false"
        | "null";

    // Whitespace between tokens is ignored.
    _innerGrammar = new GrammarExpression(Json, null, new[] { new WhitespaceLexerRule() }).ToGrammar();
}
/// <summary>
/// Builds the singleton Prolog parser: loads and resolves the grammar from
/// the calling assembly, then generates the terminal reader and the parser
/// from it.
/// </summary>
private Parser()
{
    var grammar = new Lingua.Grammar();
    grammar.Load(Assembly.GetCallingAssembly(), "Prolog");
    grammar.LoadRules(Assembly.GetCallingAssembly(), "Prolog");
    grammar.Resolve();
    m_grammar = grammar;

    var readerResult = new TerminalReaderGenerator().GenerateTerminalReader(m_grammar);
    m_terminalReader = readerResult.TerminalReader;

    var parserResult = new ParserGenerator().GenerateParser(m_grammar);
    m_parser = parserResult.Parser;
}
/// <summary>
/// Collects and returns the grammaticals, from which the epsilon can be derived, in a list.
/// </summary>
/// <param name="grammar">The grammar to examine.</param>
/// <returns>The list of <see cref="GrammaticalSymbol"/>s which epsilon can be derived from.</returns>
/// <exception cref="ArgumentNullException"> if <paramref name="grammar"/> is <c>null</c>.</exception>
public static ISet<GrammaticalSymbol> Build(IGrammar grammar)
{
    if (grammar == null)
        throw new ArgumentNullException("grammar");

    var result = new HashSet<GrammaticalSymbol>();

    // Put all direct-epsilon-rules' left hand side into results
    foreach (var rule in grammar.Rules)
        if (rule.IsEpsilonRule)
            result.Add(rule.LeftHandSide);

    // Fixpoint iteration: keep adding symbols until a full pass makes no
    // change.
    bool changed = true;
    while (changed)
    {
        changed = false;
        foreach (var rule in grammar.Rules)
        {
            // Only examine symbols not already in the result set
            if (result.Contains(rule.LeftHandSide))
                continue;

            bool ok = true;

            // Does the rule's right hand side only contain grammatical
            // symbols already known to be nullable?
            foreach (var symbol in rule.RightHandSide)
                if (!(symbol is GrammaticalSymbol) || !result.Contains(symbol))
                {
                    ok = false;
                    break;
                }

            // ... If so, the left hand side is nullable too.
            if (ok)
            {
                result.Add(rule.LeftHandSide);
                changed = true;
            }
        }
    }

    return result;
}
/// <summary>
/// Creates a parse engine for the given grammar with a fresh node set, then
/// primes the initial state via <see cref="Initialize"/>.
/// </summary>
public ParseEngine(IGrammar grammar)
{
    Grammar = grammar;
    _nodeSet = new NodeSet();
    Initialize();
}
/// <summary>
/// Creates a lexer rule backed by an entire sub-grammar, emitting tokens of
/// the given type.
/// </summary>
public GrammarLexerRule(TokenType tokenType, IGrammar grammar)
    : base(GrammarLexerRuleType, tokenType)
{
    Grammar = grammar;
}
/// <summary>
/// Convenience overload: wraps the raw token type string in a
/// <see cref="TokenType"/> and delegates to the main constructor.
/// </summary>
public GrammarLexerRule(string tokenType, IGrammar grammar)
    : this(new TokenType(tokenType), grammar)
{
}
/// <summary>
/// Defines the EBNF meta-grammar: a definition is a sequence of blocks,
/// each of which is a production rule, a setting, or a lexer rule. Regular
/// expressions inside factors delegate to the embedded regex grammar.
/// </summary>
static EbnfGrammar()
{
    // Token-level lexer rules for the pieces of the EBNF syntax.
    // NOTE(review): any, notCloseBracket, notMeta and escapeCharacter are
    // declared but not referenced below — presumably kept for planned
    // character-class support; confirm before removing.
    BaseLexerRule
        settingIdentifier = CreateSettingIdentifierLexerRule(),
        notDoubleQuote = CreateNotDoubleQuoteLexerRule(),
        notSingleQuote = CreateNotSingleQuoteLexerRule(),
        identifier = CreateIdentifierLexerRule(),
        any = new TerminalLexerRule(new AnyTerminal(), "."),
        notCloseBracket = new TerminalLexerRule(new NegationTerminal(new CharacterTerminal(']')), "[^\\]]"),
        notMeta = CreateNotMetaLexerRule(),
        escapeCharacter = CreateEscapeCharacterLexerRule(),
        whitespace = CreateWhitespaceLexerRule(),
        multiLineComment = CreateMultiLineCommentLexerRule();

    // Production expressions, one per EBNF non-terminal.
    ProductionExpression
        definition = Definition,
        block = Block,
        rule = Rule,
        setting = Setting,
        lexerRule = LexerRule,
        qualifiedIdentifier = QualifiedIdentifier,
        expression = Expression,
        term = Term,
        factor = Factor,
        literal = Literal,
        grouping = Grouping,
        repetition = Repetition,
        optional = Optional,
        lexerRuleExpression = LexerRuleExpression,
        lexerRuleTerm = LexerRuleTerm,
        lexerRuleFactor = LexerRuleFactor;

    // Regular expressions ("/.../") are parsed by the embedded regex grammar.
    var regexGrammar = new RegexGrammar();
    var regexProductionReference = new ProductionReferenceExpression(regexGrammar);

    definition.Rule = block | block + definition;
    block.Rule = rule | setting | lexerRule;
    // rule:      Name = expression ;
    rule.Rule = qualifiedIdentifier + '=' + expression + ';';
    // setting:   :name = QualifiedIdentifier ;
    setting.Rule = (Expr)settingIdentifier + '=' + qualifiedIdentifier + ';';
    // lexerRule: Name ~ lexer expression ;
    lexerRule.Rule = qualifiedIdentifier + '~' + lexerRuleExpression + ';';
    expression.Rule = term | term + '|' + expression;
    term.Rule = factor | factor + term;
    factor.Rule = qualifiedIdentifier | literal | '/' + regexProductionReference + '/' | repetition | optional | grouping;
    literal.Rule = (Expr)'"' + notDoubleQuote + '"' | (Expr)"'" + notSingleQuote + "'";
    // EBNF brackets: { } repetition, [ ] optional, ( ) grouping.
    repetition.Rule = (Expr)'{' + expression + '}';
    optional.Rule = (Expr)'[' + expression + ']';
    grouping.Rule = (Expr)'(' + expression + ')';
    qualifiedIdentifier.Rule = identifier | (Expr)identifier + '.' + qualifiedIdentifier;
    lexerRuleExpression.Rule = lexerRuleTerm | lexerRuleTerm + '|' + lexerRuleExpression;
    lexerRuleTerm.Rule = lexerRuleFactor | lexerRuleFactor + lexerRuleTerm;
    lexerRuleFactor.Rule = literal | '/' + regexProductionReference + '/';

    // Whitespace and multi-line comments are skipped between tokens.
    var grammarExpression = new GrammarExpression(
        definition,
        new[]
        {
            definition, block, rule, setting, lexerRule, expression, term,
            factor, literal, repetition, optional, grouping,
            qualifiedIdentifier, lexerRuleExpression, lexerRuleTerm,
            lexerRuleFactor
        },
        new[] { new LexerRuleModel(whitespace), new LexerRuleModel(multiLineComment) });
    _ebnfGrammar = grammarExpression.ToGrammar();
}
/// <summary>
/// Wraps a grammar imported from another fixture, keeping a reference to
/// that fixture for later resolution.
/// </summary>
public ImportedGrammar(Fixture innerFixture, IGrammar inner)
{
    _inner = inner;
    _innerFixture = innerFixture;
}
/// <summary>
/// Creates a tester with a fresh <see cref="ParseEngine"/> over the grammar.
/// </summary>
public ParseTester(IGrammar grammar)
{
    Grammar = grammar;
    ParseEngine = new ParseEngine(grammar);
}
/// <summary>
/// Wraps an inner grammar for presentation as a table.
/// </summary>
public TableGrammar(IGrammar inner)
{
    _inner = inner;
}
/// <summary>
/// Expands an item set to its closure: whenever the dot in an item sits
/// before a nonterminal, items for all of that nonterminal's rules (dot at
/// position 0) are added, repeating until nothing new appears.
/// </summary>
private void ComputeClosure(IGrammar grammar, HashSet<GeneratorStateItem> items)
{
    // Continue to loop until new more elements are added to the state.
    //
    bool stateModified = true;
    while (stateModified)
    {
        HashSet<GeneratorStateItem> newItems = new HashSet<GeneratorStateItem>();

        // Iterate over the current elements in the state and determine (possible) new
        // elements to be added.
        //
        foreach (GeneratorStateItem item in items)
        {
            LanguageElementType languageElement = item.RuleItem.DotElement;
            if (languageElement != null && languageElement.ElementType == LanguageElementTypes.Nonterminal)
            {
                // Dot precedes a nonterminal: pull in every rule for it.
                NonterminalType nonterminal = (NonterminalType)languageElement;
                foreach (RuleType rule in nonterminal.Rules)
                {
                    GeneratorStateItem newItem = new GeneratorStateItem(new GeneratorRuleItem(rule, 0));
                    newItems.Add(newItem);
                }
            }
        }

        // Exit loop if all potential new elements already exist in state. Otherwise, add new elements
        // and repeat process.
        //
        if (newItems.IsSubsetOf(items))
        {
            stateModified = false;
        }
        else
        {
            items.UnionWith(newItems);
        }
    }
}
/// <summary>
/// Builds the LR state machine with a worklist algorithm: starts from the
/// closure of the start nonterminal's rules, then repeatedly applies every
/// terminal and nonterminal to each unevaluated state, reusing an existing
/// state when its item set already exists and recording transitions.
/// </summary>
private List<GeneratorState> CreateStates(IGrammar grammar)
{
    List<GeneratorState> states = new List<GeneratorState>();
    List<GeneratorState> unevaluatedStates = new List<GeneratorState>();
    int stateId = 0;

    // Compute start state.
    //
    {
        HashSet<GeneratorStateItem> items = new HashSet<GeneratorStateItem>();
        foreach (RuleType rule in grammar.StartNonterminal.Rules)
        {
            items.Add(new GeneratorStateItem(new GeneratorRuleItem(rule, 0)));
        }
        ComputeClosure(grammar, items);
        GeneratorState startState = new GeneratorState(stateId++, items);
        states.Add(startState);
        unevaluatedStates.Add(startState);
    }

    // Transitions are attempted for every symbol of the grammar.
    List<LanguageElementType> languageElements = new List<LanguageElementType>();
    languageElements.AddRange(grammar.GetTerminals());
    languageElements.AddRange(grammar.GetNonterminals());

    while (unevaluatedStates.Count > 0)
    {
        // Remove one of the evaluated states and process it.
        //
        GeneratorState state = unevaluatedStates[0];
        unevaluatedStates.RemoveAt(0);

        foreach (LanguageElementType languageElement in languageElements)
        {
            // Apply yields the item set reached by shifting this symbol,
            // or null when no item has the dot before it.
            HashSet<GeneratorStateItem> items = state.Apply(languageElement);
            if (items != null)
            {
                ComputeClosure(grammar, items);

                // Reuse an existing state with the same item set, if any.
                GeneratorState toState = null;
                foreach (GeneratorState existingState in states)
                {
                    if (existingState.Items.SetEquals(items))
                    {
                        toState = existingState;
                        break;
                    }
                }
                if (toState == null)
                {
                    toState = new GeneratorState(stateId++, items);
                    states.Add(toState);
                    unevaluatedStates.Add(toState);
                }

                state.Transitions.Add(languageElement, toState);
            }
        }
    }

    if (LinguaTrace.TraceSource.Switch.ShouldTrace(TraceEventType.Information))
    {
        foreach (GeneratorState state in states)
        {
            LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_STATE, "{0}", state);
        }
    }

    return states;
}
/// <summary>
/// Constructs a new <see cref="Parser"/> which can recognize the specified <see cref="IGrammar"/>.
/// Builds the LR states, then for each state derives its ACTION table
/// (accept / reduce / shift per lookahead terminal) and GOTO table
/// (per nonterminal), recording any action conflicts instead of failing.
/// </summary>
/// <param name="grammar">The <see cref="IGrammar"/> to be recognized by the <see cref="Parser"/>.</param>
/// <returns>A <see cref="ParserGeneratorResult"/> containing <see cref="Parser"/> and information pertaining to the
/// success or failure of the generation process.
/// </returns>
public ParserGeneratorResult GenerateParser(IGrammar grammar)
{
    List<ParserGeneratorParserConflict> conflicts = new List<ParserGeneratorParserConflict>();
    List<GeneratorState> states = CreateStates(grammar);

    // Create a parser state for each generator state.
    //
    Dictionary<GeneratorState, ParserState> parserStates = new Dictionary<GeneratorState, ParserState>();
    foreach (GeneratorState state in states)
    {
        parserStates.Add(state, new ParserState(state.Id));
    }

    foreach (GeneratorState state in states)
    {
        LinguaTrace.TraceEvent(TraceEventType.Verbose, LinguaTraceId.ID_GENERATE_PROCESS_STATE, "{0}", state);

        List<GeneratorStateItem> items = new List<GeneratorStateItem>(state.Items);
        items.Sort();

        // Construct the list of actions associated with the parser state.
        // actionRules remembers which rule item produced each action, for
        // conflict reporting.
        //
        Dictionary<TerminalType, ParserAction> actions = new Dictionary<TerminalType, ParserAction>();
        Dictionary<ParserAction, GeneratorRuleItem> actionRules = new Dictionary<ParserAction, GeneratorRuleItem>();
        foreach (GeneratorStateItem item in items)
        {
            LinguaTrace.TraceEvent(TraceEventType.Verbose, LinguaTraceId.ID_GENERATE_PROCESS_ITEM, "{0}", item);

            if (item.RuleItem.DotElement == null)
            {
                // Completed item: accept or reduce on every terminal in the
                // FOLLOW set of the rule's left-hand side.
                foreach (TerminalType terminal in item.RuleItem.Rule.Lhs.Follow)
                {
                    LinguaTrace.TraceEvent(TraceEventType.Verbose, LinguaTraceId.ID_GENERATE_PROCESS_TERMINAL, "{0}", terminal);

                    if (actions.ContainsKey(terminal))
                    {
                        // A second action for the same lookahead: record the
                        // conflict, keeping the first action.
                        ParserGeneratorParserConflict conflict = new ParserGeneratorParserConflict(
                            actionRules[actions[terminal]].ToString(),
                            item.RuleItem.ToString());
                        LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_PROCESS_CONFLICT, "{0}", conflict);
                        conflicts.Add(conflict);
                    }
                    else if (item.RuleItem.Rule.Lhs.IsStart && terminal.IsStop)
                    {
                        // Start rule completed on the stop terminal: accept.
                        ParserAction action = new ParserActionAccept(item.RuleItem.Rule);
                        LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_PROCESS_ACTION, "{0}", action);
                        actions.Add(terminal, action);
                        actionRules.Add(action, item.RuleItem);
                    }
                    else
                    {
                        ParserAction action = new ParserActionReduce(item.RuleItem.Rule);
                        LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_PROCESS_ACTION, "{0}", action);
                        actions.Add(terminal, action);
                        actionRules.Add(action, item.RuleItem);
                    }
                }
            }
            else if (item.RuleItem.DotElement.ElementType == LanguageElementTypes.Terminal)
            {
                // Dot before a terminal: shift to the transition's state.
                TerminalType terminal = (TerminalType)item.RuleItem.DotElement;

                if (actions.ContainsKey(terminal))
                {
                    ParserGeneratorParserConflict conflict = new ParserGeneratorParserConflict(
                        actionRules[actions[terminal]].ToString(),
                        item.RuleItem.ToString());
                    LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_PROCESS_CONFLICT, "{0}", conflict);
                    conflicts.Add(conflict);
                }
                else
                {
                    ParserAction action = new ParserActionShift(parserStates[state.Transitions[terminal]]);
                    LinguaTrace.TraceEvent(TraceEventType.Information, LinguaTraceId.ID_GENERATE_PROCESS_ACTION, "{0}", action);
                    actions.Add(terminal, action);
                    actionRules.Add(action, item.RuleItem);
                }
            }
        }

        // Construct the GOTO table
        //
        Dictionary<NonterminalType, ParserState> gotos = new Dictionary<NonterminalType, ParserState>();
        foreach (KeyValuePair<LanguageElementType, GeneratorState> transition in state.Transitions)
        {
            if (transition.Key.ElementType == LanguageElementTypes.Nonterminal)
            {
                NonterminalType nonterminal = (NonterminalType)transition.Key;
                gotos.Add(nonterminal, parserStates[transition.Value]);
            }
        }

        // Update the parser state.
        //
        ParserState parserState = parserStates[state];

        foreach (KeyValuePair<TerminalType, ParserAction> action in actions)
        {
            parserState.Actions.Add(action.Key, action.Value);
        }

        foreach (KeyValuePair<NonterminalType, ParserState> gotoItem in gotos)
        {
            parserState.Gotos.Add(gotoItem.Key, gotoItem.Value);
        }
    }

    // states[0] is the start state computed first in CreateStates.
    Parser parser = new Parser(parserStates[states[0]]);
    ParserGeneratorResult result = new ParserGeneratorResult(parser, conflicts);
    return result;
}
/// <summary>
/// Wraps another grammar, delegating behavior to it.
/// </summary>
public ProxyGrammar(IGrammar innerGrammar)
{
    InnerGrammar = innerGrammar;
}
/// <summary>
/// Test setup: pulls the "DoSomething" grammar from a fresh table grammar
/// fixture.
/// </summary>
public void SetUp()
{
    grammar = new TableGrammarFixture()["DoSomething"];
}
/// <summary>
/// Test setup: each test instance gets a fresh regex grammar.
/// </summary>
public RegexTests()
{
    _regexGrammar = new RegexGrammar();
}
/// <summary>
/// Curries an inner grammar with default cell values.
/// </summary>
/// <param name="template">Display template for the curried grammar.</param>
/// <param name="inner">The grammar being curried.</param>
/// <param name="defaults">Defaults in "key:value, key:value" form, parsed
/// via <see cref="Step.ParseValues"/>.</param>
public CurriedLineGrammar(string template, IGrammar inner, string defaults)
{
    _inner = inner;
    _template = template;
    _defaults = Step.ParseValues(defaults);
}
/// <summary>
/// Convenience constructor: uses the default options with right-recursion
/// optimization enabled.
/// </summary>
public ParseEngine(IGrammar grammar)
    : this(grammar, new ParseEngineOptions(optimizeRightRecursion: true))
{
}