private bool TryRecognizeSimpleLiteralExpression(
    FullyQualifiedName fullyQualifiedName,
    EbnfLexerRuleExpression ebnfLexerRule,
    out ILexerRule lexerRule)
{
    lexerRule = null;

    if (ebnfLexerRule.NodeType != EbnfNodeType.EbnfLexerRuleExpression)
        return false;

    var term = ebnfLexerRule.Term;
    if (term.NodeType != EbnfNodeType.EbnfLexerRuleTerm)
        return false;

    var factor = term.Factor;
    if (factor.NodeType != EbnfNodeType.EbnfLexerRuleFactorLiteral)
        return false;

    var literal = factor as EbnfLexerRuleFactorLiteral;
    lexerRule = new StringLiteralLexerRule(
        literal.Value,
        new TokenType(fullyQualifiedName.FullName));
    return true;
}
public void Reset(IStringLiteralLexerRule newLiteral, int position)
{
    LexerRule = newLiteral;
    _index = 0;
    _capture = null;
    Position = position;
    Literal = newLiteral.Literal;
}
public void Reset(ITerminalLexerRule terminalLexerRule, int position)
{
    LexerRule = terminalLexerRule;
    Terminal = terminalLexerRule.Terminal;
    _captureRendered = false;
    _isAccepted = false;
    Position = position;
}
public ILexeme Create(ILexerRule lexerRule)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
        throw new Exception(
            string.Format(
                "Unable to create DfaLexeme from type {0}. Expected DfaLexerRule",
                lexerRule.GetType().FullName));

    var dfaLexerRule = lexerRule as IDfaLexerRule;
    return new DfaLexeme(dfaLexerRule.Start, dfaLexerRule.TokenType);
}
public ILexeme Create(ILexerRule lexerRule)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
        throw new Exception(
            string.Format(
                "Unable to create TerminalLexeme from type {0}. Expected TerminalLexerRule",
                lexerRule.GetType().FullName));

    var terminalLexerRule = lexerRule as ITerminalLexerRule;
    return new TerminalLexeme(terminalLexerRule);
}
public Grammar(INonTerminal start, IProduction[] productions, ILexerRule[] lexerRules, ILexerRule[] ignore)
{
    Assert.IsNotNullOrEmpty(productions, "productions");
    Assert.IsNotNull(start, "start");

    _productionIndex = CreateProductionIndex(productions);
    _lexerRuleIndex = CreateLexerRuleIndex(lexerRules);

    Productions = new ReadOnlyList<IProduction>(productions ?? EmptyProductionArray);
    LexerRules = new ReadOnlyList<ILexerRule>(lexerRules ?? EmptyLexerRuleArray);
    Ignores = new ReadOnlyList<ILexerRule>(ignore ?? EmptyLexerRuleArray);
    Start = start;
}
private void AddIgnoreRuletoIndex(ILexerRule lexerRule)
{
    var key = HashCode.Compute(
        ((int)lexerRule.SymbolType).GetHashCode(),
        lexerRule.TokenType.Id.GetHashCode());

    if (!_ignoreIndex.ContainsKey(key))
        _ignoreIndex.Add(key, new List<ILexerRule>());

    _ignoreIndex[key].Add(lexerRule);
}
public ILexeme Create(ILexerRule lexerRule)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
        throw new Exception(
            string.Format(
                "Unable to create ParseEngineLexeme from type {0}. Expected GrammarLexerRule",
                lexerRule.GetType().FullName));

    var grammarLexerRule = lexerRule as IGrammarLexerRule;
    var parseEngine = new ParseEngine(grammarLexerRule.Grammar);
    return new ParseEngineLexeme(parseEngine, grammarLexerRule.TokenType);
}
public IEnumerable<Token> ToTokens(IEnumerable<char> src)
{
    var walker = new StringWalker(src);
    var rules = new ILexerRule[]
    {
        new NumberRule(),
        new KeywordRule(_keywords),
        new WordRule(),
        new StringRule(),
        new SingleCharSymbolRule()
    };

    while (!walker.IsEmpty())
    {
        char c = walker.Peek();

        // Skip all whitespace
        if (char.IsWhiteSpace(c))
        {
            walker.Consume(1);
            continue;
        }

        // Get the standard rules out of the way first.
        bool handledByStandardRule = false;
        foreach (var rule in rules)
        {
            if (rule.IsStartOfToken(walker))
            {
                handledByStandardRule = true;
                yield return rule.ConsumeToken(walker);
                break;
            }
        }

        if (handledByStandardRule)
            continue;

        // TODO: multi-character symbol tokens
        // TODO: Special cases go here

        // We didn't find any rule that matches what we're seeing,
        // so throw an error.
        CodePos pos = walker.Position;
        throw new CompileErrorException(pos, $"Unexpected character '{c}'");
    }
}
public ILexeme Create(ILexerRule lexerRule)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
        throw new Exception(
            $"Unable to create TerminalLexeme from type {lexerRule.GetType().FullName}. Expected TerminalLexerRule");

    var terminalLexerRule = lexerRule as ITerminalLexerRule;
    if (_queue.Count == 0)
        return new TerminalLexeme(terminalLexerRule);

    var reusedLexeme = _queue.Dequeue();
    reusedLexeme.Reset(terminalLexerRule);
    return reusedLexeme;
}
public ILexeme Create(ILexerRule lexerRule)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
        throw new Exception(
            $"Unable to create DfaLexeme from type {lexerRule.GetType().FullName}. Expected DfaLexerRule");

    var dfaLexerRule = lexerRule as IDfaLexerRule;
    if (_queue.Count > 0)
    {
        var reusedLexeme = _queue.Dequeue();
        reusedLexeme.Reset(dfaLexerRule);
        return reusedLexeme;
    }

    return new DfaLexeme(dfaLexerRule);
}
public IList<ILexerRule> GetRules()
{
    var binaryOperatorRules = operatorListFactory.GetBinaryOperators()
        .Select(b => new BinaryOperatorRule(b));
    var unaryOperatorRules = operatorListFactory.GetUnaryOperators()
        .Select(u => new UnaryOperatorRule(u));

    var baseRules = new ILexerRule[]
    {
        new BracketRule(),
        new NumberRule()
    };

    return baseRules.Concat(binaryOperatorRules)
        .Concat(unaryOperatorRules)
        .ToList();
}
private ILexerRule LexerRuleExpression(
    FullyQualifiedName fullyQualifiedName,
    EbnfLexerRuleExpression ebnfLexerRule)
{
    ILexerRule lexerRule = null;
    if (TryRecognizeSimpleLiteralExpression(fullyQualifiedName, ebnfLexerRule, out lexerRule))
        return lexerRule;

    var nfa = LexerRuleExpression(ebnfLexerRule);
    var dfa = _nfaToDfaAlgorithm.Transform(nfa);
    return new DfaLexerRule(dfa, fullyQualifiedName.FullName);
}
public ILexeme Create(ILexerRule lexerRule, int position)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
    {
        throw new Exception(
            $"Unable to create DfaLexeme from type {lexerRule.GetType().FullName}. Expected DfaLexerRule");
    }

    var dfaLexerRule = lexerRule as IDfaLexerRule;
    if (_queue.Count > 0)
    {
        var reusedLexeme = _queue.Dequeue();
        reusedLexeme.Reset(dfaLexerRule);
        return reusedLexeme;
    }

    return new DfaLexeme(dfaLexerRule, position);
}
public ILexeme Create(ILexerRule lexerRule, ICapture<char> segment, int offset)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
    {
        throw new Exception(
            $"Unable to create StringLiteralLexeme from type {lexerRule.GetType().FullName}. Expected StringLiteralLexerRule");
    }

    var stringLiteralLexerRule = lexerRule as IStringLiteralLexerRule;
    if (_queue.Count == 0)
        return new StringLiteralLexeme(stringLiteralLexerRule, segment, offset);

    var reusedLexeme = _queue.Dequeue();
    reusedLexeme.Reset(stringLiteralLexerRule, offset);
    return reusedLexeme;
}
public ILexeme Create(ILexerRule lexerRule, ICapture<char> segment, int offset)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
    {
        throw new Exception(
            $"Unable to create DfaLexeme from type {lexerRule.GetType().FullName}. Expected DfaLexerRule");
    }

    var dfaLexerRule = lexerRule as IDfaLexerRule;
    if (_queue.Count > 0)
    {
        var reusedLexeme = _queue.Dequeue();
        reusedLexeme.Reset(dfaLexerRule, offset);
        return reusedLexeme;
    }

    var dfaLexeme = new DfaLexeme(dfaLexerRule, segment, offset);
    return dfaLexeme;
}
public ILexeme Create(ILexerRule lexerRule, int position)
{
    if (lexerRule.LexerRuleType != LexerRuleType)
    {
        throw new Exception(
            $"Unable to create ParseEngineLexeme from type {lexerRule.GetType().FullName}. Expected GrammarLexerRule");
    }

    var grammarLexerRule = lexerRule as IGrammarLexerRule;
    if (_queue.Count == 0)
        return new ParseEngineLexeme(grammarLexerRule);

    var reusedLexeme = _queue.Dequeue();
    reusedLexeme.Reset(grammarLexerRule);
    return reusedLexeme;
}
public ILexeme Create(ILexerRule lexerRule, ICapture<char> segment, int offset)
{
    if (!LexerRuleType.Equals(lexerRule.LexerRuleType))
    {
        throw new Exception(
            $"Unable to create TerminalLexeme from type {lexerRule.GetType().FullName}. Expected TerminalLexerRule");
    }

    var terminalLexerRule = lexerRule as ITerminalLexerRule;
    if (_queue.Count == 0)
        return new TerminalLexeme(terminalLexerRule, segment, offset);

    var reusedLexeme = _queue.Dequeue();
    reusedLexeme.Reset(terminalLexerRule, offset);
    return reusedLexeme;
}
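The pooled Create overloads above only show the Dequeue/Reset half of the reuse cycle. A minimal sketch of the other half, returning a lexeme to the pool, is shown below; the Free method name, its ILexeme parameter, and the Queue<TerminalLexeme> field are assumptions, not taken from the snippets in this section.

// Hypothetical companion to the pooled Create overloads above: returning a
// lexeme to the factory's queue so a later Create call can Reset and reuse it.
private readonly Queue<TerminalLexeme> _queue = new Queue<TerminalLexeme>();

public void Free(ILexeme lexeme)
{
    var terminalLexeme = lexeme as TerminalLexeme;
    if (terminalLexeme == null)
        throw new Exception(
            $"Unable to free lexeme of type {lexeme.GetType().FullName}. Expected TerminalLexeme");

    // Put the lexeme back in the pool rather than letting it be collected.
    _queue.Enqueue(terminalLexeme);
}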
public LexerRuleModel(ILexerRule value) { Value = value; }
private void AddIgnoreRule(ILexerRule lexerRule)
{
    _ignores.Add(lexerRule);
    AddIgnoreRuletoIndex(lexerRule);
}
public ForestNodeVisitorTests() { _whitespace = CreateWhitespaceRule(); }
public int GetLexerRuleIndex(ILexerRule lexerRule)
{
    return _lexerRules.IndexOf(lexerRule);
}
private void AddLexerRule(ILexerRule lexerRule) { _lexerRules.Add(lexerRule); }
private ILexeme CreateLexemeForLexerRule(ILexerRule lexerRule)
{
    return _lexemeFactoryRegistry
        .Get(lexerRule.LexerRuleType)
        .Create(lexerRule);
}
public IReadOnlyList<ILexerRule> GetExpectedLexerRules()
{
    var frameSets = _chart.Sets;
    var frameSetCount = frameSets.Count;
    if (frameSetCount == 0)
        return EmptyLexerRules;

    var hashCode = 0;
    var count = 0;

    if (_expectedLexerRuleIndicies == null)
        _expectedLexerRuleIndicies = new BitArray(Grammar.LexerRules.Count);
    else
        _expectedLexerRuleIndicies.SetAll(false);

    var frameSet = frameSets[frameSets.Count - 1];
    for (var i = 0; i < frameSet.States.Count; i++)
    {
        var stateFrame = frameSet.States[i];
        for (int j = 0; j < stateFrame.DottedRuleSet.ScanKeys.Count; j++)
        {
            var lexerRule = stateFrame.DottedRuleSet.ScanKeys[j];
            var index = Grammar.GetLexerRuleIndex(lexerRule);
            if (index < 0)
                continue;
            if (_expectedLexerRuleIndicies[index])
                continue;

            _expectedLexerRuleIndicies[index] = true;
            hashCode = HashCode.ComputeIncrementalHash(lexerRule.GetHashCode(), hashCode, count == 0);
            count++;
        }
    }

    if (_expectedLexerRuleCache == null)
        _expectedLexerRuleCache = new Dictionary<int, ILexerRule[]>();

    // if the hash is found in the cached lexer rule lists, return the cached array
    ILexerRule[] cachedLexerRules = null;
    if (_expectedLexerRuleCache.TryGetValue(hashCode, out cachedLexerRules))
        return cachedLexerRules;

    // compute the new lexer rule array and add it to the cache
    var array = new ILexerRule[count];
    var returnItemIndex = 0;
    for (var i = 0; i < Grammar.LexerRules.Count; i++)
    {
        if (_expectedLexerRuleIndicies[i])
        {
            array[returnItemIndex] = Grammar.LexerRules[i];
            returnItemIndex++;
        }
    }

    _expectedLexerRuleCache.Add(hashCode, array);
    return array;
}
public IReadOnlyList<ILexerRule> GetExpectedLexerRules()
{
    var earleySets = _chart.EarleySets;
    var currentIndex = earleySets.Count - 1;
    var currentEarleySet = earleySets[currentIndex];
    var scanStates = currentEarleySet.Scans;
    if (scanStates.Count == 0)
        return EmptyLexerRules;

    var hashCode = 0;
    var count = 0;

    if (_expectedLexerRuleIndicies == null)
        _expectedLexerRuleIndicies = new BitArray(Grammar.LexerRules.Count);
    else
        _expectedLexerRuleIndicies.SetAll(false);

    // compute the lexer rule hash for caching the list of lexer rules
    // compute the unique lexer rule count
    // set bits in the rule index bit array corresponding to the position of the lexer rule in the list of rules
    for (int s = 0; s < scanStates.Count; s++)
    {
        var scanState = scanStates[s];
        var postDotSymbol = scanState.DottedRule.PostDotSymbol;
        if (postDotSymbol == null || postDotSymbol.SymbolType != SymbolType.LexerRule)
            continue;

        var lexerRule = postDotSymbol as ILexerRule;
        var index = Grammar.GetLexerRuleIndex(lexerRule);
        if (index < 0)
            continue;
        if (_expectedLexerRuleIndicies[index])
            continue;

        count++;
        _expectedLexerRuleIndicies[index] = true;
        hashCode = HashCode.ComputeIncrementalHash(lexerRule.GetHashCode(), hashCode, hashCode == 0);
    }

    if (_expectedLexerRuleCache == null)
        _expectedLexerRuleCache = new Dictionary<int, ILexerRule[]>();

    // if the hash is found in the cached lexer rule lists, return the cached array
    ILexerRule[] cachedLexerRules = null;
    if (_expectedLexerRuleCache.TryGetValue(hashCode, out cachedLexerRules))
        return cachedLexerRules;

    // compute the new lexer rule array and add it to the cache
    var array = new ILexerRule[count];
    var returnItemIndex = 0;
    for (var i = 0; i < Grammar.LexerRules.Count; i++)
    {
        if (_expectedLexerRuleIndicies[i])
        {
            array[returnItemIndex] = Grammar.LexerRules[i];
            returnItemIndex++;
        }
    }

    _expectedLexerRuleCache.Add(hashCode, array);
    return array;
}
public int GetLexerRuleIndex(ILexerRule lexerRule)
{
    return _innerGrammar.GetLexerRuleIndex(lexerRule);
}
private void AddIgnoreRuletoIndex(ILexerRule lexerRule)
{
    var key = HashCode.Compute(
        lexerRule.SymbolType.GetHashCode(),
        lexerRule.TokenType.Id.GetHashCode());

    if (!_ignoreIndex.ContainsKey(key))
        _ignoreIndex.Add(key, new ReadWriteList<ILexerRule>());

    _ignoreIndex[key].Add(lexerRule);
}
private ILexemeFactory GetLexemeFactory(ILexerRule lexerRule)
{
    return _lexemeFactoryRegistry.Get(lexerRule.LexerRuleType);
}
private object GetRuleWeight(ILexerRule rule)
{
    return rule.Weight;
}
public IGrammarBuilder Ignore(ILexerRule lexerRule)
{
    _ignoreRules.Add(lexerRule);
    return this;
}
public IGrammarBuilder LexerRule(ILexerRule lexerRule)
{
    _lexerRules.Add(lexerRule);
    return this;
}
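Because both builder methods return this, lexer rules and ignore rules can be registered fluently. A minimal usage sketch follows; the concrete GrammarBuilder type, the WhitespaceTerminal class, the TerminalLexerRule constructor shape, and the ToGrammar() finalizer are assumptions, while LexerRule(...), Ignore(...), StringLiteralLexerRule, and TokenType come from the snippets above.

// Hypothetical usage of the builder methods above.
var openParen  = new StringLiteralLexerRule("(", new TokenType("open-paren"));
var closeParen = new StringLiteralLexerRule(")", new TokenType("close-paren"));
var whitespace = new TerminalLexerRule(                  // hypothetical rule + terminal
    new WhitespaceTerminal(), new TokenType("whitespace"));

IGrammarBuilder builder = new GrammarBuilder();          // hypothetical concrete builder
builder
    .LexerRule(openParen)                                // rules the parser matches on
    .LexerRule(closeParen)
    .Ignore(whitespace);                                 // matched by the lexer, then discarded

var grammar = builder.ToGrammar();                       // hypothetical finalize step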