/// <summary>
/// Computes the FIRST set for the non-terminal defined by <paramref name="term"/>,
/// accumulating terminal tokens into the pre-existing entry in <c>Firsts</c>.
/// </summary>
/// <param name="term">Grammar term whose productions are inspected.</param>
/// <returns>The (mutated) <c>First</c> entry for <c>term.Caller</c>.</returns>
private First GenerateFirst(Term term)
{
    var currentFirst = Firsts.Single(x => x.NonTerminal == term.Caller);

    foreach (var production in term.Productions)
    {
        if (production.FirstElementIsSpaceToken())
        {
            continue;
        }

        if (production.FirstElementIsTerminal() || production.FirstElementIsEmptyToken())
        {
            // Production starts with a terminal (or epsilon): that token is in FIRST directly.
            currentFirst.AddTerminal(production.FirstToken.ToTerminalToken());
        }
        else
        {
            // Production starts with a non-terminal: FIRST of that non-terminal,
            // minus epsilon, is part of this FIRST set.
            // NOTE(review): the recursion assumes the grammar is not (indirectly)
            // left-recursive; a left-recursive grammar would recurse forever — confirm.
            var termOfNonTerminal = GetTermByElement(production.FirstToken);
            var firstsOfNonTerminal = GenerateFirst(termOfNonTerminal).RemoveEmpty().Terminals;
            currentFirst.AddTerminal(firstsOfNonTerminal);

            var productionElements = production.ElementsWithoutSpace.ToArray();

            // If element X(i-1) can derive epsilon, FIRST(X(i)) belongs here too.
            // BUG FIX: the original condition was
            //     X1?.AnyEmptyProduction() ?? false && X2 != null
            // Because ?? binds looser than &&, that parses as
            //     X1?.AnyEmptyProduction() ?? (false && X2 != null)
            // which silently drops the X2 null guard whenever X1 is non-null,
            // allowing GenerateFirst(null) below. Parenthesized as intended.
            // NOTE(review): only the immediately preceding element is tested for
            // nullability; the textbook FIRST rule requires X1..X(i-1) ALL nullable
            // before FIRST(Xi) is added — confirm whether the stricter rule is wanted.
            for (var i = 1; i < productionElements.Length; i++)
            {
                var X1 = GetTermByElement(productionElements[i - 1]);
                var X2 = GetTermByElement(productionElements[i]);

                if ((X1?.AnyEmptyProduction() ?? false) && X2 != null)
                {
                    currentFirst.AddTerminal(GenerateFirst(X2).RemoveEmpty().Terminals);
                }
            }

            // If every element of the production can derive epsilon, the whole
            // production derives epsilon, so epsilon is in FIRST as well.
            var all = productionElements.Select(GetTermByElement).ToList();
            if (all.All(x => x?.AnyEmptyProduction() ?? false))
            {
                currentFirst.AddTerminal(EmptyToken.ToTerminal());
            }
        }
    }

    return currentFirst;
}
/// <summary>
/// Scans the input and returns the next <see cref="Token"/>, or null when the
/// input is exhausted before any accepting state is reached.
/// </summary>
/// <returns>The recognized token, or null if no more characters are available.</returns>
/// <exception cref="ArgumentOutOfRangeException">Thrown for an unrecognized analyser state.</exception>
public Token GetToken()
{
    State = LexicAnalyserState.Initial;
    Value = HasNext ? Character.ToString() : "";
    Token token = null;

    while (HasNext && token == null)
    {
        if (State == LexicAnalyserState.Initial)
        {
            // Still scanning: keep consuming characters until an accepting
            // state is reached (HandleInitial advances the state machine).
            HandleInitial();
            continue;
        }

        // Accepting state: materialize the token for the current state.
        switch (State)
        {
            case LexicAnalyserState.NonTerminal: token = new NonTerminalToken(Value); break;
            case LexicAnalyserState.Space: token = new SpaceToken(); break;
            case LexicAnalyserState.Terminal: token = new TerminalToken(Value); break;
            case LexicAnalyserState.Empty: token = new EmptyToken(); break;
            case LexicAnalyserState.NewLine: token = new NewLineToken(Value); break;
            case LexicAnalyserState.Identifier: token = new IdentifierToken(Value); break;
            case LexicAnalyserState.Number: token = new NumberToken(Value); break;
            case LexicAnalyserState.Var: token = new VarToken(); break;
            case LexicAnalyserState.Write: token = new WriteToken(); break;
            case LexicAnalyserState.Read: token = new ReadToken(); break;
            case LexicAnalyserState.If: token = new IfToken(); break;
            case LexicAnalyserState.End: token = new EndToken(); break;
            case LexicAnalyserState.Then: token = new ThenToken(); break;
            case LexicAnalyserState.Begin: token = new BeginToken(); break;
            case LexicAnalyserState.While: token = new WhileToken(); break;
            case LexicAnalyserState.Do: token = new DoToken(); break;
            case LexicAnalyserState.SemiColon: token = new SemiColonToken(); break;
            case LexicAnalyserState.OpenParentheses: token = new OpenParenthesesToken(); break;
            case LexicAnalyserState.CloseParentheses: token = new CloseParenthesesToken(); break;
            case LexicAnalyserState.Plus: token = new PlusToken(); break;
            case LexicAnalyserState.Sub: token = new SubToken(); break;
            case LexicAnalyserState.Great: token = new GreatToken(); break;
            case LexicAnalyserState.Less: token = new LessToken(); break;
            case LexicAnalyserState.Equal: token = new EqualToken(); break;
            case LexicAnalyserState.Repeat: token = new RepeatToken(); break;
            case LexicAnalyserState.Until: token = new UntilToken(); break;
            case LexicAnalyserState.Attribution: token = new AttributionToken(); break;
            case LexicAnalyserState.NotEqual: token = new NotEqualToken(); break;
            case LexicAnalyserState.GreatOrEqual: token = new GreatOrEqualToken(); break;
            case LexicAnalyserState.LessOrEqual: token = new LessOrEqualToken(); break;
            default: throw new ArgumentOutOfRangeException();
        }

        // Every accepting state advances past the character that completed the
        // token — hoisted here instead of being repeated in all 30 cases.
        CurrentIndex++;
    }

    return token;
}