public void GrammarFindNodeTest()
{
    // Build a grammar with two chains sharing the prefix 1-2, then verify
    // that FindNodesByWordId locates the node created for word id 4.
    Grammar grammar = new Grammar(2);
    Assert.IsNotNull(grammar.Origin);

    grammar.Add(new int[] { 1, 2, 3 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[3]);

    grammar.Add(new int[] { 1, 2, 4 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[4]);

    var found = grammar.FindNodesByWordId(4);
    Assert.AreEqual(grammar.Origin.Followers[1].Followers[2].Followers[4], found);
}
public static void InitGrammar()
{
    // Top-level rule:
    // Program -> ProgramName NL "var" DefList1 NL "begin" StatementList1 "end"
    var programBody = new CompositeToken
    {
        TokenEnum.ProgramName,
        TokenEnum.NewLine(),
        TokenEnum.String("var"),
        TokenEnum.DefList1,
        TokenEnum.NewLine(),
        TokenEnum.String("begin"),
        TokenEnum.StatementList1,
        TokenEnum.String("end")
    };
    Grammar.Add(new GrammarReplaceRule(TokenEnum.Program, programBody));

    // Remaining rule groups, registered in the original order.
    DefinitionList();        // variable definition lists
    StatementList();         // statement lists
    UnlabeledOperator();     // unlabeled operators
    IdList();                // identifier lists
    FillExpression();        // arithmetic expressions
    FillLogicalExpression(); // logical expressions
}
/// <summary>
/// Registers the Declaration production: a type-declaration token followed by
/// an identifier. The semantic action creates a symbol-table entry for the
/// declared name and attaches a DeclarationASTNode to the parsing node.
/// </summary>
/// <param name="grammar">Grammar instance to extend (passed by ref).</param>
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.Declaration,
        new SubProduction
        (
            new List<ExpressionDefinition>
            {
                new TerminalExpressionDefinition { TokenType = TokenType.TypeDeclaration },
                new TerminalExpressionDefinition { TokenType = TokenType.Identifier },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Lexemes of the matched type and identifier tokens.
                    string type = node.GetAttributeForKey<WordToken>(ParserConstants.TypeDeclaration, ParserConstants.Token).Lexeme;
                    // Nearest enclosing scope's symbol table.
                    SymbolTable symbolTable = node.FirstParentWithAttribute(ParserConstants.SymTable).GetAttribute<SymbolTable>(ParserConstants.SymTable);
                    string key = node.GetAttributeForKey<WordToken>("Identifier", ParserConstants.Token).Lexeme;

                    // Create the symbol-table entry for this declaration.
                    SymbolTableEntryType symbolEntryType = SymbolTable.StringToSymbolTableEntryType(type);
                    SymbolTableEntry entry = symbolTable.Create(key, symbolEntryType);

                    // Attach an AST node carrying the new entry.
                    DeclarationASTNode syntaxTreeNode = new DeclarationASTNode();
                    syntaxTreeNode.SymbolTableEntry = entry;
                    node.Attributes.Add(ParserConstants.SyntaxTreeNode, syntaxTreeNode);
                })
            }
        )
    )); // fix: removed the stray empty statement (doubled ";;")
}
private static void StatementList()
{
    // StatementList -> StatementList NL Statement | NL Statement
    Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList,
        new CompositeToken { TokenEnum.StatementList, TokenEnum.NewLine(), TokenEnum.Statement }));
    Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList,
        new CompositeToken { TokenEnum.NewLine(), TokenEnum.Statement }));

    // Statement -> UnlabeledStatement | Label ':' UnlabeledStatement
    Grammar.Add(new GrammarReplaceRule(TokenEnum.Statement,
        new CompositeToken { TokenEnum.UnlabeledStatement }));
    Grammar.Add(new GrammarReplaceRule(TokenEnum.Statement,
        new CompositeToken { TokenEnum.Label(), TokenEnum.String(":"), TokenEnum.UnlabeledStatement }));

    // StatementList1 -> StatementList NL
    Grammar.Add(new GrammarReplaceRule(TokenEnum.StatementList1,
        new CompositeToken { TokenEnum.StatementList, TokenEnum.NewLine() }));
}
private static void DefinitionList()
{
    // DefList -> DefList ';' Def | Def
    Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList,
        new CompositeToken { TokenEnum.DefList, TokenEnum.String(";"), TokenEnum.Def }));
    Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList,
        new CompositeToken { TokenEnum.Def }));

    // DefList1 -> DefList
    Grammar.Add(new GrammarReplaceRule(TokenEnum.DefList1,
        new CompositeToken { TokenEnum.DefList }));

    // Def -> IdList1 ':' "float"
    Grammar.Add(new GrammarReplaceRule(TokenEnum.Def,
        new CompositeToken { TokenEnum.IdList1, TokenEnum.String(":"), TokenEnum.String("float") }));
}
public void GrammarFindWordTest()
{
    // Two chains sharing the prefix 1-2; FindWord must return the node path
    // from the origin down to the last element of the requested word.
    Grammar grammar = new Grammar(2);
    Assert.IsNotNull(grammar.Origin);

    grammar.Add(new int[] { 1, 2, 3 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[3]);
    grammar.Add(new int[] { 1, 2, 4 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[4]);

    var path = grammar.FindWord(new int[] { 1, 2, 4 });

    Assert.AreEqual(grammar.Origin, path[0]);
    Assert.AreEqual(grammar.Origin.Followers[1], path[1]);
    Assert.AreEqual(grammar.Origin.Followers[1].Followers[2], path[2]);
    Assert.AreEqual(grammar.Origin.Followers[1].Followers[2].Followers[4], path[3]);
}
// Registers a production under ParserConstants.Select whose only alternative
// comes from GetInsertRule().
// NOTE(review): registering the *Select* identifier with GetInsertRule()
// looks like a possible copy/paste mismatch — confirm this is intended.
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.Select, new List <SubProduction>
    {
        GetInsertRule()
    }));
}
/// <summary>
/// CommandRule -> Select | Insert. Each alternative simply forwards the
/// child's syntax-tree node attribute up to the CommandRule node.
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    var alternatives = new List<SubProduction>();
    foreach (string identifier in new[] { ParserConstants.Select, ParserConstants.Insert })
    {
        string captured = identifier; // captured per iteration by the lambda
        alternatives.Add(new SubProduction
        (
            new List<ExpressionDefinition>
            {
                new NonTerminalExpressionDefinition { Identifier = captured },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Forward the child's AST node.
                    node.Attributes.Add(ParserConstants.SyntaxTreeNode,
                        node.GetAttributeForKey<SyntaxTreeNode>(captured, ParserConstants.SyntaxTreeNode));
                })
            }
        ));
    }
    grammar.Add(new Production(ParserConstants.CommandRule, alternatives));
}
// Materializes the current lexeme into a Token at the current ROW/COLUMN,
// registers it in the grammar under its lexeme, and returns it.
private static Token AddToken(Tag tag)
{
    var token = new Token(tag, GetLexeme(), ROW, COLUMN);
    Grammar.Add(token, token.Lexeme);
    return token;
}
/// <summary>
/// Registers the numeric-expression grammar levels:
/// expression -> term -> factor (identifier | parenthesized | integer).
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.NumericExpression,
        new List<SubProduction> { TermRule() }));

    // '*' and '/' live at the term level.
    grammar.Add(new Production(ParserConstants.NumericTerm,
        new List<SubProduction> { NumericFactorRule() }));

    grammar.Add(new Production(ParserConstants.NumericFactor,
        new List<SubProduction> { IdentifierRule(), ParenthesisRule(), IntegerRule() }));
}
private static void Assignment()
{
    // UnlabeledStatement -> Id '=' Expression1
    var rule = new CompositeToken
    {
        TokenEnum.Id(),
        TokenEnum.String("="),
        TokenEnum.Expression1
    };
    Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, rule));
}
/// <summary>
/// StringExpression has a single alternative: a string literal.
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    var alternatives = new List<SubProduction> { StringRule() };
    grammar.Add(new Production(ParserConstants.StringExpression, alternatives));
}
/// <summary>
/// Boolean has a single alternative supplied by BooleanRules().
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    var alternatives = new List<SubProduction> { BooleanRules() };
    grammar.Add(new Production(ParserConstants.Boolean, alternatives));
}
public void GrammarAddTest()
{
    Grammar grammar = new Grammar(2);
    Assert.IsNotNull(grammar.Origin);

    // Add a single chain 1 2 3.
    grammar.Add(new int[] { 1, 2, 3 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[3]);

    // Add the chain 1 2 4.
    grammar.Add(new int[] { 1, 2, 4 });
    Assert.IsNotNull(grammar.Origin.Followers[1].Followers[2].Followers[4]);

    // Add chains that join at their last element.
    grammar.Add(new int[] { 1, 2, 3, 5 });
    grammar.Add(new int[] { 1, 2, 4, 5 });

    // Both paths must converge on the same node for element 5.
    var viaThree = grammar.Origin.Followers[1].Followers[2].Followers[3].Followers[5];
    var viaFour = grammar.Origin.Followers[1].Followers[2].Followers[4].Followers[5];
    Assert.AreEqual(viaThree, viaFour);
}
/// <summary>
/// BooleanExpression -> And | Or | Term; BooleanTerm -> parenthesized
/// expression | relational comparison.
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.BooleanExpression,
        new List<SubProduction> { AndRule(), OrRule(), TermRule() }));

    // BooleanRule() was already disabled (commented out) in the original set.
    grammar.Add(new Production(ParserConstants.BooleanTerm,
        new List<SubProduction> { ParenthesisRule(), RelopRules() }));
}
private static void If()
{
    // UnlabeledStatement -> "if" LogicalExpression1 "then" "goto" Label
    var rule = new CompositeToken
    {
        TokenEnum.String("if"),
        TokenEnum.LogicalExpression1,
        TokenEnum.String("then"),
        TokenEnum.String("goto"),
        TokenEnum.Label()
    };
    Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, rule));
}
/// <summary>
/// Factor -> identifier | function call | boolean expression | numeric
/// expression.
/// </summary>
public static void Initialize(ref Grammar grammar)
{
    var alternatives = new List<SubProduction>
    {
        IdentifierRule(),
        FunctionCallRule(),
        BooleanExpressionRule(),
        NumExpressionRule()
    };
    grammar.Add(new Production(ParserConstants.Factor, alternatives));
}
// Registers one category's grammar lines. Any failure is wrapped in a
// GrammarLoadingException carrying the source and category for context.
private void CommitCategory(string category, List <string> categoryGrammar, string grammarSource)
{
    try
    {
        category = category.Trim();
        Debug.WriteLine(" - Set cat " + category);
        Grammar.Add(category, categoryGrammar);
    }
    catch (Exception exception)
    {
        string message = String.Format("Error [in {0}] ({1}): {2}", grammarSource, category, exception.Message);
        throw new GrammarLoadingException(message);
    }
}
private static void Loop()
{
    // UnlabeledStatement ->
    //   "do" Id '=' Expression1 "to" Expression2 StatementList1 "next"
    var rule = new CompositeToken
    {
        TokenEnum.String("do"),
        TokenEnum.Id(),
        TokenEnum.String("="),
        TokenEnum.Expression1,
        TokenEnum.String("to"),
        TokenEnum.Expression2,
        TokenEnum.StatementList1,
        TokenEnum.String("next")
    };
    Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, rule));
}
// Reads one byte from the global source stream into CURRENT_CHAR, maintaining
// ROW/COLUMN position tracking. On end of input it checks for an unterminated
// block comment, registers the <EOF> token, prints the grammar, and terminates
// the process with exit code 0 (exit code 1 on I/O failure).
private static void Read()
{
    CURRENT_CHAR = '\u0000';
    try
    {
        LAST_CHAR = Global.SOURCE.ReadByte();
        if (LAST_CHAR != EOF)
        {
            CURRENT_CHAR = (char)LAST_CHAR;
            if (IsNewLine())
            {
                // Newline: advance the row, reset the column.
                ROW++;
                COLUMN = 1;
            }
            else if (CURRENT_CHAR == '\t')
            {
                // NOTE(review): tab advances the column by 3 — confirm the
                // intended tab width.
                COLUMN += 3;
            }
            else if (!IsNewLine())
            {
                COLUMN++;
            }
        }
        else
        {
            // End of input: flag any unterminated multiline comment, emit and
            // register the EOF token, dump the grammar, and exit successfully.
            MultilineCommentErrorCheck();
            Token _EOF_ = new Token(Tag.EOF, "<EOF>", ROW, COLUMN);
            Grammar.Add(_EOF_, _EOF_.Lexeme);
            Grammar.Show();
            Environment.Exit(0);
        }
    }
    catch (IOException e)
    {
        Console.WriteLine("[Error]: Failed to read the character '{0}'\n{1}", CURRENT_CHAR, e);
        Environment.Exit(1);
    }
}
private static void InputOutput()
{
    // UnlabeledStatement -> ("readl" | "writel") '(' IdList1 ')'
    foreach (string keyword in new[] { "readl", "writel" })
    {
        Grammar.Add(new GrammarReplaceRule(TokenEnum.UnlabeledStatement, new CompositeToken
        {
            TokenEnum.String(keyword),
            TokenEnum.String("("),
            TokenEnum.IdList1,
            TokenEnum.String(")")
        }));
    }
}
/// <summary>
/// Deserializes the rule list from JSON and converts each RuleBuffer into a
/// Rule: the left part becomes a non-terminal head, each '|'-separated right
/// part becomes an alternative of space-separated linguistic units.
/// </summary>
/// <param name="sr">Reader positioned at the JSON rule definitions.</param>
private static void ParseGrammar(StreamReader sr)
{
    List<RuleBuffer> rules = (List<RuleBuffer>) new JsonSerializer().Deserialize(sr, typeof(List<RuleBuffer>));
    SimpleGrammar = rules;

    // Fix: hoisted out of the nested loops — the original compiled this regex
    // once per linguistic unit. A unit written as <name> is a non-terminal.
    Regex nonTerminalPattern = new Regex(@"^<.+>$");

    foreach (RuleBuffer rule in rules)
    {
        var newRule = new Rule
        {
            // The JSON left part is the rule's non-terminal head.
            LeftPart = new LinguisticUnit
            {
                Name = rule.LeftPart.Trim(),
                Type = LinguisticUnitType.NonTerminal
            }
        };

        // Alternatives are separated by '|'.
        string[] rightParts = rule.RightPart.Split(new string[] { "|" }, StringSplitOptions.RemoveEmptyEntries);
        foreach (string rightPart in rightParts)
        {
            var newRightPart = new RightPart();

            // Units within one alternative are separated by spaces.
            string[] linguisticUnits = rightPart.Split(new string[] { " " }, StringSplitOptions.RemoveEmptyEntries);
            foreach (string linguisticUnit in linguisticUnits)
            {
                newRightPart.LinguisticUnits.Add(new LinguisticUnit
                {
                    Name = linguisticUnit,
                    // Classify terminal vs non-terminal by the <...> convention.
                    Type = nonTerminalPattern.IsMatch(linguisticUnit)
                        ? LinguisticUnitType.NonTerminal
                        : LinguisticUnitType.Terminal
                });
            }
            newRule.RightParts.Add(newRightPart);
        }
        Grammar.Add(newRule);
    }
}
private static void IdList()
{
    // IdList1 -> IdList
    Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList1,
        new CompositeToken { TokenEnum.IdList }));

    // IdList -> ',' Id | IdList ',' Id
    Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList,
        new CompositeToken { TokenEnum.String(","), TokenEnum.Id() }));
    Grammar.Add(new GrammarReplaceRule(TokenEnum.IdList,
        new CompositeToken { TokenEnum.IdList, TokenEnum.String(","), TokenEnum.Id() }));

    // ProgramName -> "program" Id
    Grammar.Add(new GrammarReplaceRule(TokenEnum.ProgramName,
        new CompositeToken { TokenEnum.String("program"), TokenEnum.Id() }));
}
// Source Control
/// <summary>
/// Opens the source file and drives the lexer until EOF, printing each token
/// and registering unknown lexemes in the grammar as identifiers.
/// </summary>
/// <param name="source">Path of the source file to tokenize.</param>
public static void Set(string source)
{
    Token token;
    LEXEME = new StringBuilder();

    /*
     * Safety measure: append a newline when the file's last line is not
     * blank, so the final token/character is still recognized.
     */
    if (!String.IsNullOrWhiteSpace(File.ReadAllLines(source).Last()))
    {
        File.AppendAllText(source, Environment.NewLine);
    }

    SOURCE = new FileStream(source, FileMode.Open, FileAccess.ReadWrite);

    do
    {
        LEXEME.Clear();
        token = NextToken();

        if (token != null)
        {
            Console.WriteLine("Token: {0}\t Line: {1}\t Column: {2}", token.ToString(), ROW, COLUMN);
        }
        // Register lexemes the grammar does not know yet.
        if (Grammar.GetToken(GetLexeme()) == null && token != null)
        {
            Grammar.Add(token, new Identifier());
        }
        // Fix: the null check must short-circuit BEFORE token.Lexeme is
        // dereferenced; the original order threw NullReferenceException
        // whenever NextToken() returned null.
        // NOTE(review): Lexeme.Equals(Tag.EOF) only ends the loop if Tag.EOF
        // is comparable to a lexeme string — confirm Tag's type.
    } while (token != null && !token.Lexeme.Equals(Tag.EOF));

    SOURCE.Close();
}
// Lexer Control
/// <summary>
/// Drives the lexer over the already-open global source until EOF, printing
/// each token and registering unknown lexemes in the grammar.
/// </summary>
public static void Set()
{
    Token token;
    LEXEME = new StringBuilder();

    do
    {
        LEXEME.Clear();
        token = NextToken();

        if (token != null)
        {
            Console.WriteLine("Token: {0}\t Line: {1}\t Column: {2}", token.ToString(), ROW, COLUMN);
        }
        // Register lexemes the grammar does not know yet.
        if (Grammar.GetToken(GetLexeme()) == null && token != null)
        {
            Grammar.Add(token, token.Lexeme);
        }
        // Fix: the null check must short-circuit BEFORE token.Lexeme is
        // dereferenced; the original order threw NullReferenceException
        // whenever NextToken() returned null.
        // NOTE(review): Lexeme.Equals(Tag.EOF) only ends the loop if Tag.EOF
        // is comparable to a lexeme string — confirm Tag's type.
    } while (token != null && !token.Lexeme.Equals(Tag.EOF));

    Global.SOURCE.Close();
}
// Registers the Codeblock production: '{' Statements '}'.
// Opening the block establishes a symbol-table scope; closing it forwards the
// statements' syntax tree to this node.
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.Codeblock,
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new TerminalExpressionDefinition { TokenType = TokenType.BracketOpen },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Scope handling: reuse the parser's root symbol table at
                    // the top level, otherwise open a child of the nearest
                    // enclosing scope.
                    var symbolTableNode = node.FirstParentWithAttribute(ParserConstants.SymTable);
                    if (symbolTableNode == null)
                    {
                        node.Attributes[ParserConstants.SymTable] = node.Parser.RootSymbolTable;
                    }
                    else
                    {
                        node.Attributes[ParserConstants.SymTable] = symbolTableNode.GetAttribute <SymbolTable>(ParserConstants.SymTable).CreateChild();
                    }
                }),
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.Statements },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Forward the statements' AST to this node.
                    // NOTE(review): reads the plural key SyntaxTreeNodes but
                    // stores under the singular SyntaxTreeNode — confirm this
                    // asymmetry is intended.
                    node.Attributes.Add(ParserConstants.SyntaxTreeNode, node.GetAttributeForKey <StatementsASTNode>(ParserConstants.Statements, ParserConstants.SyntaxTreeNodes));
                }),
                new TerminalExpressionDefinition { TokenType = TokenType.BracketClose }
            }
        )
    ));
}
// Registers four productions:
//   Statement        -> OpenStatement | ClosedStatement (likely the classic
//                       dangling-else split — confirm),
//   OpenStatement    -> if / if-else forms,
//   ClosedStatement  -> fully delimited statements,
//   Factors          -> Factors ',' Factor | Factor | ε (builds a
//                       List<FactorASTNode> attribute).
public static void Initialize(ref Grammar grammar)
{
    // Statement alternatives forward the child's syntax-tree node.
    grammar.Add(new Production(ParserConstants.Statement, new List <SubProduction>
    {
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.OpenStatement },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    node.Attributes[ParserConstants.SyntaxTreeNode] = node.GetAttributeForKey <SyntaxTreeNode>(ParserConstants.OpenStatement, ParserConstants.SyntaxTreeNode);
                })
            }
        ),
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.ClosedStatement },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    node.Attributes[ParserConstants.SyntaxTreeNode] = node.GetAttributeForKey <SyntaxTreeNode>(ParserConstants.ClosedStatement, ParserConstants.SyntaxTreeNode);
                })
            }
        )
    }
    ));

    grammar.Add(new Production(ParserConstants.OpenStatement, new List <SubProduction>
    {
        If(),
        IfElse()
    }
    ));

    grammar.Add(new Production(ParserConstants.ClosedStatement, new List <SubProduction>
    {
        IfElseClosed(),
        Declaration(),
        Codeblock(),
        Assignment(),
        FunctionCall(),
        Return(),
        While()
    }
    ));

    // NOTE(review): the bare `Factors` used as an attribute key below is a
    // field/constant of the enclosing class, distinct from
    // ParserConstants.Factors — confirm the two keys are meant to differ.
    grammar.Add(new Production(ParserConstants.Factors, new List <SubProduction>()
    {
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.Factors },
                new TerminalExpressionDefinition { TokenType = TokenType.Comma },
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.Factor },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Recursive case: append the new factor to the collected list.
                    List <FactorASTNode> result = new List <FactorASTNode>();
                    List <FactorASTNode> factors = node.GetAttributeForKey <List <FactorASTNode> >(ParserConstants.Factors, Factors);
                    FactorASTNode factor = node.GetAttributeForKey <FactorASTNode>(ParserConstants.Factor, ParserConstants.SyntaxTreeNode);
                    result.AddRange(factors);
                    result.Add(factor);
                    node.Attributes.Add(Factors, result);
                })
            }
        ),
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new NonTerminalExpressionDefinition { Identifier = ParserConstants.Factor },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Single factor: start a fresh list.
                    List <FactorASTNode> factors = new List <FactorASTNode>();
                    FactorASTNode factor = node.GetAttributeForKey <FactorASTNode>(ParserConstants.Factor, ParserConstants.SyntaxTreeNode);
                    factors.Add(factor);
                    node.Attributes.Add(Factors, factors);
                })
            }
        ),
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new TerminalExpressionDefinition { TokenType = TokenType.EmptyString },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // Empty production: no factors.
                    node.Attributes.Add(Factors, new List <FactorASTNode>());
                })
            }
        )
    }
    ));
}
// Registers the current TOKEN in the grammar under the given lexeme key.
// NOTE(review): relies on the static TOKEN field being set by the caller
// beforehand — confirm TOKEN is always assigned before this runs.
private static void AddSemanticToken(string lexeme)
{
    Grammar.Add(TOKEN, lexeme);
}
// PasC:Automata
// Scans and returns the next token from Global.SOURCE using a hand-written
// DFA. STATE holds the current automaton state, LEXEME accumulates the
// characters of the token in progress. Returns the <EOF> token at end of
// input; exits the process on an unrecoverable I/O error.
public static Token NextToken()
{
    STATE = 0;
    LEXEME = new StringBuilder();
    while (true)
    {
        // --- read one character, maintaining ROW/COLUMN bookkeeping ---
        CURRENT_CHAR = '\u0000';
        try
        {
            LAST_CHAR = Global.SOURCE.ReadByte();
            if (LAST_CHAR != EOF)
            {
                CURRENT_CHAR = (char)LAST_CHAR;
                if (IsNewLine())
                {
                    ROW++;
                    COLUMN = 1;
                }
                else if (CURRENT_CHAR == '\t')
                {
                    // NOTE(review): tab advances the column by 3 — confirm
                    // the intended tab width.
                    COLUMN += 3;
                }
                else if (!IsNewLine())
                {
                    COLUMN++;
                }
            }
            else
            {
                // End of input: flag any unterminated block comment, then
                // emit and register the EOF token.
                MultilineCommentErrorCheck();
                Token _EOF_ = new Token(Tag.EOF, "<EOF>", ROW, COLUMN);
                Grammar.Add(_EOF_, _EOF_.Lexeme);
                return(_EOF_);
            }
        }
        catch (IOException e)
        {
            Console.WriteLine("[Error]: Failed to read the character '{0}'\n{1}", CURRENT_CHAR, e);
            Environment.Exit(1);
        }

        // --- DFA transition on CURRENT_CHAR ---
        switch (STATE)
        {
            // State 0: dispatch on the first significant character.
            case 0:
            {
                // ->> 0 : skip whitespace and newlines.
                if (Char.IsWhiteSpace(CURRENT_CHAR)) { SetState(0, false); }
                // ->> 0
                else if (IsNewLine()) { SetState(0, false); }
                // -> 1 : number literal.
                else if (Char.IsDigit(CURRENT_CHAR)) { SetState(1, true); }
                // -> 6 : character literal.
                else if (CURRENT_CHAR.Equals('\'')) { SetState(6, true); }
                // -> 9 : string literal.
                else if (CURRENT_CHAR.Equals('\"')) { SetState(9, true); }
                // -> 12 : identifier / keyword.
                else if (Char.IsLetter(CURRENT_CHAR)) { SetState(12, true); }
                // -> 14 : '=' or '=='.
                else if (CURRENT_CHAR.Equals('=')) { SetState(14, true); }
                // -> 17 : '>' or '>='.
                else if (CURRENT_CHAR.Equals('>')) { SetState(17, true); }
                // -> 20 : '<' or '<='.
                else if (CURRENT_CHAR.Equals('<')) { SetState(20, true); }
                // -> 23 : '!=' (lone '!' is an error).
                else if (CURRENT_CHAR.Equals('!')) { SetState(23, true); }
                // -> 25 : '/', '//' comment, or '/*' comment.
                else if (CURRENT_CHAR.Equals('/')) { SetState(25, true); }
                // -> (31) single-character operator/symbol tokens:
                else if (CURRENT_CHAR.Equals('*')) { SetState(0, true); return(AddToken(Tag.OP_MUL)); }
                // -> (32)
                else if (CURRENT_CHAR.Equals('+')) { SetState(0, true); return(AddToken(Tag.OP_AD)); }
                // -> (33)
                else if (CURRENT_CHAR.Equals('-')) { SetState(0, true); return(AddToken(Tag.OP_MIN)); }
                // -> (34)
                else if (CURRENT_CHAR.Equals('{')) { SetState(0, true); return(AddToken(Tag.SMB_OBC)); }
                // -> (35)
                else if (CURRENT_CHAR.Equals('}')) { SetState(0, true); return(AddToken(Tag.SMB_CBC)); }
                // -> (36)
                else if (CURRENT_CHAR.Equals('(')) { SetState(0, true); return(AddToken(Tag.SMB_OPA)); }
                // -> (37)
                else if (CURRENT_CHAR.Equals(')')) { SetState(0, true); return(AddToken(Tag.SMB_CPA)); }
                // -> (38)
                else if (CURRENT_CHAR.Equals(',')) { SetState(0, true); return(AddToken(Tag.SMB_COM)); }
                // -> (39)
                else if (CURRENT_CHAR.Equals(';')) { SetState(0, true); return(AddToken(Tag.SMB_SEM)); }
                // NONE: unrecognized character.
                else { LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 1: integer part of a number.
            case 1:
            {
                // ->> 1 : keep consuming digits.
                if (Char.IsDigit(CURRENT_CHAR)) { SetState(1, true); }
                // -> 3 : decimal point starts the fractional part.
                else if (CURRENT_CHAR.Equals('.')) { SetState(3, true); }
                // -> (2) [Other] : number ends; push the char back and emit.
                else { Restart(); return(AddToken(Tag.CON_NUM)); }
            }
            break;

            // State 3: just saw '.'; a digit must follow.
            case 3:
            {
                // -> 4
                if (Char.IsDigit(CURRENT_CHAR)) { SetState(4, true); }
                // -> [Error] : '.' not followed by a digit.
                else { LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 4: fractional digits.
            case 4:
            {
                // ->> 4
                if (Char.IsDigit(CURRENT_CHAR)) { SetState(4, true); }
                // -> (5) [Other] : number ends.
                else { Restart(); return(AddToken(Tag.CON_NUM)); }
            }
            break;

            // State 6: inside a character literal (after the opening quote).
            case 6:
            {
                // -> 7 : the single character.
                if (IsASCII(CURRENT_CHAR)) { SetState(7, true); }
                // -> [Error]
                else { LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 7: expecting the closing single quote.
            case 7:
            {
                // -> (8) : literal complete.
                if (CURRENT_CHAR.Equals('\'')) { SetState(0, true); return(AddToken(Tag.CON_CHAR)); }
                // -> [Error]
                else { LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 9: inside a string literal (after the opening quote).
            case 9:
            {
                // -> 10
                if (IsASCII(CURRENT_CHAR)) { SetState(10, true); }
                // -> 0 [Error]
                else { SetState(0, false); LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 10: string body; QUOTES_ERROR tracks a missing close quote.
            case 10:
            {
                // -> (11) : closing quote ends the literal.
                if (CURRENT_CHAR.Equals('\"')) { SetState(0, true); QUOTES_ERROR = false; return(AddToken(Tag.LIT)); }
                else if (IsNewLine() && !QUOTES_ERROR)
                {
                    // Newline inside a string: report once and enter recovery.
                    SetState(10, false);
                    QUOTES_ERROR = true;
                    LexicalError(String.Format("Missed closing quotes on line {0} and column {1}.", ROW - 1, COLUMN));
                }
                // ->> 10
                else if (IsASCII(CURRENT_CHAR))
                {
                    if (!QUOTES_ERROR) { SetState(10, true); }
                    else { LexicalError(String.Format("Invalid character '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
                }
            }
            break;

            // State 12: identifier/keyword body.
            case 12:
            {
                // ->> 12 : keep consuming letters and digits; if the lexeme so
                // far is a registered word (keyword), return its token.
                if (Char.IsLetter(CURRENT_CHAR) || Char.IsDigit(CURRENT_CHAR))
                {
                    SetState(12, true);
                    if (Grammar.GetToken(GetLexeme()) != null) { return(Grammar.GetToken(GetLexeme())); }
                }
                // -> (13) [Other] : plain identifier.
                else { Restart(); return(AddToken(Tag.ID)); }
            }
            break;

            // State 14: '=' seen. Both branches return, so no break is needed.
            case 14:
            {
                // -> (15) : '=='.
                if (CURRENT_CHAR.Equals('=')) { SetState(0, true); return(AddToken(Tag.OP_EQ)); }
                // -> (16) [Other] : assignment '='.
                else { Restart(); return(AddToken(Tag.OP_ASS)); }
            }

            // State 17: '>' seen.
            case 17:
            {
                // -> (18) : '>='.
                if (CURRENT_CHAR.Equals('=')) { SetState(0, true); return(AddToken(Tag.OP_GE)); }
                // -> (19) [Other] : '>'.
                else { Restart(); return(AddToken(Tag.OP_GT)); }
            }

            // State 20: '<' seen.
            case 20:
            {
                // -> (21) : '<='.
                if (CURRENT_CHAR.Equals('=')) { SetState(0, true); return(AddToken(Tag.OP_LE)); }
                // -> (22) [Other] : '<'.
                else { Restart(); return(AddToken(Tag.OP_LT)); }
            }

            // State 23: '!' seen; only '!=' is a valid token.
            case 23:
            {
                // -> (24)
                if (CURRENT_CHAR.Equals('=')) { SetState(0, true); return(AddToken(Tag.OP_NE)); }
                // -> 0 [Error]
                else { SetState(0, false); LexicalError(String.Format("Incomplete token for the symbol '{0}' on line {1} and column {2}.", CURRENT_CHAR, ROW, COLUMN)); }
            }
            break;

            // State 25: '/' seen — division or the start of a comment.
            case 25:
            {
                // -> 27 : block comment.
                if (CURRENT_CHAR.Equals('*')) { SetState(27, true); }
                // -> (26) : line comment.
                else if (CURRENT_CHAR.Equals('/')) { SetState(26, true); }
                // -> (30) [Other] : division operator.
                else { Restart(); return(AddToken(Tag.OP_DIV)); }
            }
            break;

            // State 26 [FINAL STATE]: line comment — consumed, no token emitted.
            case 26:
            {
                // ->> (26)
                if (IsASCII(CURRENT_CHAR)) { SetState(26, true); }
                // -> 0 : end of line terminates the comment.
                else
                {
                    SetState(0, false);
                    // return AddToken(Tag.COM_ONL);
                }
            }
            break;

            // State 27: block comment body.
            case 27:
            {
                // -> 28 : possible end of comment.
                if (CURRENT_CHAR.Equals('*')) { SetState(28, true); }
                // ->> 27
                else if (IsASCII(CURRENT_CHAR)) { SetState(27, true); }
            }
            break;

            // State 28: '*' seen inside a block comment.
            case 28:
            {
                // -> (29) : '*/' closes the comment, no token emitted.
                if (CURRENT_CHAR.Equals('/'))
                {
                    SetState(0, true);
                    // return AddToken(Tag.COM_CML);
                }
                // ->> 28 : another '*', stay ready to close.
                else if (CURRENT_CHAR.Equals('*')) { SetState(28, true); }
                // -> 27 : back to the comment body.
                else if (IsASCII(CURRENT_CHAR)) { SetState(27, true); }
            }
            break;
        }
    }
}
// Registers the Classes production (a right-recursive list of Class nodes,
// with an ε alternative producing an empty ClassesASTNode) and the Class
// production: "class" Identifier '{' ClassMembers '}'.
// NOTE(review): the second production is registered under the string literal
// "Class" rather than ParserConstants.Class — confirm the constants match.
public static void Initialize(ref Grammar grammar)
{
    grammar.Add(new Production(ParserConstants.Classes, new List <SubProduction>
    {
        new SubProduction(new List <ExpressionDefinition>()
        {
            new NonTerminalExpressionDefinition { Identifier = ParserConstants.Class },
            new NonTerminalExpressionDefinition { Identifier = ParserConstants.Classes },
            new SemanticActionDefinition((ParsingNode node) =>
            {
                // Prepend this Class's AST node to the tail list.
                ClassesASTNode astNode = new ClassesASTNode();
                astNode.Classes.Add(node.GetAttributeForKey <ClassASTNode>(ParserConstants.Class, ParserConstants.SyntaxTreeNode));
                astNode.Classes.AddRange(node.GetAttributeForKey <ClassesASTNode>(ParserConstants.Classes, ParserConstants.SyntaxTreeNode).Classes);
                node.Attributes.Add(ParserConstants.SyntaxTreeNode, astNode);
            })
        }),
        // Disabled single-class alternative kept from the original source:
        //new SubProduction(new List<ExpressionDefinition>()
        //{
        //    new NonTerminalExpressionDefinition { Identifier = ParserConstants.Class },
        //    new SemanticActionDefinition((ParsingNode node) =>
        //    {
        //        ClassesASTNode astNode = new ClassesASTNode();
        //        astNode.Classes.Add(node.GetAttributeForKey<ClassASTNode>(ParserConstants.Class, ParserConstants.SyntaxTreeNode));
        //        node.Attributes.Add(ParserConstants.SyntaxTreeNode, astNode);
        //    })
        //}),
        new SubProduction
        (
            new List <ExpressionDefinition>
            {
                new TerminalExpressionDefinition { TokenType = TokenType.EmptyString },
                new SemanticActionDefinition((ParsingNode node) =>
                {
                    // ε: no classes.
                    node.Attributes.Add(ParserConstants.SyntaxTreeNode, new ClassesASTNode());
                })
            }
        )
    }
    ));

    grammar.Add(new Production("Class",
        new SubProduction(new List <ExpressionDefinition>()
        {
            new TerminalExpressionDefinition { TokenType = TokenType.Class },
            new TerminalExpressionDefinition { TokenType = TokenType.Identifier },
            new SemanticActionDefinition((ParsingNode node) =>
            {
                // Open the class scope and create a symbol-table entry for
                // the class name; attach it to the Identifier child node.
                node.Attributes[ParserConstants.SymTable] = node.ChildSymbolTable;
                string key = node.GetAttributeForKey <WordToken>(ParserConstants.Identifier, ParserConstants.Token).Lexeme;
                SymbolTableEntry entry = node.ChildSymbolTable.Create(key, SymbolTableEntryType.Class);
                node.GetNodeForKey("Identifier").Attributes.Add(ParserConstants.SymbolTableEntry, entry);
                node.GetNodeForKey("Identifier").Attributes.Add(ParserConstants.SyntaxTreeNode, new IdentifierASTNode() { SymbolTableEntry = entry });
            }),
            new TerminalExpressionDefinition { TokenType = TokenType.BracketOpen },
            new NonTerminalExpressionDefinition { Identifier = ParserConstants.ClassMembers },
            new TerminalExpressionDefinition { TokenType = TokenType.BracketClose },
            new SemanticActionDefinition((ParsingNode node) =>
            {
                // Assemble the ClassASTNode from the name entry and members.
                ClassASTNode classASTNode = new ClassASTNode();
                classASTNode.ClassName = node.GetAttributeForKey <SymbolTableEntry>(ParserConstants.Identifier, ParserConstants.SymbolTableEntry);
                classASTNode.Children = node.GetAttributeForKey <List <SyntaxTreeNode> >(ParserConstants.ClassMembers, ParserConstants.SyntaxTreeNodes);
                node.Attributes.Add(ParserConstants.SyntaxTreeNode, classASTNode);
            })
        })
    ));
}