Example #1
 /// <summary>
 /// Adds all of the rules associated with assignment 1
 /// </summary>
 /// <param name="lexer">Lexer that will perform the analysis</param>
 public static void AddLexerRules(Lexer.Lexer lexer)
 {
     lexer.AddRule(@"if\b",
         s => new Token {Type = TokenType.If})
         .AddRule(@"then\b",
             s => new Token {Type = TokenType.Then})
         .AddRule(@"while\b",
             s => new Token {Type = TokenType.While})
         .AddRule(@"do\b",
             s => new Token {Type = TokenType.Do})
         .AddRule(@"input\b",
             s => new Token {Type = TokenType.Input})
         .AddRule(@"else\b",
             s => new Token {Type = TokenType.Else})
         .AddRule(@"begin\b",
             s => new Token {Type = TokenType.Begin})
         .AddRule(@"end\b",
             s => new Token {Type = TokenType.End})
         .AddRule(@"write\b",
             s => new Token {Type = TokenType.Write})
         .AddRule(@"[a-zA-Z_][a-zA-Z0-9_]*",
             s => new Token {Contents = s, Type = TokenType.Id})
          .AddRule(@"\d+",
             s => new Token {Contents = s, Type = TokenType.Num})
         .AddRule(@"\+",
             s => new Token {Type = TokenType.Add})
         .AddRule(@":=",
             s => new Token {Type = TokenType.Assign})
         .AddRule(@"-",
             s => new Token {Type = TokenType.Sub})
         .AddRule(@"\*",
             s => new Token {Type = TokenType.Mul})
         .AddRule(@"/",
             s => new Token {Type = TokenType.Div})
         .AddRule(@"\(",
             s => new Token {Type = TokenType.LPar})
         .AddRule(@"\)",
             s => new Token {Type = TokenType.RPar})
         .AddRule(@";",
             s => new Token {Type = TokenType.Semicolon});
 }
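
Note the rule ordering: the keyword patterns (each anchored with \b so that an input like "ifx" falls through to the next rule instead of matching "if") are registered before the generic identifier rule. From the lambdas above one can also infer the shape of Token and TokenType; here is a minimal reconstruction, assuming no members beyond the ones this example uses (the real definitions may carry more):

 public enum TokenType
 {
     If, Then, While, Do, Input, Else, Begin, End, Write,
     Id, Num, Add, Assign, Sub, Mul, Div, LPar, RPar, Semicolon
 }

 public class Token
 {
     // Token category; set by every rule above.
     public TokenType Type { get; set; }

     // Matched lexeme; only the Id and Num rules record it.
     public string Contents { get; set; }
 }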
Example #2
        private static void Main(string[] args)
        {
            // Load the input string from the source file
            var sourceString = GetSourceFileText(args);

            // Initialize the lexer and the recursive-descent parser
            var lexer = new Lexer.Lexer(Lexer.Lexer.LexerLoggingMode.None);
            var rdp = new RecursiveDescentParser();

            // Register the rules with the lexer and the parser
            RulesModule.AddLexerRules(lexer);
            RulesModule.AddRdpRules(rdp);

            try
            {
                Console.WriteLine("Parsing tokens from source...");
                // Tokenize the source string with the lexer
                ParseTokens(sourceString, lexer);
                // Build the AST from the token list
                Console.WriteLine("\n\nGenerating AST...");
                Node rootNode = GenerateAst(rdp, lexer);

                Console.WriteLine();

                // Generate the Graphviz AST visualization and the compiled code
                GenerateGraphvis(rootNode);
                GenerateCompiledCode(rootNode);
                Console.WriteLine("Success!");
            }
            catch (InvalidTokenException e)
            {
                // Lexing failed on an unrecognizable token
                Console.WriteLine($"Parse Error: {e.Message}");
            }
            catch (UnexpectedTokenException e)
            {
                // Syntax error: the parser hit an unexpected token
                Console.WriteLine(e.Message);
            }
        }
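
The helpers invoked from Main (GetSourceFileText, ParseTokens, GenerateAst, GenerateGraphvis, GenerateCompiledCode) are not shown on this page. As a minimal sketch, GetSourceFileText might read the source from the first command-line argument; the signature comes from the call above, but the body is an assumption:

        private static string GetSourceFileText(string[] args)
        {
            // Assumption: the first command-line argument names the source file.
            if (args.Length < 1)
                throw new ArgumentException("Expected a source file path as the first argument");

            // Read the whole source file into one string for the lexer.
            return System.IO.File.ReadAllText(args[0]);
        }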