/// <summary>
/// Parses a single C# source file, applies the (offset, replacement) tuples
/// collected by <see cref="TestListener"/> to the file's text, and writes the
/// result back to the same path. Files whose extension is not ".cs" are skipped.
/// </summary>
/// <param name="visitor">Kept for interface compatibility; currently unused (see commented call below).</param>
/// <param name="filePath">Path of the file to process.</param>
static void HandleFileCs(TestVisitor visitor, string filePath)
{
    FileInfo info = new FileInfo(filePath);
    if (info.Extension != ".cs")
    {
        return; // only C# sources are processed
    }

    AntlrFileStream stream = new AntlrFileStream(filePath);
    CSharpLexer lexer = new CSharpLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);

    // Parse exactly once. The original called compilation_unit() a second time
    // on the already-consumed token stream; that second (unused) parse yields
    // a bogus tree and wastes a full pass.
    CSharpParser.Compilation_unitContext startContext = parser.compilation_unit();

    TestListener listener = new TestListener(parser);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, startContext);

    // Apply each replacement at its recorded offset; Remove/Insert keeps the
    // overall text length stable because the replacement has the same length.
    StringBuilder streamwritter = new StringBuilder(stream.ToString());
    foreach (Tuple<int, string> tup in listener.GetTuples())
    {
        streamwritter.Remove(tup.Item1, tup.Item2.Length).Insert(tup.Item1, tup.Item2);
    }

    //visitor.Visit(startContext);

    // using guarantees the writer is flushed and closed even if Write throws
    // (the original only called Dispose on the success path).
    using (StreamWriter writer = new StreamWriter(filePath))
    {
        writer.Write(streamwritter);
    }
}
/// <summary>
/// Create TreeNode from stream: parses the character stream as a C#
/// compilation unit and populates <paramref name="tree"/> by walking the
/// parse tree with <see cref="Listener"/>.
/// </summary>
/// <param name="stream">Character stream containing C# source.</param>
/// <param name="tree">Root node the listener fills while walking.</param>
private static void GetTreeFromStream(ICharStream stream, TreeNode tree)
{
    CSharpLexer lexer = new CSharpLexer(stream);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);

    // Parse once. The original parsed a second time into an unused local,
    // which re-ran the parser on an exhausted token stream for no benefit.
    CSharpParser.Compilation_unitContext startContext = parser.compilation_unit();

    Listener listener = new Listener(tree);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, startContext);
}
/// <summary>
/// Lexes <paramref name="file"/>, evaluates C# preprocessor directives
/// (#if/#define/...) with <see cref="CSharpPreprocessorParser"/>, and parses
/// only the tokens belonging to active (compiled) regions.
/// </summary>
/// <param name="file">Path of the C# source file.</param>
/// <returns>The compilation unit parsed from the active code tokens.</returns>
private CSharpParser.Compilation_unitContext ParseFile(string file)
{
    IList<IToken> codeTokens = new List<IToken>();
    string sourceCode = File.ReadAllText(file);

    Lexer preprocessorLexer = new CSharpLexer(new AntlrInputStream(sourceCode));
    // First pass: collect every token; directives are evaluated below.
    IList<IToken> tokens = preprocessorLexer.GetAllTokens();

    IList<IToken> directiveTokens = new List<IToken>();
    ListTokenSource directiveTokenSource = new ListTokenSource(directiveTokens);
    CommonTokenStream directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
    CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);

    int index = 0;
    bool compiliedTokens = true; // true while the current region's code is active
    while (index < tokens.Count)
    {
        IToken token = tokens[index];
        if (token.Type == CSharpLexer.SHARP)
        {
            // Gather the tokens of this one directive: everything up to the
            // directive newline, EOF, or the next '#'.
            directiveTokens.Clear();
            int directiveTokenIndex = index + 1;
            while (directiveTokenIndex < tokens.Count &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.Eof &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                   tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
            {
                if (tokens[directiveTokenIndex].Channel != Lexer.Hidden &&
                    tokens[directiveTokenIndex].Channel != CSharpLexer.COMMENTS_CHANNEL)
                {
                    directiveTokens.Add(tokens[directiveTokenIndex]);
                }
                directiveTokenIndex++;
            }

            directiveTokenSource = new ListTokenSource(directiveTokens);
            directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            preprocessorParser.TokenStream = directiveTokenStream;
            // Reset the reused parser before parsing the next directive; the
            // other preprocessor loops in this file do this explicitly, and
            // omitting it risks stale parser state between directives.
            preprocessorParser.Reset();

            CSharpPreprocessorParser.Preprocessor_directiveContext directive =
                preprocessorParser.preprocessor_directive();
            // if true then the following code is valid and not ignored.
            compiliedTokens = directive.value;
            index = directiveTokenIndex - 1;
        }
        else if (token.Channel != Lexer.Hidden &&
                 token.Type != CSharpLexer.DIRECTIVE_NEW_LINE &&
                 token.Channel != CSharpLexer.COMMENTS_CHANNEL &&
                 compiliedTokens)
        {
            codeTokens.Add(token); // Collect code tokens.
        }
        index++;
    }

    // Second stage: parse the filtered code tokens in the usual way.
    ListTokenSource codeTokenSource = new ListTokenSource(codeTokens);
    CommonTokenStream codeTokenStream = new CommonTokenStream(codeTokenSource);
    CSharpParser parser = new CSharpParser(codeTokenStream);
    return (parser.compilation_unit());
}
/// <summary>
/// Entry point: reads the C# file named in args[0], parses it, prints the
/// parse tree, then builds and prints an AST via <see cref="ASTBuilder"/>.
/// </summary>
/// <param name="args">args[0] must be the path of the C# file to compile.</param>
public static void Main(string[] args)
{
    if (args.Length == 0)
    {
        Console.WriteLine("Please name the C# file that you would like to compile as a program argument.");
        return;
    }

    string source;
    // using disposes the reader (and underlying file handle) even if
    // ReadToEnd throws; the original never disposed it.
    using (StreamReader sr = new StreamReader(args[0]))
    {
        source = sr.ReadToEnd();
    }

    ICharStream stream = CharStreams.fromString(source);
    CSharpLexer lexer = new CSharpLexer(stream);
    ITokenStream tokenStream = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokenStream);
    parser.BuildParseTree = true;
    IParseTree tree = parser.compilation_unit();
    Console.WriteLine(tree.ToStringTree(parser));

    AST ast = new AST();
    SymbolTable symbolTable = new SymbolTable();
    ASTBuilder astBuilder = new ASTBuilder(ast, symbolTable);
    ParseTreeWalker.Default.Walk(astBuilder, tree);
    ast.Print();
}
/// <summary>
/// Entry point: parses the hard-coded test file, walks it with
/// <see cref="UpperCaseClassName"/>, and writes the rewritten code back via
/// the UpdateFile extension on the open stream.
/// </summary>
static void Main(string[] args)
{
    // FileStream has no single-argument constructor — an explicit FileMode is
    // required; ReadWrite access because UpdateFile rewrites the file below.
    FileStream stream = new FileStream(
        @"C:\Users\HONG PHI\source\repos\Caculator\ShowElementOfCSharpFile_InTree\UpperCaseTestFile.cs",
        FileMode.Open,
        FileAccess.ReadWrite);

    // CSharpLexer expects an ICharStream, not a raw FileStream, so the stream
    // is wrapped in an AntlrInputStream.
    CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(stream));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    RuleContext context = parser.compilation_unit();

    UpperCaseClassName listener = new UpperCaseClassName(tokens);
    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(listener, context);

    // Write the listener's corrected code back through the still-open stream.
    stream.UpdateFile(listener.ValidCode);
}
/// <summary>
/// Parses <paramref name="source"/> as a C# compilation unit and returns the
/// outer-class descriptors collected by <see cref="CompilationUnitListener"/>.
/// Exceptions are logged to the console and rethrown to the caller.
/// </summary>
/// <param name="source">C# source text to parse.</param>
/// <param name="filePath">Path associated with the source, forwarded to the listener.</param>
/// <returns>The listener's collected outer-class infos.</returns>
public static IEnumerable<ClassInfo> OuterClassInfosFromCSharpSource(
    string source, string filePath)
{
    try
    {
        var codeArray = source.ToCharArray();
        var inputStream = new AntlrInputStream(codeArray, codeArray.Length);
        var lexer = new CSharpLexer(inputStream);
        var commonTokenStream = new CommonTokenStream(lexer);
        var compilationUnitListener = new CompilationUnitListener(filePath);
        var parser = new CSharpParser(commonTokenStream);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ErrorListener());
        // NOTE(review): EnterRule only fires the listener's enter callback for
        // the root rule — it does not walk descendants. If nested rules should
        // be visited, ParseTreeWalker is needed; confirm against
        // CompilationUnitListener's intended design before changing.
        parser.compilation_unit().EnterRule(compilationUnitListener);
        return (compilationUnitListener.OuterClassInfos);
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
        throw;
    }
    // The original had an unreachable `return null;` after the rethrow
    // (CS0162); it has been removed.
}
// LOG scanning module: flags methods found by FindLoggingInMethod and appends
// one FAIL ItemObject per offending method to listResult.
private static List<ItemObject> scanLogging(string fileName, List<ItemObject> listResult)
{
    // Lazily create the result list when the caller passed nothing usable.
    if (listResult == null || listResult.Count == 0)
    {
        listResult = new List<ItemObject>();
    }

    var source = readFile2(fileName);
    var lex = new CSharpLexer(new AntlrInputStream(source));
    lex.RemoveErrorListeners();
    var tokenStream = new CommonTokenStream(lex);
    var csParser = new CSharpParser(tokenStream);
    IParseTree unit = csParser.compilation_unit();

    // Walk the whole compilation unit with the logging detector.
    var loggingListener = new FindLoggingInMethod(csParser);
    new ParseTreeWalker().Walk(loggingListener, unit);

    var flaggedMethods = loggingListener.listMethod;
    if (flaggedMethods != null)
    {
        foreach (var flagged in flaggedMethods)
        {
            listResult.Add(new ItemObject(
                flagged.BaselineItem, flagged.methodName, null,
                fileName, flagged.startLine, "FAIL"));
        }
    }
    return (listResult);
}
/// <summary>
/// Parses <paramref name="fileName"/> as a C# compilation unit, walks it with
/// <see cref="CsharpParseTree"/>, and returns the collected results tagged
/// with the source file name.
/// </summary>
/// <param name="fileName">Path of the file to parse.</param>
/// <returns>The parse results, or null if the name is blank or the file does not exist.</returns>
public static CsharpParseResults InvokeParse(string fileName)
{
    if (string.IsNullOrWhiteSpace(fileName))
    {
        return (null);
    }
    if (!System.IO.File.Exists(fileName))
    {
        return (null);
    }

    // using disposes the file stream even if lexing/parsing throws; the
    // original only called Close on the success path, leaking the handle
    // on any exception.
    using (var tr = System.IO.File.OpenRead(fileName))
    {
        var input = new AntlrInputStream(tr);
        var lexer = new CSharpLexer(input);
        var tokens = new CommonTokenStream(lexer);
        var parser = new CSharpParser(tokens);
        var tree = parser.compilation_unit();

        var walker = new ParseTreeWalker();
        var loader = new CsharpParseTree();
        walker.Walk(loader, tree);

        var results = loader.Results;
        results.SourceFile = fileName;
        return (results);
    }
}
// Parses `content` and caches both the resulting compilation-unit context and
// its token stream under `filePath`, replacing any previous entries.
public void InitOrUpdateParserTreeOfFile(string filePath, string content)
{
    var charStream = new AntlrInputStream(content);
    var lexer = new CSharpLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new CSharpParser(tokenStream);

    var root = parser.compilation_unit();

    // Indexer assignment inserts or overwrites, so this works for both the
    // initial parse and later updates of the same file.
    _parserRuleContextOfFile[filePath] = root;
    tokenStreams[filePath] = tokenStream;
}
// SQL scanning module: finds methods that build SQL queries and reports each
// offending method as a FAIL ItemObject appended to listResult.
private static List <ItemObject> scanSQL(string fileName, List <ItemObject> listResult)
{
    // Lazily create the result list when the caller passed nothing usable.
    if (listResult == null || listResult.Count == 0) { listResult = new List <ItemObject>(); }

    // Parse the whole file; lexer errors are silenced deliberately.
    string code = readFile2(fileName);
    CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(code));
    lexer.RemoveErrorListeners();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    IParseTree tree = parser.compilation_unit();

    // First walk: collect per-method contexts with ExtractClassParser.
    ParseTreeWalker walker = new ParseTreeWalker();
    ExtractClassParser listener = new ExtractClassParser(parser);
    //FindGlobalVariable listener = new FindGlobalVariable(parser);
    walker.Walk(listener, tree);
    //}
    //Main tracer
    //sql
    if (listener.listMethodContext != null)
    {
        //Console.WriteLine(filename);
        List <MethodContext> listMethod = listener.getListMethod();
        // For each method, chain three walks over the SAME method context:
        // 1) find query/command expressions, 2) resolve their source lines,
        // 3) trace methods used within the class. Order matters: each walk
        // feeds its output (line lists, variable names) into the next.
        foreach (var method in listMethod)
        {
            ParseTreeWalker methodWalker = new ParseTreeWalker();
            FindQueryInMethod queryListener = new FindQueryInMethod(parser, method.lineList);
            methodWalker.Walk(queryListener, method.context);
            FindLineOfExpression lineListener = new FindLineOfExpression(parser, method.context, queryListener.listExpressLine, queryListener.commandVar, queryListener.queryVar);
            methodWalker.Walk(lineListener, method.context);
            method.lineList = lineListener.listExpressLine;
            FindUsedMethodInClass methodListener = new FindUsedMethodInClass(parser, method);
            methodWalker.Walk(methodListener, method.context);
            // Report every finding from the final walk as a FAIL item.
            if (methodListener.listResult != null)
            {
                foreach (var item in methodListener.listResult)
                {
                    ItemObject obj = new ItemObject(item.BaselineItem, item.methodName, item.listExp, fileName, item.startLine, "FAIL");
                    listResult.Add(obj);
                }
            }
        }
    }
    return (listResult);
}
/// <summary>
/// Minimal driver: parses the single statement "using System;" and invokes
/// the listener's compilation-unit callback on the resulting tree.
/// </summary>
static void Main(string[] args)
{
    String input = "using System;";
    // The ANTLR4 runtime factory method is CharStreams.fromString — the
    // original's lowercase "fromstring" does not exist and fails to compile.
    ICharStream stream = CharStreams.fromString(input);
    ITokenSource lexer = new CSharpLexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);
    var parser = new CSharpParser(tokens);
    parser.BuildParseTree = true;
    var tree = parser.compilation_unit();
    var listener = new Listener();
    // NOTE(review): this fires only the single enter callback for the root
    // rule; to visit the whole tree, use ParseTreeWalker.Default.Walk —
    // confirm which behavior Listener expects.
    listener.EnterCompilation_unit(tree);
}
// Parses "Program.cs" from the working directory, reporting syntax problems
// through a custom error listener instead of ANTLR's console default.
static void Main(string[] args)
{
    using (StreamReader reader = new StreamReader("Program.cs"))
    {
        var input = new AntlrInputStream(reader);
        var lexer = new CSharpLexer(input);
        var tokens = new CommonTokenStream(lexer);
        var parser = new CSharpParser(tokens);

        // Drop the default console listener and install ours.
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ErrorListener()); // add ours

        var compilationUnit = parser.compilation_unit();
    }
}
/// <summary>
/// Parses <paramref name="file"/> as a C# compilation unit and feeds each
/// top-level child of the root context to <see cref="ParseNode"/>.
/// </summary>
/// <param name="file">The file to parse.</param>
public override void Parse(FileInfo file)
{
    Lexer lexer = new CSharpLexer(CharStreams.fromPath(file.FullName));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    ParserRuleContext context = parser.compilation_unit();

    var children = context.children;
    // ParserRuleContext.children is null (not empty) when the rule matched no
    // children; the original foreach would throw a NullReferenceException
    // on an empty/uns parsable input.
    if (children == null)
    {
        return;
    }
    foreach (IParseTree child in children)
    {
        this.ParseNode(child);
    }
}
// Parses one .cs file and walks it with the supplied listener; files with any
// other extension are skipped. Syntax errors are routed to CustomError.
static void HandleFileCs(TestListener listener, string filePath)
{
    var fileInfo = new FileInfo(filePath);
    if (fileInfo.Extension == ".cs")
    {
        var charStream = new AntlrFileStream(filePath);
        var lexer = new CSharpLexer(charStream);
        var tokenStream = new CommonTokenStream(lexer);
        var parser = new CSharpParser(tokenStream);

        // Replace the default console error reporting with our own handler.
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new CustomError());

        var root = parser.compilation_unit();
        new ParseTreeWalker().Walk(listener, root);
    }
}
// Tokenizes `input` with CSharpLexer, parses the full token list as a
// compilation unit, and hands the resulting tree to TreeBuilder.
private void ParseCSharp(string input)
{
    // Collect all tokens with lexer (CSharpLexer.g4).
    Lexer lexer = new CSharpLexer(new AntlrInputStream(input));
    var allTokens = lexer.GetAllTokens();

    //TODO: handle preprocessor tokens

    var source = new ListTokenSource(allTokens);
    var stream = new CommonTokenStream(source);
    CSharpParser parser = new CSharpParser(stream);

    // Parse syntax tree (CSharpParser.g4)
    var unit = parser.compilation_unit();
    if (unit != null)
    {
        new TreeBuilder(parser, unit, treeModel).Build();
    }
}
// XXE scanning module: locates XML-handling contexts and reports each method
// flagged as XXE-vulnerable as a FAIL ItemObject appended to listResult.
private static List <ItemObject> scanXXE(string fileName, List <ItemObject> listResult)
{
    // Lazily create the result list when the caller passed nothing usable.
    if (listResult == null || listResult.Count == 0) { listResult = new List <ItemObject>(); }

    // Parse the whole file; lexer errors are silenced deliberately.
    string code = readFile2(fileName);
    CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(code));
    lexer.RemoveErrorListeners();
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    CSharpParser parser = new CSharpParser(tokens);
    IParseTree tree = parser.compilation_unit();

    // First walk: ExtractClassParser collects the XML-related contexts.
    ParseTreeWalker walker = new ParseTreeWalker();
    ExtractClassParser listener = new ExtractClassParser(parser);
    //FindGlobalVariable listener = new FindGlobalVariable(parser);
    walker.Walk(listener, tree);

    if (listener.listXMLContext != null)
    {
        List <ParserRuleContext> listMethod = listener.listXMLContext;
        // Second walk per context: FindXXEInMethod decides vulnerability and
        // records the offending method's metadata in tmpMethod.
        foreach (var method in listMethod)
        {
            ParseTreeWalker methodWalker = new ParseTreeWalker();
            FindXXEInMethod methodListener = new FindXXEInMethod(parser);
            methodWalker.Walk(methodListener, method);
            if (methodListener.isVuln)
            {
                ItemObject obj = new ItemObject(methodListener.tmpMethod.BaselineItem, methodListener.tmpMethod.methodName, null, fileName, methodListener.tmpMethod.startLine, "FAIL");
                listResult.Add(obj);
            }
        }
    }
    return (listResult);
}
/// <summary>
/// Entry point: parses command-line options, then parses each named C# file
/// and prints its parenthesized AST to stderr (optionally redirected to a
/// file via Runtime.Redirect). Stops at the first file with syntax errors.
/// </summary>
static void Main(string[] args)
{
    List<string> arguments = new List<string>();
    string ast_output_file = null;
    CommandLine.Parser.Default.ParseArguments<Options>(args)
        .WithParsed<Options>(o =>
        {
            arguments = o.CsharpFiles.ToList();
            ast_output_file = o.AstOutFile;
        })
        .WithNotParsed(a => { System.Console.Error.WriteLine(a); });

    // using guarantees the redirect is disposed even on the early return
    // below — the original leaked it whenever a parse error occurred.
    using (Runtime.Redirect r = new Runtime.Redirect(ast_output_file))
    {
        foreach (var file_name in arguments)
        {
            var code_as_string = File.ReadAllText(file_name);
            var input = new AntlrInputStream(code_as_string);
            var lexer = new CSharpLexer(input);
            var tokens = new CommonTokenStream(lexer);
            var parser = new CSharpParser(tokens);
            var listener = new ErrorListener<IToken>();
            parser.AddErrorListener(listener);
            CSharpParser.Compilation_unitContext tree = parser.compilation_unit();
            if (listener.had_error)
            {
                return; // abort on the first file that fails to parse
            }
            var sb = new StringBuilder();
            var ser = new Runtime.AstHelpers();
            ser.ParenthesizedAST(sb, file_name, tree, tokens);
            System.Console.Error.WriteLine(sb.ToString());
        }
    }
}
/// <summary>
/// For every *.cs file in the current directory: lex it, evaluate and strip
/// preprocessor directives from the token list in place, then parse the
/// remaining tokens and walk the tree with ProgressPrinter.
/// </summary>
public static void Main(string[] args)
{
    try
    {
        string[] files = Directory.GetFiles(".", "*.cs");
        foreach (string file in files)
        {
            Console.WriteLine(file + "---------------------------------------");
            //Read the file
            string text = File.ReadAllText(file);
            //Create the lexer
            CSharpLexer lexer = new CSharpLexer(new AntlrInputStream(text));
            var tokens = lexer.GetAllTokens();
            List <IToken> codeTokens = new List <IToken>();
            List <IToken> commentTokens = new List <IToken>();
            var directiveTokens = new List <IToken>();
            var directiveTokenSource = new ListTokenSource(directiveTokens);
            var directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);
            int index = 0;
            // true while tokens belong to an active (#if-true) region
            bool compiliedTokens = true;
            while (index < tokens.Count)
            {
                var token = tokens[index];
                if (token.Type == CSharpLexer.SHARP)
                {
                    directiveTokens.Clear();
                    int directiveTokenIndex = index + 1;
                    // Collect all preprocessor directive tokens.
                    while (directiveTokenIndex < tokens.Count && tokens[directiveTokenIndex].Type != CSharpLexer.Eof && tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE && tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
                    {
                        if (tokens[directiveTokenIndex].Channel == CSharpLexer.COMMENTS_CHANNEL)
                        {
                            commentTokens.Add(tokens[directiveTokenIndex]);
                        }
                        else if (tokens[directiveTokenIndex].Channel != Lexer.Hidden)
                        {
                            //Console.WriteLine(allTokens[directiveTokenIndex] + " HOLA");
                            directiveTokens.Add(tokens[directiveTokenIndex]);
                        }
                        directiveTokenIndex++;
                    }
                    directiveTokenSource = new ListTokenSource(directiveTokens);
                    directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
                    preprocessorParser.TokenStream = directiveTokenStream;
                    //preprocessorParser.SetInputStream(directiveTokenStream);
                    preprocessorParser.Reset();
                    // Parse condition in preprocessor directive (based on CSharpPreprocessorParser.g4 grammar).
                    CSharpPreprocessorParser.Preprocessor_directiveContext directive = preprocessorParser.preprocessor_directive();
                    // if true then next code is valid and not ignored.
                    compiliedTokens = directive.value;
                    String directiveStr = tokens[index + 1].Text.Trim();
                    // These directive kinds never disable the following code.
                    if ("line".Equals(directiveStr) || "error".Equals(directiveStr) || "warning".Equals(directiveStr) || "define".Equals(directiveStr) || "endregion".Equals(directiveStr) || "endif".Equals(directiveStr) || "pragma".Equals(directiveStr))
                    {
                        //Console.WriteLine(directiveStr);
                        compiliedTokens = true;
                    }
                    String conditionalSymbol = null;
                    if ("define".Equals(tokens[index + 1].Text))
                    {
                        // add to the conditional symbols
                        conditionalSymbol = tokens[index + 2].Text;
                        preprocessorParser.ConditionalSymbols.Add(conditionalSymbol);
                    }
                    if ("undef".Equals(tokens[index + 1].Text))
                    {
                        conditionalSymbol = tokens[index + 2].Text;
                        preprocessorParser.ConditionalSymbols.Remove(conditionalSymbol);
                    }
                    //This code deletes the directive tokens from the input so that they don't interfere with the parsing process
                    // In all of the cases, we have to remove at least two positions of the tokens array
                    tokens.RemoveAt(directiveTokenIndex - 1);
                    tokens.RemoveAt(directiveTokenIndex - 2);
                    if ("pragma".Equals(directiveStr) || "warning".Equals(directiveStr) || "region".Equals(directiveStr) || "error".Equals(directiveStr))
                    {
                        // Remove three positions before
                        tokens.RemoveAt(directiveTokenIndex - 3);
                        directiveTokenIndex--;
                    }
                    else if ("define".Equals(directiveStr) || "undef".Equals(directiveStr) || "if".Equals(directiveStr) || "elif".Equals(directiveStr) || "line".Equals(directiveStr))
                    {
                        // Remove four positions before
                        tokens.RemoveAt(directiveTokenIndex - 3);
                        tokens.RemoveAt(directiveTokenIndex - 4);
                        directiveTokenIndex -= 2;
                    }
                    // Account for the two unconditional removals above.
                    directiveTokenIndex -= 2;
                    index = directiveTokenIndex - 1;
                }
                else if (token.Channel == CSharpLexer.COMMENTS_CHANNEL)
                {
                    commentTokens.Add(token); // Collect comment tokens (if required).
                }
                else if (token.Channel != Lexer.Hidden && token.Type != CSharpLexer.DIRECTIVE_NEW_LINE && compiliedTokens)
                {
                    codeTokens.Add(token); // Collect code tokens.
                }
                index++;
            }
            // At second stage tokens parsed in usual way.
            // NOTE(review): this parses `tokens` (the in-place-stripped full
            // list), not the collected `codeTokens` — apparently intentional
            // given the RemoveAt machinery above, but it means codeTokens is
            // never used; confirm which list is meant (compare Try() below,
            // which parses codeTokens).
            var codeTokenSource = new ListTokenSource(tokens);
            var codeTokenStream = new CommonTokenStream(codeTokenSource);
            CSharpParser parser = new CSharpParser(codeTokenStream);
            ////Create the token stream
            //CommonTokenStream tokens = new CommonTokenStream(lexer);
            //CSharpParser parser = new CSharpParser(tokens);
            IParseTree tree = parser.compilation_unit();
            ////Walk the tree
            ParseTreeWalker walker = new ParseTreeWalker();
            walker.Walk(new ProgressPrinter(), tree);
        }
    }
    catch (Exception e)
    {
        Console.WriteLine("Error (Program.cs): " + e);
    }
}
/// <summary>
/// Parses <paramref name="filename"/> with the (ANTLR 2-style) C# lexer and
/// parser and returns the resulting AST, tagged with the file name.
/// </summary>
/// <param name="filename">Path of the C# source file.</param>
/// <returns>The AST produced by the parser.</returns>
private static CSharpAST GetAST(string filename)
{
    FileStream s = new FileStream(filename, FileMode.Open, FileAccess.Read);
    try
    {
        CSharpLexer lexer = new CSharpLexer(s);
        lexer.setFilename(filename);
        CSharpParser parser = new CSharpParser(lexer);
        parser.setFilename(filename);
        parser.compilation_unit();
        CSharpAST antlrTree = (CSharpAST)(parser.getAST());
        antlrTree.FileName = filename;
        return antlrTree;
    }
    finally
    {
        // finally guarantees the file handle is released even if parsing
        // throws; the original leaked it on any exception.
        s.Close();
    }
}
/// <summary>
/// Parses the file <paramref name="ffn"/>: lexes the whole text, evaluates
/// preprocessor directives to decide which tokens are active, then parses the
/// active code tokens and prints the token stream and tree (or an error).
/// </summary>
/// <param name="ffn">Full file name of the C# source to parse.</param>
static void Try(string ffn)
{
    var sourceCode = System.IO.File.ReadAllText(ffn);
    List <IToken> codeTokens = new List <IToken>();
    List <IToken> commentTokens = new List <IToken>();
    Lexer preprocessorLexer = new CSharpLexer(new AntlrInputStream(sourceCode));
    // Collect all tokens with lexer (CSharpLexer.g4).
    var tokens = preprocessorLexer.GetAllTokens();
    var directiveTokens = new List <IToken>();
    var directiveTokenSource = new ListTokenSource(directiveTokens);
    var directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
    CSharpPreprocessorParser preprocessorParser = new CSharpPreprocessorParser(directiveTokenStream);
    int index = 0;
    // true while tokens belong to an active (#if-true) region
    bool compiliedTokens = true;
    while (index < tokens.Count)
    {
        var token = tokens[index];
        if (token.Type == CSharpLexer.SHARP)
        {
            directiveTokens.Clear();
            int directiveTokenIndex = index + 1;
            // Collect all preprocessor directive tokens.
            while (directiveTokenIndex < tokens.Count && tokens[directiveTokenIndex].Type != CSharpLexer.Eof && tokens[directiveTokenIndex].Type != CSharpLexer.DIRECTIVE_NEW_LINE && tokens[directiveTokenIndex].Type != CSharpLexer.SHARP)
            {
                if (tokens[directiveTokenIndex].Channel == CSharpLexer.COMMENTS_CHANNEL)
                {
                    commentTokens.Add(tokens[directiveTokenIndex]);
                }
                else if (tokens[directiveTokenIndex].Channel != Lexer.Hidden)
                {
                    directiveTokens.Add(tokens[directiveTokenIndex]);
                }
                directiveTokenIndex++;
            }
            directiveTokenSource = new ListTokenSource(directiveTokens);
            directiveTokenStream = new CommonTokenStream(directiveTokenSource, CSharpLexer.DIRECTIVE);
            preprocessorParser.TokenStream = directiveTokenStream;
            preprocessorParser.Reset();
            // Parse condition in preprocessor directive (based on CSharpPreprocessorParser.g4 grammar).
            CSharpPreprocessorParser.Preprocessor_directiveContext directive = preprocessorParser.preprocessor_directive();
            // if true then next code is valid and not ignored.
            compiliedTokens = directive.value;
            var directiveStr = tokens[index + 1].Text.Trim();
            // These directive kinds never disable the following code.
            if ("line".Equals(directiveStr) || "error".Equals(directiveStr) || "warning".Equals(directiveStr) || "define".Equals(directiveStr) || "endregion".Equals(directiveStr) || "endif".Equals(directiveStr) || "pragma".Equals(directiveStr))
            {
                compiliedTokens = true;
            }
            string conditionalSymbol = null;
            if ("define".Equals(tokens[index + 1].Text))
            {
                // add to the conditional symbols
                conditionalSymbol = tokens[index + 2].Text;
                preprocessorParser.ConditionalSymbols.Add(conditionalSymbol);
            }
            if ("undef".Equals(tokens[index + 1].Text))
            {
                conditionalSymbol = tokens[index + 2].Text;
                preprocessorParser.ConditionalSymbols.Remove(conditionalSymbol);
            }
            // Skip past the directive's tokens.
            index = directiveTokenIndex - 1;
        }
        else if (token.Channel == CSharpLexer.COMMENTS_CHANNEL)
        {
            commentTokens.Add(token); // Collect comment tokens (if required).
        }
        else if (token.Channel != Lexer.Hidden && token.Type != CSharpLexer.DIRECTIVE_NEW_LINE && compiliedTokens)
        {
            codeTokens.Add(token); // Collect code tokens.
        }
        index++;
    }
    // At second stage tokens parsed in usual way.
    var codeTokenSource = new ListTokenSource(codeTokens);
    var codeTokenStream = new CommonTokenStream(codeTokenSource);
    CSharpParser parser = new CSharpParser(codeTokenStream);
    // Parse syntax tree (CSharpParser.g4)
    var listener = new ErrorListener <IToken>(parser, preprocessorLexer, codeTokenStream);
    parser.AddErrorListener(listener);
    var tree = parser.compilation_unit();
    if (listener.had_error)
    {
        System.Console.WriteLine("error in parse.");
    }
    else
    {
        System.Console.WriteLine("parse completed.");
    }
    System.Console.WriteLine(codeTokenStream.OutputTokens(preprocessorLexer));
    System.Console.WriteLine(tree.OutputTree(codeTokenStream, preprocessorLexer));
}