public static IEnumerable<ClassInfo> OuterClassInfosFromSource(string source, string filePath)
{
    try
    {
        char[] codeArray = source.ToCharArray();
        AntlrInputStream inputStream = new AntlrInputStream(codeArray, codeArray.Length);
        JavaLexer lexer = new JavaLexer(inputStream);
        CommonTokenStream commonTokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(commonTokenStream);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(new ErrorListener()); // add ours

        // a compilation unit is the highest-level container -> start there
        // do not call parser.compilationUnit() more than once
        CompilationUnitListener compilationUnitListener = new CompilationUnitListener(filePath);
        parser.compilationUnit().EnterRule(compilationUnitListener);
        return compilationUnitListener.OuterClassInfos;
    }
    catch (Exception e)
    {
        Console.WriteLine(e);
    }

    return new List<ClassInfo>();
}
public static bool TryGetLineStatements(string text, int lineNumber, out IList<IParseTree> statementTrees, out IList<IToken> tokens)
{
    Contract.Requires<ArgumentNullException>(text != null, "text");
    Contract.Requires<ArgumentOutOfRangeException>(lineNumber >= 0);

    try
    {
        AntlrInputStream input = new AntlrInputStream(text);
        JavaLexer lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext result = parser.compilationUnit();

        statementTrees = null;
        tokens = tokenStream.GetTokens();
        AssociatedTreeListener listener = new AssociatedTreeListener(lineNumber, tokens);
        ParseTreeWalker.Default.Walk(listener, result);
        statementTrees = listener.StatementTrees;
        return true;
    }
    catch (Exception e)
    {
        if (ErrorHandler.IsCriticalException(e))
            throw;

        statementTrees = null;
        tokens = null;
        return false;
    }
}
static void Main(string[] args)
{
    var fileName = args[0];
    var stream = new AntlrInputStream(File.ReadAllText(fileName));
    var lexer = new JavaLexer(stream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new JavaParser(tokenStream);
    var classContext = parser.class_definition();
    var methodContext = classContext.method();
    var program = new IRVisitor().Visit(methodContext.body());

    Console.WriteLine("IR:");
    Console.WriteLine(string.Join("\n", program.Select(x => " " + x.ToString())));
    Console.WriteLine();

    var blocks = BasicBlockCreator.Create(program);
    Console.WriteLine("Basic Blocks:");
    var graphViz = BasicBlockCreator.DumpGraphViz(blocks);
    Console.WriteLine(graphViz);

    var definitions = DFA.GetReachingDefinitions(blocks.Last());
    Console.WriteLine("Reaching definitions: " + string.Join(", ", definitions));
}
static void Main(string[] args)
{
    if (args.Length > 0)
    {
        var visitor = new JavaTreeVisitor(args[0]);
        var package = new List<string>();

        if (args.Length > 1)
        {
            foreach (var path in args.Skip(1))
            {
                package.AddRange(Directory.GetFiles(path, "*.java", SearchOption.AllDirectories));
            }
        }
        else
        {
            var landResultsFile = Environment.GetFolderPath(Environment.SpecialFolder.MyDocuments)
                + @"\LanD Workspace\last_batch_parsing_report.txt";

            if (File.Exists(landResultsFile))
            {
                package.AddRange(Directory.GetFiles(
                    File.ReadAllLines(landResultsFile)[1], "*.java", SearchOption.AllDirectories));
            }
            else
            {
                Console.WriteLine("No directories specified for parsing");
                return;
            }
        }

        var totalTime = new TimeSpan();

        foreach (var filename in package)
        {
            var inputStream = new AntlrInputStream(File.ReadAllText(filename));
            var lexer = new JavaLexer(inputStream);
            var parser = new JavaParser(new CommonTokenStream(lexer));

            /// Run the parser
            var startTime = DateTime.Now;
            var context = parser.compilationUnit();
            totalTime += DateTime.Now - startTime;

            visitor.SetFile(filename);
            context.Accept(visitor);
        }

        visitor.CloseOutputs();

        Console.WriteLine($"methods: {visitor.MethodCounter}");
        Console.WriteLine($"classes: {visitor.ClassInterfaceCounter}");
        Console.WriteLine($"enums: {visitor.EnumCounter}");
        Console.WriteLine($"fields: {visitor.FieldCounter}");
        Console.WriteLine($"field declarations: {visitor.FieldDeclarationCounter}");
        Console.WriteLine(totalTime.ToString(@"hh\:mm\:ss\:ff"));
    }
}
private static JavaLexer GetLexer(string path)
{
    var stream = new ANTLRStringStream(File.ReadAllText(path).ToLower());
    var lexer = new JavaLexer(stream, new RecognizerSharedState { errorRecovery = true });
    return lexer;
}
static void Main(string[] args)
{
    string in_filename = @"..\..\NOD.java";
    string out_filename = @"..\..\NOD.class";

    AntlrInputStream inputStream = new AntlrFileStream(in_filename);
    var javaLexer = new JavaLexer(inputStream);
    CommonTokenStream commonTokenStream = new CommonTokenStream(javaLexer);
    var javaParser = new JavaParser(commonTokenStream);

    var visitor = new JavaVisitor(new StackFrame());
    visitor.Visit(javaParser.compileUnit());
    var stackFrame = visitor.StackFrame;

    var initInd = stackFrame.GetOrAddMethodref("<init>", "()V", "java/lang/Object");
    var methods = new List<MethodInfo>();

    var initMethod = new MethodInfo(MethodAccessFlags.ACC_PUBLIC,
        stackFrame.GetOrAddString("<init>"), stackFrame.GetOrAddString("()V"));
    var initCodeInstructions = new List<byte>();
    initCodeInstructions.Add(0x2A);                           // aload_0
    initCodeInstructions.Add(0xB7);                           // invokespecial
    initCodeInstructions.AddRange(Utils.WriteUShort(initInd));
    initCodeInstructions.Add(0xB1);                           // return
    var initCodeAttribute = new CodeAttributeParser(1, 1, initCodeInstructions);
    var initData = initCodeAttribute.BuildData();
    initMethod.AttributesCount = 1;
    initMethod.Attributes = new AttributeDescription[1]
    {
        new AttributeDescription(stackFrame.GetOrAddString("Code"), (uint)initData.Length, initData)
    };
    methods.Add(initMethod);

    var mainMethod = new MethodInfo(MethodAccessFlags.ACC_PUBLIC | MethodAccessFlags.ACC_STATIC,
        stackFrame.GetOrAddString("main"), stackFrame.GetOrAddString("([Ljava/lang/String;)V"));
    var mainCodeAttribute = new CodeAttributeParser(stackFrame.maxStackSize, stackFrame.maxLocalsSize,
        stackFrame.BuildInstructions());
    var mainData = mainCodeAttribute.BuildData();
    mainMethod.AttributesCount = 1;
    mainMethod.Attributes = new AttributeDescription[1]
    {
        new AttributeDescription(stackFrame.GetOrAddString("Code"), (uint)mainData.Length, mainData)
    };
    methods.Add(mainMethod);

    var classFile = ClassFile.CreateClassFile(0, 52,
        ClassAccessFlags.ACC_PUBLIC | ClassAccessFlags.ACC_SUPER,
        stackFrame.thisClass, stackFrame.constantPool, methods);
    classFile.SuperClass = stackFrame.GetOrAddClass("java/lang/Object");

    File.WriteAllBytes(out_filename, classFile.BuildClassFile());
}
public void Test()
{
    var inputStream = new AntlrInputStream("public /*aa*/ class Klass { }");
    var javaLexer = new JavaLexer(inputStream);
    var commonTokenStream = new CommonTokenStream(javaLexer);
    var javaParser = new JavaParser(commonTokenStream);
    var context = javaParser.compilationUnit();
    var visitor = new CstBuilderForAntlr4(javaParser);
    visitor.Visit(context);
    Console.WriteLine(visitor.FinishParsing().ToXml());
}
//JAVA TO C# CONVERTER WARNING: Method 'throws' clauses are not available in .NET:
//ORIGINAL LINE: public static void main(String[] args) throws Exception
public static void Main(string[] args)
{
    Thread.Sleep(10000);
    ANTLRFileStream input = new ANTLRFileStream(args[0]);
    JavaLexer lexer = new JavaLexer(input);
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JavaParser parser = new JavaParser(tokens);
    JavaParser.CompilationUnitContext tree = parser.compilationUnit();
    // System.out.println(tree.toStringTree(parser));
    Thread.Sleep(10000);
}
private void ParseJava(string input)
{
    AntlrInputStream stream = new AntlrInputStream(input);
    ITokenSource lexer = new JavaLexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);
    JavaParser parser = new JavaParser(tokens);
    parser.BuildParseTree = true;
    JavaParser.CompilationUnitContext tree = parser.compilationUnit();

    if (tree != null)
    {
        var builder = new TreeBuilder(parser, tree, treeModel);
        builder.Build();
    }
}
static void TestDefinitions(string resourceName, IEnumerable<int> expected)
{
    var resourceStream = typeof(Tests).Assembly.GetManifestResourceStream($"pt_dfa.tests.{resourceName}");
    var stream = new AntlrInputStream(resourceStream);
    var lexer = new JavaLexer(stream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new JavaParser(tokenStream);
    var classContext = parser.class_definition();
    var methodContext = classContext.method();
    var ir = methodContext.Accept(new IRVisitor());
    var blocks = BasicBlockCreator.Create(ir);
    var definitions = DFA.GetReachingDefinitions(blocks.Last());
    Assert.IsTrue(definitions.SequenceEqual(expected));
}
public string Transpile(string javaCode)
{
    var stream = new AntlrInputStream(javaCode);
    var lexer = new JavaLexer(stream);
    var tokens = new CommonTokenStream(lexer);
    var parser = new JavaParser(tokens);
    parser.BuildParseTree = true;

    var styleKitVisitor = new StyleKitVisitor(tokens) { Namespace = Namespace };
    styleKitVisitor.Visit(parser.compilationUnit());
    return styleKitVisitor.GetResult();
}
public override void Parse(FileInfo file)
{
    Lexer lexer = new JavaLexer(CharStreams.fromPath(file.FullName));
    CommonTokenStream tokens = new CommonTokenStream(lexer);
    JavaParser parser = new JavaParser(tokens);
    ParserRuleContext context = parser.compilationUnit();

    var children = context.children;
    foreach (IParseTree child in children)
    {
        this.ParseNode(child);
    }
}
static void Main(string[] args)
{
    int i = 0;
    bool build_tree = true;
    for (; i < args.Length; ++i)
    {
        switch (args[i])
        {
            case "-notree":
                build_tree = false;
                break;
        }
    }

    var input = File.OpenText(args[i - 1]);
    var str = new AntlrInputStream(input);
    var lexer = new JavaLexer(str);
    var tokens = new CommonTokenStream(lexer);
    var parser = new JavaParser(tokens);
    var listener = new ErrorListener<IToken>(parser, lexer, tokens);
    parser.AddErrorListener(listener);
    parser.BuildParseTree = build_tree;

    var start = DateTime.Now;
    var tree = parser.compilationUnit();
    if (listener.had_error)
    {
        System.Console.WriteLine("error in parse.");
    }
    else
    {
        System.Console.WriteLine("parse completed.");
    }
    var end = DateTime.Now;

    System.Console.WriteLine(tokens.OutputTokens());
    if (tree != null)
    {
        System.Console.WriteLine(tree.OutputTree(tokens));
    }
    System.Console.WriteLine(end - start);
}
public void Parse(string file, IProject project)
{
    AntlrFileStream filestream = new AntlrFileStream(file);
    JavaLexer lexer = new JavaLexer(filestream);
    CommonTokenStream tokenStream = new CommonTokenStream(lexer);
    JavaParser parser = new JavaParser(tokenStream);
    var startingPoint = parser.compilationUnit();

    OrchestratingListener genericListener = new OrchestratingListener(project);
    genericListener.ParseInfoUpdate += (string info) => NotifyParseInfoUpdated?.Invoke(info);

    ParseTreeWalker walker = new ParseTreeWalker();
    walker.Walk(genericListener, startingPoint);
}
public void ParseJava()
{
    var inputStream = new AntlrInputStream(@"
import javax.swing.*;

public class Hello extends JFrame {
    Hello() {
        setDefaultCloseOperation(WindowConstants.DISPOSE_ON_CLOSE);
        pack();
    }

    public static void main(String[] args) {
        new Hello().setVisible(true);
    }
}");
    var lexer = new JavaLexer(inputStream);
    var commonTokenStream = new CommonTokenStream(lexer);
    var parser = new JavaParser(commonTokenStream);
    var visitor = new CstBuilderForAntlr4(parser);
    visitor.Visit(parser.compilationUnit());
    Console.WriteLine(visitor.FinishParsing());
}
static void Main(string[] args)
{
    StreamReader sr = new StreamReader("W:/Spring2021/Senior proj/JavaCXGamesMode/JavaCSharp/JavaCSharp/Test.txt");
    string input = sr.ReadToEnd();
    AntlrInputStream inputStream = new AntlrInputStream(input);
    ICharStream stream = inputStream;
    JavaLexer lexer = new JavaLexer(stream);
    ITokenStream tokens = new CommonTokenStream(lexer);
    JavaParser parser = new JavaParser(tokens);
    IParseTree tree = parser.compilationUnit();

    //TokenStreamRewriter rewriter = new TokenStreamRewriter(tokens);
    //Console.WriteLine(parser.ToString());
    //Console.WriteLine(tree.ToStringTree(parser));

    JavaVisitor visitor = new JavaVisitor();
    visitor.Visit(tree);
    Console.ReadLine();
}
// Here's where we do the real work...
public static void ParseJavaFile(string f)
{
    try
    {
        // Create a scanner that reads from the input stream passed to us
        lexer = new JavaLexer(new ANTLRFileStream(f));
        CommonTokenStream tokens = new CommonTokenStream();
        tokens.TokenSource = lexer;

        // Create a parser that reads from the scanner
        JavaParser parser = new JavaParser(tokens);

        // start parsing at the compilationUnit rule
        parser.compilationUnit();
    }
    catch (Exception e)
    {
        Console.Error.WriteLine("[ERROR]");
        Console.Error.WriteLine("parser exception: " + e);
        Console.Error.WriteLine(e.StackTrace); // so we can get stack trace
    }
}
static void Main(string[] args)
{
    List<string> options = new List<string>();
    List<string> arguments = new List<string>();
    string ast_output_file = null;

    CommandLine.Parser.Default.ParseArguments<Options>(args)
        .WithParsed<Options>(o =>
        {
            arguments = o.JavaFiles.ToList();
            ast_output_file = o.AstOutFile;
        })
        .WithNotParsed(a => { System.Console.Error.WriteLine(a); });

    Runtime.Redirect r = new Runtime.Redirect(ast_output_file);

    foreach (var file_name in arguments)
    {
        var code_as_string = File.ReadAllText(file_name);
        var input = new AntlrInputStream(code_as_string);
        var lexer = new JavaLexer(input);
        var tokens = new CommonTokenStream(lexer);
        var parser = new JavaParser(tokens);
        var listener = new ErrorListener<IToken>();
        parser.AddErrorListener(listener);
        JavaParser.CompilationUnitContext tree = parser.compilationUnit();
        if (listener.had_error)
        {
            return;
        }

        var sb = new StringBuilder();
        var ser = new Runtime.AstHelpers();
        ser.ParenthesizedAST(sb, file_name, tree, tokens);
        System.Console.WriteLine(sb.ToString());
    }

    r.Dispose();
}
public static void OpenTokenStream(string file_name)
{
    var code_as_string = System.IO.File.ReadAllText(file_name);
    var input = new AntlrInputStream(code_as_string);
    var suffix = System.IO.Path.GetExtension(file_name);
    if (suffix == ".cs")
    {
        var lexer = new CSharpLexer(input);
        tokens = new CommonTokenStream(lexer);
    }
    else if (suffix == ".java")
    {
        var lexer = new JavaLexer(input);
        tokens = new CommonTokenStream(lexer);
    }
    else
    {
        throw new System.Exception("File type not handled '" + suffix + "'");
    }
    tokens.Fill();
}
public static bool TryGetLineStatements(string text, int lineNumber, out IList<IParseTree> statementTrees, out IList<IToken> tokens)
{
    Contract.Requires<ArgumentNullException>(text != null, "text");
    Contract.Requires<ArgumentOutOfRangeException>(lineNumber >= 0);

    try
    {
        AntlrInputStream input = new AntlrInputStream(text);
        JavaLexer lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext result = parser.compilationUnit();

        statementTrees = null;
        tokens = tokenStream.GetTokens();
        AssociatedTreeListener listener = new AssociatedTreeListener(lineNumber, tokens);
        ParseTreeWalker.Default.Walk(listener, result);
        statementTrees = listener.StatementTrees;
        return true;
    }
    catch (Exception e)
    {
        if (ErrorHandler.IsCriticalException(e))
        {
            throw;
        }

        statementTrees = null;
        tokens = null;
        return false;
    }
}
// Here's where we do the real work...
public static void parseFile(string f, Stream s)
{
    try
    {
        // Create a scanner that reads from the input stream passed to us
        JavaLexer lexer = new JavaLexer(new StreamReader(s));
        lexer.setFilename(f);

        // Create a parser that reads from the scanner
        JavaRecognizer parser = new JavaRecognizer(lexer);
        parser.setFilename(f);

        // start parsing at the compilationUnit rule
        parser.compilationUnit();

        // do something with the tree
        doTreeAction(f, parser.getAST(), parser.getTokenNames());
    }
    catch (System.Exception e)
    {
        Console.Error.WriteLine("parser exception: " + e);
        Console.Error.WriteLine(e.StackTrace); // so we can get stack trace
    }
}
private void StartLexicalAnalysis()
{
    //Argument.ThrowIfNull(srcItem, "StartLexicalAnalysis : srcItem");
    var tokenizer = new JavaLexer(new ANTLRStringStream(EntireBuffer));
    var antlrToken = tokenizer.NextToken();

    while (antlrToken.Type != JavaLexer.EOF)
    {
        //System.Diagnostics.Debug.WriteLine(string.Format("token : {0}", tokenizer.TokenNames[antlrToken.Type]));
        //System.Diagnostics.Debug.WriteLine("token : {0}", TokenNames[antlrToken.Type]);

        // Antlr line starts from 1
        int curLine = antlrToken.Line - 1;
        //int startPosition = 0;
        //int endPosition = 0;

        // For each line we only store interesting tokens (ie not a newline or space)
        //if (antlrToken.Type != JavaLexer.WS &&
        //    antlrToken.Type != JavaLexer.NL)
        //{
        // We need to handle tokens on several lines (ex comments)
        if (antlrToken.Type != JavaLexer.BLOCK_COMMENT && antlrToken.Type != JavaLexer.JAVADOC_COMMENT)
        {
            TokenLines[curLine].Tokens.Add(TokenFactory.CreateToken(antlrToken));
        }
        else
        {
            HandleMultiLineToken(antlrToken);
        }
        //}
        //else
        //{
        //    // Since we don't store noisy tokens we need to store start/end index
        //    if (antlrToken.Type == JavaLexer.NL)
        //    {
        //        // Foreach TokenLine we store start/Stop index
        //        int tokLen = antlrToken.Text.Length;
        //        endPosition = antlrToken.StopIndex + tokLen;
        //        TokenLines[curLine].StartPosition = startPosition;
        //        TokenLines[curLine].EndPosition = endPosition;
        //        startPosition = endPosition + 1;
        //    }
        //}

        // Get next token from antlr lexer
        antlrToken = tokenizer.NextToken();
    }
}
private bool TryGetAssociatedTree(out IParseTree associatedTree, out IList<IToken> tokens)
{
    try
    {
        string sourcePath = _location.GetSourcePath();
        if (!File.Exists(sourcePath))
        {
            associatedTree = null;
            tokens = null;
            return false;
        }

        string text = File.ReadAllText(sourcePath);
        AntlrInputStream input = new AntlrInputStream(text);
        JavaLexer lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext result = parser.compilationUnit();

        associatedTree = null;
        tokens = tokenStream.GetTokens();
        AssociatedTreeListener listener = new AssociatedTreeListener(_location, tokens);
        ParseTreeWalker.Default.Walk(listener, result);
        List<IParseTree> potentialTrees = listener.AssociatedTree;
        if (potentialTrees.Count == 1)
        {
            associatedTree = potentialTrees[0];
        }
        else if (potentialTrees.Count > 1)
        {
            byte[] bytecode = _location.GetMethod().GetBytecodes();
            DisassembledMethod disassembledMethod = BytecodeDisassembler.Disassemble(bytecode);
            var constantPool = _location.GetDeclaringType().GetConstantPool();
            var exceptionTable = _location.GetMethod().GetExceptionTable();
            ImmutableList<int?> evaluationStackDepths = BytecodeDisassembler.GetEvaluationStackDepths(disassembledMethod, constantPool, exceptionTable);
            ReadOnlyCollection<ILocation> locations = _location.GetMethod().GetLineLocations();

            // find all bytecode offsets with evaluation stack depth 0 on the current line
            List<int> relevantOffsets = new List<int>();
            for (int i = 0; i < locations.Count; i++)
            {
                if (locations[i].GetLineNumber() != _location.GetLineNumber())
                {
                    continue;
                }

                long offsetLimit = i < locations.Count - 1 ? locations[i + 1].GetCodeIndex() : bytecode.Length;
                // start with the instruction for this bytecode offset
                for (int j = GetInstructionAtOffset(disassembledMethod, locations[i].GetCodeIndex());
                     j >= 0 && j < disassembledMethod.Instructions.Count && disassembledMethod.Instructions[j].Offset < offsetLimit;
                     j++)
                {
                    if (evaluationStackDepths[j] == 0)
                    {
                        // ignore unconditional branches
                        if (disassembledMethod.Instructions[j].OpCode.FlowControl == JavaFlowControl.Branch)
                        {
                            continue;
                        }

                        relevantOffsets.Add(disassembledMethod.Instructions[j].Offset);
                    }
                }
            }

            if (relevantOffsets.Count == potentialTrees.Count)
            {
                // heuristic: assume they appear in the same order as the source code on this line
                int treeIndex = relevantOffsets.IndexOf((int)_location.GetCodeIndex());
                if (treeIndex >= 0)
                {
                    associatedTree = potentialTrees[treeIndex];
                }
            }
        }

        if (associatedTree == null)
        {
            tokens = null;
            return false;
        }

        return true;
    }
    catch (Exception e)
    {
        if (ErrorHandler.IsCriticalException(e))
        {
            throw;
        }

        associatedTree = null;
        tokens = null;
        return false;
    }
}
private bool TryGetAssociatedTree(out IParseTree associatedTree, out IList<IToken> tokens)
{
    try
    {
        string sourcePath = _location.GetSourcePath();
        if (!File.Exists(sourcePath))
        {
            associatedTree = null;
            tokens = null;
            return false;
        }

        string text = File.ReadAllText(sourcePath);
        AntlrInputStream input = new AntlrInputStream(text);
        JavaLexer lexer = new JavaLexer(new JavaUnicodeStreamV4(input));
        CommonTokenStream tokenStream = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokenStream);
        parser.Interpreter.PredictionMode = PredictionMode.Sll;
        parser.BuildParseTree = true;
        JavaParser.CompilationUnitContext result = parser.compilationUnit();

        associatedTree = null;
        tokens = tokenStream.GetTokens();
        AssociatedTreeListener listener = new AssociatedTreeListener(_location, tokens);
        ParseTreeWalker.Default.Walk(listener, result);
        List<IParseTree> potentialTrees = listener.AssociatedTree;
        if (potentialTrees.Count == 1)
        {
            associatedTree = potentialTrees[0];
        }
        else if (potentialTrees.Count > 1)
        {
            byte[] bytecode = _location.GetMethod().GetBytecodes();
            DisassembledMethod disassembledMethod = BytecodeDisassembler.Disassemble(bytecode);
            var constantPool = _location.GetDeclaringType().GetConstantPool();

            ReadOnlyCollection<ExceptionTableEntry> exceptionTable;
            try
            {
                exceptionTable = _location.GetMethod().GetExceptionTable();
            }
            catch (DebuggerException)
            {
                exceptionTable = new ReadOnlyCollection<ExceptionTableEntry>(new ExceptionTableEntry[0]);
            }

            ImmutableList<int?> evaluationStackDepths = BytecodeDisassembler.GetEvaluationStackDepths(disassembledMethod, constantPool, exceptionTable);
            ReadOnlyCollection<ILocation> locations = _location.GetMethod().GetLineLocations();

            // find all bytecode offsets with evaluation stack depth 0 on the current line
            List<int> relevantOffsets = new List<int>();
            for (int i = 0; i < locations.Count; i++)
            {
                if (locations[i].GetLineNumber() != _location.GetLineNumber())
                    continue;

                long offsetLimit = i < locations.Count - 1 ? locations[i + 1].GetCodeIndex() : bytecode.Length;
                // start with the instruction for this bytecode offset
                for (int j = GetInstructionAtOffset(disassembledMethod, locations[i].GetCodeIndex());
                     j >= 0 && j < disassembledMethod.Instructions.Count && disassembledMethod.Instructions[j].Offset < offsetLimit;
                     j++)
                {
                    if (evaluationStackDepths[j] == 0)
                    {
                        // ignore unconditional branches
                        if (disassembledMethod.Instructions[j].OpCode.FlowControl == JavaFlowControl.Branch)
                            continue;

                        relevantOffsets.Add(disassembledMethod.Instructions[j].Offset);
                    }
                }
            }

            if (relevantOffsets.Count == potentialTrees.Count)
            {
                // heuristic: assume they appear in the same order as the source code on this line
                int treeIndex = relevantOffsets.IndexOf((int)_location.GetCodeIndex());
                if (treeIndex >= 0)
                    associatedTree = potentialTrees[treeIndex];
            }
        }

        if (associatedTree == null)
        {
            tokens = null;
            return false;
        }

        return true;
    }
    catch (Exception e)
    {
        if (ErrorHandler.IsCriticalException(e))
            throw;

        associatedTree = null;
        tokens = null;
        return false;
    }
}
private void setStatValues()
{
    if (File.Exists(filepath))
    {
        Stream inputStream = Console.OpenStandardInput();
        ANTLRFileStream input = new ANTLRFileStream(filepath);
        JavaLexer lexer = new JavaLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        JavaParser parser = new JavaParser(tokens);
        parser.compilationUnit();

        //assign parser variables to actual variables
        String textRead = File.ReadAllText(filepath);
        totalChars = textRead.Length;
        className = parser.className;
        packageName = parser.packageName;
        totalKeywords = parser.totalKeywords;
        totalUdis = parser.totalUdis - parser.UdisToRemove;
        totalConstants = parser.totalConstants;
        totalSpecialChars = parser.totalSpecialChars;
        totalWhiteSpace = lexer.totalWhiteSpace;
        uniqueKeywords = parser.uniqueKeywordSet.Count;
        uniqueUdis = parser.uniqueUdiSet.Count;
        uniqueConstants = parser.uniqueConstantsSet.Count;
        uniqueSpecialChars = parser.uniqueSpecialCharSet.Count;

        //calculated values
        percentWhiteSpace = (float)totalWhiteSpace / totalChars * 100;

        int commentChars = 0;
        for (int k = 0; k < lexer.commentsSet.Count; k++)
        {
            String com = lexer.commentsSet.ElementAt(k).ToString();
            commentChars += com.Length;
            Console.WriteLine("Comment Block Size: " + com.Length);
            Console.WriteLine("Comment Block Contents\n" + com);
            Console.WriteLine("---------Comment Block End ------------");
        }
        totalCommentChars = commentChars;
        percentCommentChars = (float)totalCommentChars / totalChars * 100;

        //FOR TESTING
        Console.WriteLine("Package Name: " + parser.packageName);
        Console.WriteLine("Class Name: " + parser.className);
        Console.WriteLine("Total Keywords: " + parser.totalKeywords);
        Console.WriteLine("Total UDIs: " + (parser.totalUdis - parser.UdisToRemove));
        Console.WriteLine("Total Constants: " + parser.totalConstants);
        Console.WriteLine("Total Special Chars: " + parser.totalSpecialChars);
        Console.WriteLine("Total White Space: " + lexer.totalWhiteSpace);

        Console.WriteLine("Unique Keywords size: " + parser.uniqueKeywordSet.Count);
        Console.WriteLine("****** Unique Keyword Contents *********");
        for (int i = 0; i < parser.uniqueKeywordSet.Count; i++)
        {
            Console.WriteLine(parser.uniqueKeywordSet.ElementAt(i));
        }

        Console.WriteLine("Unique UDI size: " + (parser.uniqueUdiSet.Count - parser.udisToRemoveSet.Count));
        Console.WriteLine("****** Unique UDIS Contents *********");
        for (int i = 0; i < parser.uniqueUdiSet.Count; i++)
        {
            Console.WriteLine(parser.uniqueUdiSet.ElementAt(i));
        }

        Console.WriteLine("Unique Constant size: " + parser.uniqueConstantsSet.Count);
        Console.WriteLine("****** Unique Constants Contents *********");
        for (int i = 0; i < parser.uniqueConstantsSet.Count; i++)
        {
            Console.WriteLine(parser.uniqueConstantsSet.ElementAt(i));
        }

        Console.WriteLine("Unique Special Chars size: " + parser.uniqueSpecialCharSet.Count);
        Console.WriteLine("******* Unique Special Char Contents ********");
        for (int j = 0; j < parser.uniqueSpecialCharSet.Count; j++)
        {
            Console.WriteLine(parser.uniqueSpecialCharSet.ElementAt(j));
        }

        Console.WriteLine("Comments Size: " + lexer.commentsSet.Count);
        Console.WriteLine("*********** Comments Contents ************");
        for (int j = 0; j < lexer.commentsSet.Count; j++)
        {
            Console.WriteLine(lexer.commentsSet.ElementAt(j));
        }

        Console.WriteLine("ALL UDIS FOUND");
        for (int k = 0; k < parser.everyUdi.Count; k++)
        {
            Console.WriteLine(parser.everyUdi.ElementAt(k));
        }
    }
}
private static void Main(String[] args)
{
    var keyword = new Keywords();
    var input = new AntlrInputStream(new StringReader(@"
// This is an example of a single line comment using two slashes

/* This is an example of a multiple line comment using the slash and asterisk.
This type of comment can be used to hold a lot of information or deactivate
code, but it is very important to remember to close the comment. */

package fibsandlies;
import java.util.HashMap;

/**
 * This is an example of a Javadoc comment; Javadoc can compile documentation
 * from this text. Javadoc comments must immediately precede the class, method, or field being documented.
 */
public class FibCalculator extends Fibonacci implements Calculator {
    private static Map<Integer, Integer> memoized = new HashMap<Integer, Integer>();

    /*
     * The main method written as follows is used by the JVM as a starting point for the program.
     */
    public static void main(String[] args) {
        memoized.put(1, 1);
        memoized.put(2, 1);
        System.out.println(fibonacci(12)); //Get the 12th Fibonacci number and print to console
    }

    /**
     * An example of a method written in Java, wrapped in a class.
     * Given a non-negative number FIBINDEX, returns
     * the Nth Fibonacci number, where N equals FIBINDEX.
     * @param fibIndex The index of the Fibonacci number
     * @return The Fibonacci number
     */
    public static int fibonacci(int fibIndex) {
        if (memoized.containsKey(fibIndex)) {
            return memoized.get(fibIndex);
        } else {
            int answer = fibonacci(fibIndex - 1) + fibonacci(fibIndex - 2);
            memoized.put(fibIndex, answer);
            return answer;
        }
    }
}
"));
    //var input = new AntlrFileStream("../../../Hemo.Test/Main.hemo");
    var lexer = new JavaLexer(input);
    var tokens = new CommonTokenStream(lexer);
    var parser = new JavaParser(tokens);
    var cu = parser.compilationUnit();

    //ICollection<Int32> a = new Collection<Int32>();
    //a.Remove(1);
    //var mainHemo = File.ReadAllText("../../../Hemo.Test/Main.hemo");
    //foreach (var token in mainHemo.SplitAndKeep()) {
    //    Visit(token);
    //}
    //var tokens = mainHemo.Split(new Char[0], StringSplitOptions.RemoveEmptyEntries);
    //var asdf = Hemo.Tokens;
    //foreach (String token in tokens) {}
}
public static void Main(String[] args)
{
    bool print_tree = false;
    bool perf = false;
    bool print_tokens = false;
    int file_index = 0;

    if (args.Length == 0)
    {
        System.Console.WriteLine("Antlr Java parser.");
        return;
    }
    else
    {
        for (int i = 0; i < args.Length; ++i)
        {
            if (args[i] == "-t")
            {
                print_tree = true;
            }
            else if (args[i] == "-p")
            {
                perf = true;
            }
            else if (args[i] == "-i")
            {
                print_tokens = true;
            }
            else
            {
                file_index = i;
            }
        }
    }

    int exceptions = 0;
    var errorListener = new ErrorListener<IToken>();
    IParseTree tree = null;
    CommonTokenStream tokens = null;
    var start = DateTime.Now;
    try
    {
        var input = File.OpenText(args[file_index]);
        var str = new AntlrInputStream(input);
        JavaLexer lexer = new JavaLexer(str);
        tokens = new CommonTokenStream(lexer);
        var parser = new JavaParser(tokens);
        parser.RemoveErrorListeners();
        parser.AddErrorListener(errorListener);
        tree = parser.compilationUnit();
    }
    catch (Exception e)
    {
        exceptions++;
    }
    System.Console.WriteLine("errors " + errorListener.num_errors + " exceptions " + exceptions);
    var end = DateTime.Now;

    if (perf)
    {
        System.Console.WriteLine(end - start);
    }

    if (print_tokens && tokens != null)
    {
        foreach (var token in tokens.GetTokens())
        {
            System.Console.WriteLine("Token " + token.TokenIndex + " " + token.Type + " " + Output.PerformEscapes(token.Text));
        }
    }

    if (print_tree && tree != null)
    {
        System.Console.WriteLine(tree.OutputTree(tokens));
    }
}