/// <summary>
/// Compiles one PL/SQL source file: parses it to an AST, runs semantic
/// analysis, and — when the checks pass — optimizes the tree and writes the
/// generated code to <paramref name="outStream"/>.
/// </summary>
/// <param name="fileStream">Raw source of the file being compiled.</param>
/// <param name="fileName">Name used only for log messages.</param>
/// <param name="outStream">Destination for the generated code.</param>
/// <param name="treeLogger">Optional; when non-null, receives a printed AST dump.</param>
private static void CompileFile(Stream fileStream, string fileName, StreamWriter outStream, TypeStorage typeStorage, ILogger logger, ILogger treeLogger, INameConvertor nameConvertor)
{
    // Section header for this file in the compilation log.
    logger.Log(String.Format("----Файл {0}----", fileName));

    var stream = new ANTLRInputStream(fileStream);
    var lexer = new PlsqlLexer(stream);
    var parser = new PlsqlParser(new CommonTokenStream(lexer));
    var tree = parser.program().Tree as CommonTree;

    // AST dump is optional diagnostics.
    if (treeLogger != null)
    {
        treeLogger.Log(ASTPrinter.Print(tree));
    }

    var semanticAnalyser = new SemanticAnalyser(typeStorage, logger);
    bool res = semanticAnalyser.Check(tree);
    if (res)
    {
        // Code is only generated for files that pass semantic analysis.
        var optimizer = new Optimizer();
        optimizer.Optimize(tree);
        var codegenerator = new Codegenerator(nameConvertor);
        string code = codegenerator.Generate(tree, typeStorage);
        outStream.Write(code);
    }

    // BUG FIX: the original called string.Format("--------", fileName) — the
    // format string has no placeholder, so fileName was silently discarded.
    logger.Log("--------");
}
/// <summary>
/// Demo entry point: parses a hard-coded SQL statement with the MySQL 5.1
/// grammar while streaming ANTLR debug events to a listener on a fixed port.
/// Parser trace output is captured into an in-memory StringWriter.
/// </summary>
static void Main(string[] args)
{
    int port = 49101;
    string sql = "select * from t1 left join t2";

    // The grammar expects upper-case input. NOTE: use Encoding.ASCII — the
    // original accessed the static ASCII property through the ASCIIEncoding
    // subclass, which analyzers flag (access static members via declaring type).
    MemoryStream ms = new MemoryStream(Encoding.ASCII.GetBytes(sql.ToUpper()));
    ANTLRInputStream input = new ANTLRInputStream(ms);
    MySQL51Lexer lexer = new MySQL51Lexer(input);

    // I need a Tree adaptor to build a DebugEventSocketProxy, but I need a
    // DebugEventSocketProxy to build a Tree Adaptor.
    // Solution: Create the DebugEventSocketProxy
    //ITreeAdaptor adaptor = new DebugTreeAdaptor(/*dbg*/ null, new CommonTreeAdaptor());

    // To create a DebugTokenStream I need a DebugEventSocketProxy and viceversa
    // Solution: Create DebugEventSocketProxy in the DebugTokenStream contructor
    // How do I get an ITokenStream implementation?
    // Another caveat: the instance of DebugEventSocketProxy must be the same
    // for the lexer as for the parser.
    //DebugEventSocketProxy proxy = new DebugEventSocketProxy(this, port, adaptor);
    DebugTokenStream tokens = new DebugTokenStream(new BufferedTokenStream(lexer), port, null);
    //CommonTokenStream tokens = new CommonTokenStream(lexer);
    MySQL51Parser parser = new MySQL51Parser(tokens, port, null);

    // Capture any parser trace output in memory.
    StringBuilder sb = new StringBuilder();
    TextWriter tw = new StringWriter(sb);
    try
    {
        parser.TraceDestination = tw;
        MySQL51Parser.statement_list_return r = parser.statement_list();
    }
    catch (RecognitionException re)
    {
        Console.WriteLine(re.StackTrace);
    }
}
/// <summary>
/// Parses a formula string into an AST. Lexer and parser errors are logged to
/// BuildInfo; returns null when any error occurred, otherwise the AST root
/// (which is also logged at AST level).
/// </summary>
private CommonTree Parse(string formula)
{
    var m = new MemoryStream(Encoding.UTF8.GetBytes(formula));
    // create a CharStream that reads from the in-memory formula bytes
    var input = new ANTLRInputStream(m);
    // create a lexer that feeds off of input CharStream
    var lexer = new FormulaLexer(input);
    // create a buffer of tokens pulled from the lexer
    var tokens = new CommonTokenStream(lexer);
    // create a parser that feeds off the tokens buffer
    var parser = new FormulaParser(tokens);
    var tree = parser.start();

    // BUG FIX: Union() is lazily evaluated — the original enumerated the query
    // twice (foreach, then Count()), re-running both error collections.
    // Materialize once and reuse the list.
    var errors = lexer.GetErrors().Union(parser.GetErrors()).ToList();
    foreach (var error in errors)
    {
        BuildInfo.Log(error.ToString(), FormulaCompilerBuildInfoLevels.Error);
    }
    if (errors.Count > 0)
    {
        return(null);
    }

    var root = (CommonTree)tree.Tree;
    BuildInfo.Log(root.ToStringTree(), FormulaCompilerBuildInfoLevels.AST);
    return(root);
}
/// <summary>
/// Runs the lexer identified by <paramref name="lexerClass"/> over the whole
/// document and returns a fully-buffered token stream.
/// </summary>
public static CodeBuffTokenStream tokenize(string doc, Type lexerClass)
{
    var charStream = new ANTLRInputStream(doc);
    var lexerInstance = getLexer(lexerClass, charStream);
    var tokenStream = new CodeBuffTokenStream(lexerInstance);
    tokenStream.Fill();
    return tokenStream;
}
/// <summary>
/// Parses a grouping-definition file from disk and returns the resulting
/// <c>Grouping</c>. The file is opened read-only and disposed on exit.
/// </summary>
public static Grouping Parse(string file)
{
    using (FileStream stream = File.OpenRead(file))
    {
        var charStream = new ANTLRInputStream(stream);
        var lexer = new GroupingDefinitionLanguageLexer(charStream);
        var tokens = new CommonTokenStream(lexer);
        var parser = new GroupingDefinitionLanguageParser(tokens);
        return parser.definition();
    }
}
/// <summary>
/// Reads an arithmetic expression from standard input, evaluates it with the
/// Calculator grammar and prints the result.
/// </summary>
static void Main(string[] args)
{
    var stdin = Console.OpenStandardInput();
    var charStream = new ANTLRInputStream(stdin);
    var lexer = new CalculatorLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new CalculatorParser(tokenStream);
    int result = parser.addSubExpr();
    Console.WriteLine("Answer = {0}", result);
}
/// <summary>
/// Parses a workflow-definition file from disk and returns the workflows it
/// declares. The file is opened read-only and disposed on exit.
/// </summary>
public static List <AbstractWorkflow> Parse(string file)
{
    using (FileStream stream = File.OpenRead(file))
    {
        var charStream = new ANTLRInputStream(stream);
        var lexer = new WorkflowDefinitionLanguageLexer(charStream);
        var tokens = new CommonTokenStream(lexer);
        var parser = new WorkflowDefinitionLanguageParser(tokens);
        return parser.definition();
    }
}
/// <summary>
/// VecMath demo driver. Input selection: "-i &lt;file&gt;" reads a file, a bare
/// argument is treated as the expression itself (e.g. "x = 2*(3+3)"), and no
/// arguments means standard input. Parses the program, then applies two
/// tree-rewrite passes (Simplify, then Reduce) and prints both trees.
/// </summary>
private static void _Main(string[] args)
{
    ICharStream input;
    if (args.Length == 0)
    {
        input = new ANTLRInputStream(Console.OpenStandardInput());
    }
    else if (args[0].Equals("-i"))
    {
        if (args.Length <= 1)
        {
            throw new Exception("No input file specified.");
        }
        input = new ANTLRFileStream(args[1]);
    }
    else
    {
        input = new ANTLRStringStream(args[0]);
    }

    var lexer = new VecMathLexer(input);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new VecMathParser(tokenStream);
    IAstRuleReturnScope <CommonTree> result = parser.prog();
    CommonTree tree = result.Tree;
    Console.WriteLine("Original tree: " + tree.ToStringTree());

    // Pass 1: algebraic simplification over the AST.
    var simplify = new Simplify(new CommonTreeNodeStream(tree));
    tree = (CommonTree)simplify.Downup(tree);
    // Pass 2: constant reduction over the simplified AST.
    var reduce = new Reduce(new CommonTreeNodeStream(tree));
    tree = (CommonTree)reduce.Downup(tree);
    Console.WriteLine("Simplified tree: " + tree.ToStringTree());
    Console.ReadKey();
}
/// <summary>
/// Parses a formula string with the Simple grammar, starting at the
/// <c>start</c> rule, and returns the root of the resulting AST.
/// </summary>
public static CommonTree Parse(string formula)
{
    var bytes = Encoding.UTF8.GetBytes(formula);
    var charStream = new ANTLRInputStream(new MemoryStream(bytes));
    // Build the lexer/parser pipeline.
    var lexer = new SimpleLexer(charStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new SimpleParser(tokenStream);
    // Evaluate from the start rule and hand back the AST root.
    var result = parser.start();
    return (CommonTree)result.Tree;
}
/// <summary>
/// Parses <paramref name="content"/> with the lexer/parser described by
/// <paramref name="language"/> into an InputDocument. Uses two-stage parsing:
/// fast SLL prediction with a bail-out strategy first, then a full LL retry
/// with standard error handling when SLL fails. Tree is null if the LL retry
/// still reports syntax errors.
/// </summary>
public static InputDocument parse(string fileName, string content, LangDescriptor language)
{
    ANTLRInputStream input = new ANTLRInputStream(content);
    Lexer lexer = getLexer(language.lexerClass, input);
    input.name = fileName;

    InputDocument doc = new InputDocument(fileName, content, language);
    doc.tokens = new CodeBuffTokenStream(lexer);
    doc.parser = getParser(language.parserClass, doc.tokens);
    doc.parser.BuildParseTree = true;

    // Stage 1: SLL prediction + bail-out strategy (fast path, no recovery).
    doc.parser.Interpreter.PredictionMode = Antlr4.Runtime.Atn.PredictionMode.SLL;
    doc.parser.ErrorHandler = new BailErrorStrategy();
    doc.parser.RemoveErrorListeners();

    // The start rule is looked up by name and invoked via reflection, so parse
    // failures surface as the *inner* exception of the reflection exception.
    MethodInfo startRule = language.parserClass.GetMethod(language.startRuleName);
    try
    {
        doc.Tree = (ParserRuleContext)startRule.Invoke(doc.parser, (object[])null);
    }
    catch (Exception ex)
    {
        if (ex.InnerException is ParseCanceledException)
        {
            // Stage 2: SLL bailed — rewind and reparse with full LL prediction
            // and the standard listeners/handlers.
            doc.parser.Reset();
            doc.tokens.Reset(); // rewind input stream
            doc.parser.AddErrorListener(new ANTLRErrorListenerAnonymousInnerClass());
            doc.parser.ErrorHandler = new DefaultErrorStrategy();
            doc.parser.Interpreter.PredictionMode = PredictionMode.LL;
            doc.Tree = (ParserRuleContext)startRule.Invoke(doc.parser, (object[])null);
            if (doc.parser.NumberOfSyntaxErrors > 0)
            {
                doc.Tree = null;
            }
        }
        else
        {
            // BUG FIX: the original swallowed every other exception here,
            // silently returning a document with a null parse tree. Rethrow
            // so real failures are visible to the caller.
            throw;
        }
    }
    return(doc);
}
/// <summary>
/// Builds a Stylesheet from a MapCSS stream: lex/parse to an AST, then walk it
/// with MapCssVisitor. <paramref name="isExprTreeAllowed"/> controls whether
/// the visitor may use expression trees.
/// </summary>
private static Stylesheet Create(Stream stream, bool isExprTreeAllowed)
{
    var antlrStream = new ANTLRInputStream(stream);
    var lexer = new MapCssLexer(antlrStream);
    var tokenStream = new CommonTokenStream(lexer);
    var parser = new MapCssParser(tokenStream);

    var parseResult = parser.stylesheet();
    var ast = parseResult.Tree as Antlr.Runtime.Tree.CommonTree;

    // NOTE: expression trees cannot be used on some platforms (e.g. web player).
    var visitor = new MapCssVisitor(isExprTreeAllowed);
    var sheet = visitor.Visit(ast);

    // NOTE: detaching the token source prevents a memory leak inside the
    // Antlr library.
    tokenStream.TokenSource = null;
    return sheet;
}
/// <summary>
/// Parses each PL/SQL file in <paramref name="files"/> into an AST and returns
/// one SourceFile (path + tree) per input, in the same order.
/// </summary>
private List <SourceFile> Parse(List <string> files)
{
    var res = new List <SourceFile>();
    foreach (var item in files)
    {
        // BUG FIX: FileMode.Open alone defaults to read/write access, which
        // fails on read-only files and takes an unnecessarily strong lock.
        // Open explicitly for reading only.
        using (var fileStream = new FileStream(item, FileMode.Open, FileAccess.Read))
        {
            var stream = new ANTLRInputStream(fileStream);
            var lexer = new PlsqlLexer(stream);
            var parser = new PlsqlParser(new CommonTokenStream(lexer));
            var tree = parser.program().Tree as CommonTree;
            res.Add(new SourceFile(item, tree));
        }
    }
    return(res);
}
/// <summary>
/// Interactive calculator REPL: reads arithmetic expressions line by line,
/// evaluates each with the Calculadora1 grammar and prints the result.
/// Typing a line containing EXIT (or exit) quits.
/// </summary>
static void Main(string[] args)
{
    Console.Write("|| PRACTICA 1 ||\n");
    Console.Write(":: Escribe tu expresion aritmetica ::\n");

    for (;;)
    {
        // Read the next input line from the user.
        string line = Console.ReadLine();
        if (line.Contains("EXIT") || line.Contains("exit"))
        {
            // EXIT sentinel detected — leave the program.
            break;
        }

        // The grammar expects a trailing newline after the expression.
        string padded = line + Environment.NewLine;
        byte[] bytes = Encoding.ASCII.GetBytes(padded);
        var memoryStream = new MemoryStream(bytes);
        var charStream = new ANTLRInputStream(memoryStream);
        // Lexer over the text the user typed.
        var lexer = new Calculadora1Lexer(charStream);
        // Token stream fed by that lexer.
        var tokenStream = new CommonTokenStream(lexer);
        // Parser over the token stream.
        var parser = new Calculadora1Parser(tokenStream);
        try
        {
            // Evaluate the expression rule and echo "input = result".
            int result = parser.expresion();
            Console.WriteLine(line + " = " + result);
        }
        catch (RecognitionException e)
        {
            Console.Error.WriteLine(e.StackTrace);
        }
    }
}
/// <summary>
/// Lexes and parses the Java file at <c>path</c>, collects character /
/// whitespace / comment statistics from the lexer and package / class / method
/// info from the parser, then prints a summary to the console.
/// </summary>
public void Scan()
{
    // BUG FIX: the original never disposed the file stream; wrap in using.
    using (Stream inputStream = File.OpenRead(path))
    {
        //Stream inputStream = Console.OpenStandardInput();
        ANTLRInputStream input = new ANTLRInputStream(inputStream);
        Java_MITLexer lexer = new Java_MITLexer(input);
        CommonTokenStream tokens = new CommonTokenStream(lexer);
        Java_MITParser parser = new Java_MITParser(tokens);
        parser.compilationUnit();

        // Get information gathered during lexing.
        CharCt = lexer.getCharCount();
        WhitespaceCt = lexer.getWhiteSpaceCount();
        CommentCharCt = lexer.getCommentCharCount();

        // BUG FIX: the original computed CharCt / total with integer division,
        // which truncates to 0 for any file that is not 100% active characters.
        // Scale by 100 first so the value is an actual percentage.
        int PercentActiveChar = 0;
        int total = CharCt + WhitespaceCt + CommentCharCt;
        if (total != 0)
        {
            PercentActiveChar = (100 * CharCt) / total;
        }

        packages = parser.getPackages();
        classes = parser.getClasses();
        methods = parser.getMethods();

        // Show file-level information.
        Console.WriteLine("\n\n\nValues pulled from " + path + ":");
        Console.WriteLine("File character count = " + lexer.getCharCount() + ".");
        Console.WriteLine("File white space count = " + lexer.getWhiteSpaceCount() + ".");
        Console.WriteLine("File comment character count = " + lexer.getCommentCharCount() + ".");
        Console.WriteLine("File percent active characters = " + PercentActiveChar.ToString() + ".");

        // Show package / class / method breakdowns.
        ShowPackageInfo();
        ShowClassInfo();
        ShowMethodInfo();
    }
}
/// <summary>
/// Parses JSON from a raw stream by wrapping it in an ANTLR input stream and
/// delegating to <c>ParseANTLRStream</c>.
/// </summary>
public static JsonObject Parse(Stream stream)
{
    return ParseANTLRStream(new ANTLRInputStream(stream));
}
// Entry point for the LSL compiler tool. Three modes, selected by arguments:
//   args[0] == "bytecompiler": byte-compile assembly text from stdin and, when
//     a script was produced, run it in the interpreter and print the elapsed
//     seconds.
//   any other args: compile each argument as a source file.
//   no args: compile LSL source read from stdin to byte code text, then
//     byte-compile that output and report the script's base memory usage.
static void Main(string[] args)
{
    ICharStream input = null;
    if (args.Length > 0)
    {
        if (args[0] == "bytecompiler")
        {
            // Byte-compile assembly read from standard input.
            input = new ANTLRInputStream(Console.OpenStandardInput());
            ByteCompilerFrontend fe = new ByteCompilerFrontend();
            fe.Listener = new DefaultLSLListener();
            fe.Compile(input);
            CompiledScript script = fe.Result;
            if (script != null)
            {
                // Run the compiled script until it leaves the Running state,
                // timing the whole execution.
                System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
                watch.Start();
                Interpreter i = new Interpreter(script, null);
                i.TraceDestination = Console.Out;
                while (i.ScriptState.RunState == RuntimeState.Status.Running)
                {
                    i.Tick();
                }
                watch.Stop();
                // Elapsed wall-clock time, in seconds.
                System.Console.WriteLine(watch.ElapsedMilliseconds / 1000.0);
            }
        }
        else
        {
            // Treat every argument as a source file and compile it.
            foreach (string arg in args)
            {
                Console.WriteLine("Compiling: " + arg);
                input = new ANTLRFileStream(arg);
                CompilerFrontend fe = new CompilerFrontend();
                fe.TraceDestination = Console.Out;
                fe.Compile(input);
            }
        }
    }
    else
    {
        // No arguments: two-stage pipeline over stdin — compile source to byte
        // code, then byte-compile that output. Listener output is routed
        // through a Mono-safe trace redirector.
        ILSLListener listener = new DefaultLSLListener();
        LSLListenerTraceRedirectorMono redirector = new LSLListenerTraceRedirectorMono(listener);
        input = new ANTLRInputStream(Console.OpenStandardInput());
        CompilerFrontend fe = new CompilerFrontend();
        fe.TraceDestination = redirector;
        fe.Listener = listener;
        Console.WriteLine("** compilation output **");
        string byteCode = fe.Compile(input);
        if (!listener.HasErrors() && byteCode != null)
        {
            // Stage 2: byte-compile the generated byte code text.
            input = new ANTLRStringStream(byteCode);
            ByteCompilerFrontend bfe = new ByteCompilerFrontend();
            bfe.TraceDestination = redirector;
            bfe.Listener = listener;
            bfe.Compile(input);
            CompiledScript script = bfe.Result;
            Console.WriteLine("** usage info **");
            if (script != null)
            {
                Console.WriteLine("Base memory: {0} bytes", script.CalcBaseMemorySize());
            }
            //SaveScript(script);
            /*
             * if (script != null)
             * {
             *     System.Diagnostics.Stopwatch watch = new System.Diagnostics.Stopwatch();
             *     watch.Start();
             *     Interpreter i = new Interpreter(script, null);
             *     i.TraceDestination = Console.Out;
             *     while (i.ScriptState.RunState == RuntimeState.Status.Running)
             *     {
             *         i.Tick();
             *     }
             *
             *     watch.Stop();
             *     System.Console.WriteLine("Execution: {0} seconds", watch.ElapsedMilliseconds / 1000.0);
             *     System.Console.WriteLine("Free Memory: {0} bytes", i.ScriptState.MemInfo.MemoryFree);
             * }
             */
        }
    }
}
/// <summary>
/// Compiler driver: loads referenced assemblies into the global scope,
/// registers the built-in System types, parses every source file, then runs
/// the two symbol passes (define, resolve) and emits the assembly.
/// </summary>
public void Run(Options options)
{
    GlobalScope globalScope = new GlobalScope();
    Loader ld = new Loader(globalScope, options);
    ld.Load(options.References);

    // Map the language's short type names onto the loaded System.* symbols.
    // (Data-driven replacement for 25 identical copy-pasted RegisterType calls.)
    string[] builtinTypes =
    {
        "Boolean", "Char", "SByte", "Byte", "Int16", "UInt16", "Int32",
        "UInt32", "Int64", "UInt64", "IntPtr", "UIntPtr", "Single", "Double",
        "String", "Object", "ValueType", "Enum", "Void", "Array", "Exception",
        "Type", "MulticastDelegate", "IAsyncResult", "AsyncCallback"
    };
    foreach (string typeName in builtinTypes)
    {
        Types.RegisterType(typeName, (IType)globalScope.GetSymbol("System." + typeName));
    }

    // Precompute the operator result/promotion tables.
    TypesHelper th = new TypesHelper();
    th.Prepare();
    Types.ResultTable = th.ResultTable;
    Types.PromotionTable = th.PromotionTable;

    int files_number = options.FilesNumber;
    if (files_number == 0)
    {
        Report.Error.NoFilesToCompile();
    }

    List <FileNamespace> file_namespace = new List <FileNamespace>();
    List <CompilationUnit> compilation_units = new List <CompilationUnit>();
    foreach (string source in options.Files)
    {
        try
        {
            // BUG FIX: open read-only and dispose the stream; the original
            // leaked the file handle and requested read/write access.
            using (FileStream file = new FileStream(source, FileMode.Open, FileAccess.Read))
            {
                ANTLRInputStream input = new ANTLRInputStream(file);
                GrammarLexer lexer = new GrammarLexer(input);
                CommonTokenStream tokens = new CommonTokenStream(lexer);
                GrammarParser parser = new GrammarParser(tokens);
                compilation_units.Add(parser.program());
                file_namespace.Add(new FileNamespace(globalScope));
            }
        }
        catch (FileNotFoundException)
        {
            Report.Error.SourceFileNotFound(source);
        }
        catch (DirectoryNotFoundException)
        {
            Report.Error.SourceFileNotFound(source);
        }
        catch (IOException)
        {
            Report.Error.IOError(source);
        }
    }

    CodeGen codegen = new CodeGen(options);

    // BUG FIX: iterate over the units actually parsed. The original indexed
    // up to options.FilesNumber, which overruns both lists whenever a source
    // file failed to open (the error path above skips the Add calls).
    for (int i = 0; i < compilation_units.Count; ++i)
    {
        compilation_units[i].DefineSymbols(new Context(file_namespace[i]));
    }
    for (int i = 0; i < compilation_units.Count; ++i)
    {
        compilation_units[i].ResolveSymbols(new Context(file_namespace[i]));
    }
    codegen.BuildAssembly(compilation_units);
}
/// <summary>
/// Instantiates the given lexer type via reflection, passing the ANTLR input
/// stream to its constructor.
/// </summary>
public static Lexer getLexer(Type lexerClass, ANTLRInputStream input)
{
    var ctorArgs = new object[] { input };
    return (Lexer)Activator.CreateInstance(lexerClass, ctorArgs);
}