public VMModule? CompileAndLink(
    string source,
    string filename,
    InteropResolver resolver,
    List<ParseError> parseErrors,
    List<LinkError> linkErrors)
{
    Linker linker = new Linker(linkErrors);

    // Expose the interop bindings to the parser as a header module and to the linker as bindings.
    ParseModule parsedModule = CreateInputParseModuleFromInteropResolver(resolver);
    SetupLinkerWithInteropResolver(linker, resolver);

    // Stop before linking if compilation produced parse errors.
    ILModule? compiledModule = Compile(source, filename, parseErrors, parsedModule);
    if (compiledModule == null) {
        return null;
    }

    linker.AddModule(compiledModule, export: true);
    VMModule linkedModule = linker.Link();
    if (linkErrors.Count > 0) {
        return null;
    }
    return linkedModule;
}
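As a usage sketch (not taken from the original sources): a caller might drive CompileAndLink like this. The Compiler host type, the MyBindings interop class, and the sourceText/filename values are illustrative assumptions; only InteropResolver.ImportAssembly, the error lists, and the CompileAndLink signature come from the examples in this listing.

// Hypothetical driver; names marked below are assumptions, not part of the library.
var parseErrors = new List<ParseError>();
var linkErrors = new List<LinkError>();

InteropResolver resolver = new InteropResolver();
resolver.ImportAssembly(typeof(MyBindings).Assembly,                 // MyBindings: assumed interop class
    BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static);

var compiler = new Compiler();                                       // Compiler: assumed host type exposing CompileAndLink
VMModule? module = compiler.CompileAndLink(sourceText, "example",    // sourceText/"example": placeholder inputs
    resolver, parseErrors, linkErrors);

if (module == null) {
    foreach (ParseError e in parseErrors)
        Console.Error.WriteLine($"{e.Start.Line + 1},{e.Start.Column + 1}: {e.Message}");
    foreach (LinkError e in linkErrors)
        Console.Error.WriteLine($"Link error: {e.Message}");
}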
private ParseModule CreateInputParseModuleFromInteropResolver(InteropResolver resolver)
{
    ParseModule module = new ParseModule();
    module.Functions.AddRange(resolver.Bindings.Select(b => b.Prototype));
    module.Enums.AddRange(resolver.Enums);
    return module;
}
public ParseModule ParseAndAnalyze(
    string source,
    string filename,
    List<ParseError> errors,
    InteropResolver resolver)
{
    ParseModule module = CreateInputParseModuleFromInteropResolver(resolver);
    return ParseAndAnalyze(source, filename, errors, module);
}
public ParseModule CreateHeaderModule()
{
    ParseModule module = new ParseModule();
    module.Functions.AddRange(_bindings.Values.Select(binding => binding.Prototype));
    module.Enums.AddRange(_enums);
    foreach (var token in _tokenMap) {
        module.SymbolMap.Add(token.Key, token.Value);
    }
    return module;
}
public static void Main()
{
    ParseModule pm = new ParseModule();
    pm.ParseString();
    Console.WriteLine();
    pm.ParseStringWithIFormatProvider();
    Console.WriteLine();
    pm.ParseWithCustomIFormatProvider();
    Console.WriteLine();
    pm.ParseWithStyle();
    pm.ParseOverload4();
}
public ILModule? Compile(
    string source,
    string filename,
    List<ParseError> errors,
    ParseModule? targetModule = null)
{
    ParseModule module = ParseAndAnalyze(source, filename, errors, targetModule);
    if (errors.Count > 0) {
        return null;
    }
    return Compile(module);
}
public void solveTest()
{
    var inputData = new Dictionary<double, string>()
    {
        [-67] = "-12x3+3x2+5x+7",
        [23] = "+23",
        [-2869983] = "-99999x5+9999x3+9999x2+99999x+9999",
    };

    foreach (var testRow in inputData) {
        var coefs = ParseModule.parseAll(testRow.Value);
        var rez = ComputingModule.calculatePolynom(coefs, 2);
        Assert.AreEqual(testRow.Key, rez);
    }
}
public void ParseAll()
{
    var testPolynoms = new Dictionary<string, List<int>>()
    {
        ["5"] = new List<int>() { 5 },
        ["-5"] = new List<int>() { -5 },
        ["x+5"] = new List<int>() { 1, 5 },
        ["2x2+3x+5"] = new List<int>() { 2, 3, 5 },
        ["-2x2+3x+5"] = new List<int>() { -2, 3, 5 },
        ["-12x2+3x+5"] = new List<int>() { -12, 3, 5 },
        ["-12x3+3x2+5x+7"] = new List<int>() { -12, 3, 5, 7 }
    };

    foreach (var testCase in testPolynoms) {
        Assert.AreEqual(
            string.Join("", testCase.Value.Select(x => x.ToString()).ToArray()),
            string.Join("", ParseModule.parseAll(testCase.Key).Select(x => x.ToString()).ToArray()));
    }
}
public void OperandsFromString()
{
    var testPolynoms = new Dictionary<string, List<string>>()
    {
        ["5"] = new List<string>() { "+5" },
        ["-5"] = new List<string>() { "-5" },
        ["x+5"] = new List<string>() { "+x", "+5" },
        ["2x2+3x+5"] = new List<string>() { "+2x2", "+3x", "+5" },
        ["-2x2+3x+5"] = new List<string>() { "-2x2", "+3x", "+5" },
        ["-12x2+3x+5"] = new List<string>() { "-12x2", "+3x", "+5" },
        ["-12x3+3x2+5x+7"] = new List<string>() { "-12x3", "+3x2", "+5x", "+7" }
    };

    foreach (var testCase in testPolynoms) {
        Assert.AreEqual(
            string.Join("", testCase.Value.ToArray()),
            string.Join("", ParseModule.operandsFromString(testCase.Key).ToArray()));
    }
}
public ParseModule Parse(string? filePath, IEnumerable<Token> tokens, IList<ParseError> errors)
{
    _filePath = filePath;
    _tokens.Clear();
    _tokens.AddRange(tokens);

    TokenSource tokenSource = new TokenSource(_tokens, 0, errors);
    ParseModule outputModule = new ParseModule();
    _symbolMap = outputModule.SymbolMap;

    // Route each parsed top-level statement into the output module until the token stream is exhausted.
    ParseModuleHopper hopper = new ParseModuleHopper(outputModule);
    while (tokenSource.Peek().Type != TokenType.None) {
        ParseTopLevelStatement(ref tokenSource)?.AcceptTopLevelVisitor(hopper);
    }

    return outputModule;
}
public VMModule? CompileAndLink(
    ParseModule parsedModule,
    InteropResolver resolver,
    List<LinkError> linkErrors)
{
    Linker linker = new Linker(linkErrors);
    SetupLinkerWithInteropResolver(linker, resolver);

    ILModule compiledModule = Compile(parsedModule);
    linker.AddModule(compiledModule, export: true);

    VMModule linkedModule = linker.Link();
    if (linkErrors.Count > 0) {
        return null;
    }
    return linkedModule;
}
public void RunHeaderPass()
{
    if (_parseModule != null) {
        throw new InvalidOperationException("RunHeaderPass has already been called");
    }

    Parser parser = new Parser();
    List<ParseError> errors = new List<ParseError>();
    _parseModule = parser.Parse(_parseInput.GetFileInfo().FullPath, _parseInput.GetTokens(), errors);

    for (int i = 0, ilen = errors.Count; i < ilen; ++i) {
        ParseError error = errors[i];
        Diagnostics.Report(Diagnostics.Severity.Error,
            $"{error.Start.Line + 1},{error.Start.Column + 1}: {error.Message}");
    }

    if (_showAST) {
        PrintTreeVisitor treeVisitor = new PrintTreeVisitor(Console.Out);
        for (int i = 0, ilen = _parseModule.Functions.Count; i < ilen; ++i) {
            _parseModule.Functions[i].AcceptTopLevelVisitor(treeVisitor);
        }
    }

    if (!_job._forceCodegen) {
        Diagnostics.ThrowIfErrors();
    }

    // Register module members with semantic analyzer
    _job._semanticAnalyzer.Register(_parseModule);
}
public ParseModule ParseAndAnalyze(
    string source,
    string filename,
    List<ParseError> errors,
    ParseModule? headerModule = null)
{
    // Lex the whole source text into tokens.
    Lexer lexer = new Lexer();
    List<Token> tokens = new List<Token>();
    lexer.LexFullModule(source, tokens);

    // Parse the tokens into a module.
    Parser parser = new Parser();
    ParseModule outputModule = parser.Parse(filename, tokens, errors);

    // Run semantic analysis, registering the optional header module (e.g. interop prototypes) first.
    SemanticAnalyzer analyzer = new SemanticAnalyzer(errors);
    if (headerModule != null) {
        analyzer.Register(headerModule);
    }
    analyzer.Register(outputModule);
    analyzer.Process(outputModule);

    return outputModule;
}
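A minimal usage sketch of ParseAndAnalyze (not from the original sources): the compiler instance, the sourceText value, and the "inline" filename are assumptions; CreateHeaderModule and the ParseAndAnalyze signature come from the examples above.

// Hypothetical usage; `compiler` is assumed to be the instance exposing ParseAndAnalyze.
var errors = new List<ParseError>();
ParseModule header = interopResolver.CreateHeaderModule();          // interop prototypes, as in the Main example
ParseModule module = compiler.ParseAndAnalyze(sourceText, "inline", errors, header);

if (errors.Count == 0) {
    Console.WriteLine($"Parsed {module.Functions.Count} function(s).");
}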
public static void Main(string[] args)
{
    bool isInteractive = false;
    bool showLex = false;
    bool showAst = false;
    bool showIl = false;
    bool withDebugger = false;
    bool forceCodegen = false;
    bool run = true;

    List<string> positionals = new List<string>();
    List<int> argsToPass = new List<int>();
    List<string> libraryNames = new List<string>();
    List<string> librarySearchPaths = new List<string>();

    // Parse command line options; anything that isn't a flag is treated as a positional (input file).
    for (int i = 0, ilen = args.Length; i < ilen; ++i) {
        string arg = args[i].Trim();
        bool argFound = true;

        switch (arg) {
            case "-i":
                isInteractive = true;
                break;
            case "--showlex":
                showLex = true;
                break;
            case "--showast":
                showAst = true;
                break;
            case "--showil":
                showIl = true;
                break;
            case "-a":
                int argToPass;
                Int32.TryParse(args[++i], out argToPass);
                argsToPass.Add(argToPass);
                break;
            case "-l":
                libraryNames.Add(args[++i]);
                break;
            case "-L":
                librarySearchPaths.Add(args[++i]);
                break;
            case "--debugger":
                withDebugger = true;
                break;
            case "--force-codegen":
                // Indicate that the compiler should attempt to code gen IL, even if the parse stage failed.
                // This can be helpful to diagnose generated IL for code that doesn't compile outside of a
                // specific project due to undefined references. The parser tries its hardest to produce a
                // usable AST, even if there are semantic errors. Syntax errors? Not so much.
                forceCodegen = true;
                break;
            case "--skip-run":
                run = false;
                break;
            default:
                argFound = false;
                break;
        }

        if (!argFound) {
            if (!arg.Contains("-")) {
                positionals.Add(arg);
            }
        }
    }

    // Import interop bindings from the requested assemblies.
    InteropResolver interopResolver = new InteropResolver();
    const BindingFlags bindingFlags = BindingFlags.Public | BindingFlags.DeclaredOnly | BindingFlags.Static;
    foreach (string libraryName in libraryNames) {
        Assembly lib = LoadAssembly(libraryName, librarySearchPaths);
        interopResolver.ImportAssembly(lib, bindingFlags);
    }

    List<ParseModule> parseModules = new List<ParseModule>(positionals.Count);
    List<ParseError> errors = new List<ParseError>();

    void ParseTokens(string? filename, List<Token> tokens)
    {
        Parser parser = new Parser();
        ParseModule module = parser.Parse(filename, tokens, errors);
        parseModules.Add(module);
    }

    if (positionals.Count > 0) {
        // File mode - parse one or more files into ParseModules
        for (int i = 0, ilen = positionals.Count; i < ilen; ++i) {
            string filename = positionals[i];
            Lexer lexer = new Lexer();
            List<Token> tokens = new List<Token>();
            string? input;

            filename = Path.GetFullPath(filename);
            using StreamReader reader = new StreamReader(filename);
            while ((input = reader.ReadLine()) != null) {
                LexLine(input, lexer, tokens, verbose: showLex);
            }
            lexer.FinishLex(tokens);
            ParseTokens(filename, tokens);
        }
    } else {
        // Interactive or StdIn mode - parse exactly one ParseModule
        Lexer lexer = new Lexer();
        List<Token> tokens = new List<Token>();
        string? input;

        if (isInteractive) {
            WritePrompt();
            while ((input = Console.ReadLine()) != null) {
                LexLine(input, lexer, tokens, verbose: showLex);
                WritePrompt();
            }
        } else {
            while ((input = Console.In.ReadLine()) != null) {
                LexLine(input, lexer, tokens, verbose: showLex);
            }
        }
        lexer.FinishLex(tokens);
        ParseTokens(null, tokens);
    }

    List<LinkError> linkErrors = new List<LinkError>();
    Linker linker = new Linker(linkErrors);

    SemanticAnalyzer analyzer = new SemanticAnalyzer(errors);
    List<SemanticModule> semanticModules = new List<SemanticModule>(parseModules.Count);

    // Register every module (plus the interop header module) before processing so cross-module references resolve.
    analyzer.Register(interopResolver.CreateHeaderModule());
    foreach (ParseModule module in parseModules) {
        analyzer.Register(module);
    }

    foreach (ParseModule module in parseModules) {
        semanticModules.Add(analyzer.Process(module));

        if (showAst) {
            PrintTreeVisitor treeVisitor = new PrintTreeVisitor(Console.Out);
            foreach (StructNode structNode in module.Structs) {
                structNode.AcceptTopLevelVisitor(treeVisitor);
            }
            foreach (FunctionDefinitionNode function in module.Functions) {
                function.AcceptTopLevelVisitor(treeVisitor);
            }
        }
    }

    for (int i = 0, ilen = errors.Count; i < ilen; ++i) {
        ParseError error = errors[i];
        Console.Error.WriteLine($"{error.Start.Line + 1},{error.Start.Column + 1}: {error.Message}");
    }
    if (errors.Count > 0 && !forceCodegen) {
        Environment.Exit(1);
    }

    // Generate IL for each analyzed module and hand it to the linker.
    foreach (SemanticModule module in semanticModules) {
        CodeGenerator generator = new CodeGenerator(module, analyzer.Context);
        ILModule ilmodule = generator.Generate();
        if (showIl) {
            ilmodule.WriteListing(Console.Out);
        }
        linker.AddModule(ilmodule, export: true);
    }

    // Bind interop implementations and link everything into a single VM module.
    foreach (Binding binding in interopResolver.Bindings) {
        linker.AddFunctionBinding(binding.Prototype.Name, binding.Implementation, export: false);
    }

    VMModule vmModule = linker.Link();
    if (linkErrors.Count > 0) {
        foreach (LinkError error in linkErrors) {
            Console.Error.WriteLine($"Link error: {error.Message}");
        }
        Environment.Exit(1);
    }

    List<string> loadErrors = new List<string>();
    if (!interopResolver.PrepareForExecution(vmModule, loadErrors)) {
        foreach (string error in loadErrors) {
            Console.Error.WriteLine($"Load error: {error}");
        }
        Environment.Exit(1);
    }

    if (!run) {
        Environment.Exit(0);
        return;
    }

    using VirtualMachine vm = new VirtualMachine();

    if (withDebugger) {
        Debugger debugger = new Debugger();
        VMDebugger vmDebugger = new VMDebugger(debugger, vm);
        vmDebugger.Break += () => HandleBreak(vm, debugger, vmDebugger);
        vmDebugger.Pause();
    }

    // TODO: Args to pass is broken
    if (!vm.Call(vmModule, "main", new byte[0], success => HandleExecutionFinished(vm, success))) {
        Console.Error.WriteLine("Failed to call main function.");
        Environment.Exit(-1);
    }
}
public SemanticModule(ParseModule baseModule, Dictionary<IExpressionNode, IType> expressionResultTypes)
{
    BaseModule = baseModule;
    ExpressionResultTypes = expressionResultTypes;
}
public ILModule Compile(ParseModule module)
{
    throw new NotImplementedException();
    // CodeGenerator generator = new CodeGenerator(module, context);
    // return generator.Generate();
}
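The stubbed Compile(ParseModule) above throws, and its commented-out body references an undefined context and passes a ParseModule where the Main example constructs CodeGenerator from a SemanticModule and analyzer.Context. A possible completion consistent with the ParseAndAnalyze and Main examples is sketched below; it is an assumption, not the project's actual implementation, and assumes the class can create its own error list and SemanticAnalyzer.

// Sketch only: routes the parse module through semantic analysis before code generation,
// mirroring how ParseAndAnalyze and the Main example wire these pieces together.
public ILModule Compile(ParseModule module)
{
    List<ParseError> errors = new List<ParseError>();
    SemanticAnalyzer analyzer = new SemanticAnalyzer(errors);
    analyzer.Register(module);
    SemanticModule semanticModule = analyzer.Process(module);

    CodeGenerator generator = new CodeGenerator(semanticModule, analyzer.Context);
    return generator.Generate();
}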