static void Main()
{
    // Run a single lexical-analysis pass over the configured input, then
    // hold the console window open until the user presses Enter.
    var analyzer = new LexicalAnalyzer();
    analyzer.Analyze();
    Console.ReadLine();
}
static void Main(string[] args)
{
    // Compiler pipeline driver: lex the source file into a token file,
    // syntax-check the tokens, then run semantic analysis and intermediate
    // code generation if everything parsed cleanly.
    const string sourcecode = "source-code.txt";
    const string tokenset = "token-set.txt";
    const string intermediatecode = "intermediate-code.txt";

    string[] code = Filling.Read(sourcecode);
    LexicalAnalyzer LA = new LexicalAnalyzer(code);
    LA.analyze(tokenset);

    string[] tokens = Filling.Read(tokenset);
    SyntaxAnalyzer SA = new SyntaxAnalyzer(tokens);
    int tokenIndex = SA.analyze(); // -1 signals a successful parse
    //int tokenIndex = 0;

    if (tokenIndex == -1)
    {
        List<Token> _tokens = new List<Token>();
        foreach (string token in tokens)
        {
            _tokens.Add(new Token(token));
        }

        TestSemanticTree.MainSemanticTree.parse(_tokens.ToArray());
        List<ErrorRecord> errors = TestSemanticTree.MainSemanticTree.errors;

        // FIX: use List<T>.Count instead of allocating an array just to
        // read its Length.
        if (errors.Count == 0)
        {
            ICGTree.MainSyntaxTree.analyze(intermediatecode, _tokens.ToArray());
        }
        else
        {
            // FIX: iterate the list directly (no ToArray copy), and add the
            // separating spaces the original message lacked — it printed
            // "identifieron line# 5(type)".
            foreach (ErrorRecord error in errors)
            {
                Console.WriteLine(error.identifier + " on line# " + error._token.line + " (" + error.type + ")");
            }
        }
    }
    else
    {
        // Syntax error: echo the offending source line and point a caret at
        // the token where parsing stopped.
        Token token = new Token(tokens[(tokenIndex < tokens.Length) ? tokenIndex : tokenIndex - 1]);
        string line = code[token.line - 1];
        int index = line.IndexOf(token.valuepart);

        // FIX: build the caret padding in one allocation instead of a
        // character-by-character += loop; Math.Max guards the case where
        // IndexOf returned -1 (token text not found in the line), matching
        // the original loop's no-padding behavior for that case.
        string error = new string(' ', Math.Max(index, 0)) + "^";
        Console.WriteLine(line);
        Console.WriteLine(error);
    }
}
/// <summary>
/// Constructor for the Syntax Analyzer class. Wires up the lexical analyzer,
/// symbol table, and intermediate code generator, then primes the parser by
/// fetching the first token.
/// </summary>
/// <param name="commandLineFileName">Source file path; forwarded to the lexer
/// and to <c>SetupTacFile</c> (presumably to derive the TAC output file name —
/// confirm against IntermediateCodeGenerator).</param>
public SyntaxAnalyzer(string commandLineFileName)
{
    lexicalAnalyzer = new LexicalAnalyzer(commandLineFileName);
    symbolTable = new SymbolTable();
    intermediateCodeGenerator = new IntermediateCodeGenerator();
    // Prepare the three-address-code output before any parsing begins.
    intermediateCodeGenerator.SetupTacFile(commandLineFileName);

    // Prime the parser: load the first token so parsing routines can assume
    // a current token is always available.
    lexicalAnalyzer.GetNextToken();
}
static void Main(string[] args)
{
    // Compiler pipeline driver (duplicate of the other pipeline Main):
    // lex -> syntax check -> semantic analysis -> intermediate code.
    const string sourcecode = "source-code.txt";
    const string tokenset = "token-set.txt";
    const string intermediatecode = "intermediate-code.txt";

    string[] code = Filling.Read(sourcecode);
    LexicalAnalyzer LA = new LexicalAnalyzer(code);
    LA.analyze(tokenset);

    string[] tokens = Filling.Read(tokenset);
    SyntaxAnalyzer SA = new SyntaxAnalyzer(tokens);
    int tokenIndex = SA.analyze(); // -1 signals a successful parse
    //int tokenIndex = 0;

    if (tokenIndex == -1)
    {
        List<Token> _tokens = new List<Token>();
        foreach (string token in tokens)
        {
            _tokens.Add(new Token(token));
        }

        TestSemanticTree.MainSemanticTree.parse(_tokens.ToArray());
        List<ErrorRecord> errors = TestSemanticTree.MainSemanticTree.errors;

        // FIX: List<T>.Count instead of errors.ToArray().Length — no copy
        // needed just to test emptiness.
        if (errors.Count == 0)
        {
            ICGTree.MainSyntaxTree.analyze(intermediatecode, _tokens.ToArray());
        }
        else
        {
            // FIX: iterate the list directly and insert the spaces the
            // original message was missing ("fooon line# 5(type)").
            foreach (ErrorRecord error in errors)
            {
                Console.WriteLine(error.identifier + " on line# " + error._token.line + " (" + error.type + ")");
            }
        }
    }
    else
    {
        // Syntax error: show the offending line with a caret under the token
        // at which parsing stopped.
        Token token = new Token(tokens[(tokenIndex < tokens.Length) ? tokenIndex : tokenIndex - 1]);
        string line = code[token.line - 1];
        int index = line.IndexOf(token.valuepart);

        // FIX: one-shot padding instead of a += loop; Math.Max guards a -1
        // from IndexOf (token text absent), matching the original loop.
        string error = new string(' ', Math.Max(index, 0)) + "^";
        Console.WriteLine(line);
        Console.WriteLine(error);
    }
}
static void Main(string[] args)
{
    // Demo driver: lex a test file, syntax-check the tokens, report the
    // result, and dump every token to the console.
    // FIX: compute the executable's directory once (the original repeated
    // the Assembly/Path dance three times) and delete the commented-out
    // CSV/text file dump along with the two output-path locals that only
    // the dead code referenced.
    string baseDir = Path.GetDirectoryName(Assembly.GetExecutingAssembly().Location);
    string testFile = Path.Combine(baseDir, @"test_code.txt");

    var sourceCode = File.ReadAllText(testFile);

    var lexicalAnalyzer = new LexicalAnalyzer();
    var tokens = lexicalAnalyzer.Analyze(sourceCode);

    var syntaxAnalyzer = new SyntaxAnalyzer();
    var token = syntaxAnalyzer.Analyze(tokens);

    // Analyze returns the first offending token, or null on success.
    if (token == null)
    {
        Console.WriteLine("Source code parsed Successfully!");
    }
    else
    {
        Console.WriteLine($"Error! Invalid character \"{token.Value}\" found on Line number \"{token.LineNumber}\"");
    }

    // Print every token for inspection.
    foreach (var t in tokens)
    {
        Console.WriteLine(t.ToString());
    }

    Console.ReadLine();
}
static void Main(string[] args)
{
    // Read the source file named on the command line, parse it bottom-up,
    // print the generated IL, and report elapsed time.
    string input = File.ReadAllText(args[0]);

    Stopwatch sw = Stopwatch.StartNew();
    LexicalAnalyzer analyzer = new LexicalAnalyzer(LexicalLanguage.GetLanguage(), input);
    BottomUpParser parser = new BottomUpParser(analyzer);
    parser.Parse();
    // Note: IL generation/printing is intentionally inside the timed region,
    // as in the original.
    Console.WriteLine(new CodeGenerator().GenerateAsString(parser.GetIL()));
    sw.Stop();

    Console.WriteLine($"Done (took {sw.ElapsedMilliseconds} milliseconds)");

    // FIX: the original called Console.ReadLine() four times in a row,
    // forcing four Enter presses before the process would exit. One is
    // enough to keep the window open.
    Console.ReadLine();
}