/// <summary>
/// Builds a parser over the supplied lexer and symbol table, starting with
/// empty bookkeeping maps for global string literals and per-method
/// parameter/local stack sizes.
/// </summary>
/// <param name="lexAnalyzer">Token source the parser pulls from.</param>
/// <param name="symbolTable">Symbol table shared with the rest of the compiler.</param>
public SyntaxParserService(LexicalAnalyzerService lexAnalyzer, SymbolTable symbolTable)
{
    LexicalAnalyzer = lexAnalyzer;
    SymbolTable = symbolTable;
    GlobalStrings = new Dictionary<string, string>();
    MethodParamSize = new Dictionary<string, int>();
    MethodLocalSize = new Dictionary<string, int>();
}
/// <summary>
/// Compiles <paramref name="fileName"/>: parses it while emitting three-address
/// code to an output directory, then generates x86 assembly from the TAC file.
/// On any failure the partial TAC file is removed and the error is printed.
/// </summary>
/// <param name="fileName">Path of the source file to compile.</param>
public void Compile(string fileName)
{
    var parent = Directory.GetParent(fileName);
    var dir = Directory.CreateDirectory($"output_{parent.Name}");
    var fileNameWithoutExtension = Path.GetFileNameWithoutExtension(fileName);
    var tacFilePath = $"{dir.FullName}/{fileNameWithoutExtension}.tac";
    var asmFilePath = $"{dir.FullName}/{fileNameWithoutExtension}.asm";
    try
    {
        FileIn = File.CreateText(tacFilePath);
        // using: the reader was previously never disposed (handle leak).
        using (var streamReader = new StreamReader(fileName))
        {
            var lexAnalyzer = new LexicalAnalyzerService(streamReader);
            var symbolTable = new SymbolTable { Printer = (val) => { Console.WriteLine(val); } };
            var syntaxParser = new SyntaxParserService(lexAnalyzer, symbolTable);
            PrintSourceCode(File.ReadAllText(fileName));
            syntaxParser.Parse();
            FileIn.Close();
            // Remove any stale assembly ONCE, before generation starts.
            // The old code deleted it inside the emit callback, which threw
            // away every previously appended chunk each time the callback ran.
            if (File.Exists(asmFilePath))
            {
                File.Delete(asmFilePath);
            }
            Intelx86GeneratorService.Generate(
                File.ReadAllLines(tacFilePath),
                syntaxParser.GlobalStrings,
                syntaxParser.MethodLocalSize,
                syntaxParser.MethodParamSize,
                (str) =>
                {
                    File.AppendAllText(asmFilePath, str);
                    Console.WriteLine(str);
                });
        }
    }
    catch (Exception ex)
    {
        // Delete the partial TAC file so a failed run leaves no bad artifact.
        // Null-conditional: FileIn is null if File.CreateText itself threw.
        if (File.Exists(tacFilePath))
        {
            FileIn?.Close();
            File.Delete(tacFilePath);
        }
        Print("Oops, there seems to be something wrong.\n\n", ErrorColor);
        Print(ex.Message, ErrorColor);
    }
}
/// <summary>
/// The lexer must return the arithmetic-operator tokens in exactly the
/// order they appear in the ARITHMETIC_OPERATORS input.
/// </summary>
public void TestArithmeticOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(ARITHMETIC_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.Plus, TokenType.PlusPlus, TokenType.PlusEqual,
        TokenType.Minus, TokenType.MinusMinus, TokenType.MinusEqual,
        TokenType.Divide, TokenType.DivideEqual,
        TokenType.Multiplication, TokenType.MultiplicationEqual,
        TokenType.Modulo, TokenType.ModuloEqual,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}
/// <summary>
/// Prints every token from the lexer to the console — errors in ErrorColor,
/// unknown tokens in InfoColor, everything else in NormalColor — pausing for
/// the user every 20 tokens, until EndOfFile is reached.
/// </summary>
/// <param name="lexAnalyzer">Token source to drain.</param>
private void TraverseAllTokens(LexicalAnalyzerService lexAnalyzer)
{
    const int tokensPerPage = 20;
    Console.WriteLine(Token.PRINT_HEADER);
    for (var tokenNumber = 1L; ; tokenNumber++)
    {
        var token = lexAnalyzer.GetNextToken();
        // Pause just before printing the token that starts a new page.
        if (tokenNumber % tokensPerPage == 0)
        {
            Console.WriteLine($"\nPage #{tokenNumber / tokensPerPage}\nPlease press enter to continue ... ");
            Console.ReadLine();
        }
        var color = token.HasError
            ? ErrorColor
            : token.Type == TokenType.Unknown ? InfoColor : NormalColor;
        Print(token, color);
        if (token.Type == TokenType.EndOfFile)
        {
            break;
        }
    }
}
/// <summary>
/// The lexer must return the boolean-operator tokens, then EndOfFile, in the
/// order they appear in the BOOLEAN_OPERATORS input.
/// </summary>
public void TestBooleanOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(BOOLEAN_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.BooleanNot, TokenType.BooleanNotEqual,
        TokenType.BooleanAnd, TokenType.BooleanOr, TokenType.BooleanEqual,
        TokenType.EndOfFile,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}
/// <summary>
/// The lexer must return the comparison-operator tokens in the order they
/// appear in the COMPARISON_OPERATORS input (two-char operators first, so
/// "&lt;=" is not split into "&lt;" and "=").
/// </summary>
public void TestComparisonOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(COMPARISON_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.LessThanOrEqual, TokenType.LessThan,
        TokenType.GreaterThanOrEqual, TokenType.GreaterThan,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}
/// <summary>
/// The lexer must return punctuation/bracket tokens in the order they appear
/// in the OTHER_OPERATORS input.
/// </summary>
public void TestOtherOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(OTHER_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.OpenSquareBracket, TokenType.CloseSquareBracket,
        TokenType.OpenParen, TokenType.CloseParen,
        TokenType.OpenCurlyBrace, TokenType.CloseCurlyBrace,
        TokenType.Comma, TokenType.Dot, TokenType.Colon, TokenType.Semicolon,
        TokenType.QuestionMark, TokenType.Assignment,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}
/// <summary>
/// The lexer must return the bitwise/logical-operator tokens (plain and
/// compound-assignment forms) in the order they appear in the
/// BITWISE_OPERATORS input.
/// </summary>
public void TestBitwiseOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(BITWISE_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.LogicalNot,
        TokenType.LogicalOr, TokenType.LogicalOrEqual,
        TokenType.LogicalAnd, TokenType.LogicalAndEqual,
        TokenType.LogicalExclusiveOr, TokenType.LogicalExclusiveOrEqual,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}
/// <summary>
/// The lexer must return the shift-operator tokens (compound-assignment form
/// before the plain form, so "&lt;&lt;=" is not split) in the order they appear
/// in the LOGICAL_SHIFT_OPERATORS input.
/// </summary>
/// <remarks>
/// NOTE(review): method name has a typo ("Shif") — kept as-is because the
/// public name is part of the interface; rename in a dedicated change.
/// </remarks>
public void TestLogicalShifOperators()
{
    var lexAnalyzer = new LexicalAnalyzerService(CreateStreamReaderWith(LOGICAL_SHIFT_OPERATORS));
    var expectedTypes = new[]
    {
        TokenType.BitwiseLeftShiftEqual, TokenType.BitwiseLeftShift,
        TokenType.UnsignedRightShiftEqual, TokenType.UnsignedRightShift,
        TokenType.BitwiseRightShiftEqual, TokenType.BitwiseRightShift,
    };
    foreach (var expected in expectedTypes)
    {
        Assert.AreEqual(expected, lexAnalyzer.GetNextToken().Type);
    }
}