public void ValidIDReservedWord()
{
    // Every character of a plain identifier must be a valid start symbol.
    var inputString = new InputString("abc");
    var idTokenGenerator = new IDReservedWordTokenGenerator();
    var currentSymbol = inputString.GetNextSymbol();
    do
    {
        Assert.True(idTokenGenerator.validStart(currentSymbol));
        currentSymbol = inputString.GetNextSymbol();
    } while (currentSymbol.character != '\0');

    // Identifiers lex as ID; "true"/"false" are boolean literals, not IDs.
    inputString = new InputString("hola Hola abd142_ hola_ adios_1542 true false");
    var expectedTypes = new TokenType[]
    {
        TokenType.ID, TokenType.ID, TokenType.ID, TokenType.ID,
        TokenType.ID, TokenType.LIT_BOOL, TokenType.LIT_BOOL
    };
    var expectedLexemes = new string[]
    {
        "hola", "Hola", "abd142_", "hola_", "adios_1542", "true", "false"
    };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == expectedTypes[i]);
        Assert.True(currentToken.lexeme == expectedLexemes[i]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + expectedTypes[i]);
        currentToken = lexer.GetNextToken();
        i++;
    } while (currentToken.type != TokenType.EOF);
}
public void ValidLiteralIntDecimal()
{
    var inputString = new InputString("7894561230");
    var literalIntTokenGenerator = new LiteralIntTokenGenerator();
    var currentSymbol = inputString.GetNextSymbol();
    do
    {
        Assert.True(literalIntTokenGenerator.validStart(currentSymbol));
        currentSymbol = inputString.GetNextSymbol();
    } while (currentSymbol.character != '\0');

    inputString = new InputString("123 12 8 0");
    var expectedLexemes = new string[] { "123", "12", "8", "0" };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_INT);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_INT);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
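// The validStart loops above assume InputString.GetNextSymbol() yields an
// object exposing a `character` field and signals end of input with '\0'.
// A minimal sketch of that assumed contract (SymbolSketch/InputStringSketch
// are hypothetical names; the project's real types may differ):
public class SymbolSketch
{
    public char character;
}

public class InputStringSketch
{
    private readonly string text;
    private int position;

    public InputStringSketch(string text) { this.text = text; }

    public SymbolSketch GetNextSymbol()
    {
        // Return '\0' once every character has been consumed.
        char c = position < text.Length ? text[position++] : '\0';
        return new SymbolSketch { character = c };
    }
}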
public void ValidPunctuation()
{
    // Each symbol sits on its own line so the trailing line comment naming
    // it does not swallow the rest of the input.
    var inputString = new InputString(@"
        : // ':'
        , // ','
        ; // ';'
        ( // '('
        ) // ')'
        { // '{'
        } // '}'
        [ // '['
        ] // ']'");
    var expectedLexemes = new string[]
    {
        ":", ",", ";", "(", ")", "{", "}", "[", "]"
    };
    var expectedTypes = new TokenType[]
    {
        TokenType.PUNT_COLON,
        TokenType.PUNT_COMMA,
        TokenType.PUNT_END_STATEMENT_SEMICOLON,
        TokenType.PUNT_PAREN_OPEN,
        TokenType.PUNT_PAREN_CLOSE,
        TokenType.PUNT_CURLY_BRACKET_OPEN,
        TokenType.PUNT_CURLY_BRACKET_CLOSE,
        TokenType.PUNT_SQUARE_BRACKET_OPEN,
        TokenType.PUNT_SQUARE_BRACKET_CLOSE
    };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == expectedTypes[i]);
        Assert.True(currentToken.lexeme == expectedLexemes[i]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + expectedTypes[i]);
        currentToken = lexer.GetNextToken();
        i++;
    } while (currentToken.type != TokenType.EOF);
}
void ValidComments()
{
    // The entire input is a block comment, so the lexer should consume it
    // silently; the loop prints whatever tokens it does produce for manual
    // inspection until EOF.
    var inputString = new InputString(@"
        /*fds f
        sdf sd f
        sd f sdf
        int float */");
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    do
    {
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + currentToken.type);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
public void ValidLiteralString()
{
    var inputString = new InputString("\"hola 15245\" \"adios dsd sa \\t \\v sta\"");
    var expectedLexemes = new string[]
    {
        "\"hola 15245\"",
        "\"adios dsd sa \\t \\v sta\""
    };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_STRING);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_STRING);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
public void ValidLiteralFloat()
{
    var inputString = new InputString("123f 12F 8.58f 0.05F");
    var expectedLexemes = new string[] { "123f", "12F", "8.58f", "0.05F" };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_FLOAT);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_FLOAT);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
public void ValidLiteralChar()
{
    var inputString = new InputString("'a' 'b' '\\a' '\\b'");
    var expectedLexemes = new string[] { "'a'", "'b'", "'\\a'", "'\\b'" };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_CHAR);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_CHAR);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
public void ValidLiteralIntBinary()
{
    var inputString = new InputString("0b0100 0B01110");
    var expectedLexemes = new string[] { "0b0100", "0B01110" };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_INT);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_INT);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
void ValidReadAllTokensFromFile()
{
    // Note: this path is machine-specific; the test only runs where the
    // sample file exists.
    var inputString = new InputFile(@"C:\Users\Kenystev\Documents\Compiladores\CStoJS_Compiler\lexer_test.txt");
    var tokenGenerators = Resources.getTokenGenerators();
    var lexer = new Compiler.Lexer(inputString, tokenGenerators);
    Token token = lexer.GetNextToken();
    while (token.type != TokenType.EOF)
    {
        System.Console.Out.WriteLine(token);
        token = lexer.GetNextToken();
    }
    System.Console.Out.WriteLine(token);
    Assert.True(token.type == TokenType.EOF);
}
public void ValidLiteralVerbatimString()
{
    var inputString = new InputString(@"@""Hola como """"estan todos"""" nada \mas\ que 'hacer',....."" @""otra ves{](0)""");
    var expectedLexemes = new string[]
    {
        @"@""Hola como """"estan todos"""" nada \mas\ que 'hacer',.....""",
        @"@""otra ves{](0)"""
    };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == TokenType.LIT_STRING);
        Assert.True(currentToken.lexeme == expectedLexemes[i++]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + TokenType.LIT_STRING);
        currentToken = lexer.GetNextToken();
    } while (currentToken.type != TokenType.EOF);
}
public void ValidOperators()
{
    var inputString = new InputString(@"= += -= *= /= %= <<= >>= &= ^= |= & | ^ ~ << >> && || ! sizeof ? ?? is as == != > < >= <= + - * / % ++ --");
    var expectedLexemes = new string[]
    {
        "=", "+=", "-=", "*=", "/=", "%=", "<<=", ">>=", "&=", "^=", "|=",
        "&", "|", "^", "~", "<<", ">>", "&&", "||", "!", "sizeof",
        "?", "??", "is", "as", "==", "!=", ">", "<", ">=", "<=",
        "+", "-", "*", "/", "%", "++", "--"
    };
    var expectedTypes = new TokenType[]
    {
        TokenType.OP_ASSIGN,                 // =
        TokenType.OP_ASSIGN_SUM,             // +=
        TokenType.OP_ASSIGN_SUBSTRACT,       // -=
        TokenType.OP_ASSIGN_MULTIPLICATION,  // *=
        TokenType.OP_ASSIGN_DIVISION,        // /=
        TokenType.OP_ASSIGN_MODULO,          // %=
        TokenType.OP_ASSIGN_SHIFT_LEFT,      // <<=
        TokenType.OP_ASSIGN_SHIFT_RIGHT,     // >>=
        TokenType.OP_ASSIGN_BITWISE_AND,     // &=
        TokenType.OP_ASSIGN_XOR,             // ^=
        TokenType.OP_ASSIGN_BITWISE_OR,      // |=
        TokenType.OP_BITWISE_AND,            // &
        TokenType.OP_BITWISE_OR,             // |
        TokenType.OP_XOR,                    // ^
        TokenType.OP_BITWISE_NOT,            // ~
        TokenType.OP_SHIFT_LEFT,             // <<
        TokenType.OP_SHIFT_RIGHT,            // >>
        TokenType.OP_AND,                    // &&
        TokenType.OP_OR,                     // ||
        TokenType.OP_NOT,                    // !
        TokenType.OP_SIZEOF,                 // sizeof
        TokenType.OP_TERNARY,                // ?
        TokenType.OP_NULL_COALESCING,        // ??
        TokenType.OP_IS,                     // is
        TokenType.OP_AS,                     // as
        TokenType.OP_EQUAL,                  // ==
        TokenType.OP_DISTINCT,               // !=
        TokenType.OP_MORE_THAN,              // >
        TokenType.OP_LESS_THAN,              // <
        TokenType.OP_MORE_AND_EQUAL_THAN,    // >=
        TokenType.OP_LESS_AND_EQUAL_THAN,    // <=
        TokenType.OP_SUM,                    // +
        TokenType.OP_SUBSTRACT,              // -
        TokenType.OP_MULTIPLICATION,         // *
        TokenType.OP_DIVISION,               // /
        TokenType.OP_MODULO,                 // %
        TokenType.OP_PLUS_PLUS,              // ++
        TokenType.OP_MINUS_MINUS             // --
    };
    var lexer = new Compiler.Lexer(inputString, Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    do
    {
        Assert.True(currentToken.type == expectedTypes[i]);
        Assert.True(currentToken.lexeme == expectedLexemes[i]);
        Console.WriteLine("lexeme: " + currentToken.lexeme + " | TokenType: " + expectedTypes[i]);
        currentToken = lexer.GetNextToken();
        i++;
    } while (currentToken.type != TokenType.EOF);
}
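// Every test above repeats the same GetNextToken/Assert loop. A sketch of a
// shared helper that would factor it out (AssertTokens is a suggested name,
// not an existing member of this suite). Using while instead of do-while
// also avoids indexing past the arrays if the first token is already EOF:
private static void AssertTokens(string input, TokenType[] expectedTypes, string[] expectedLexemes)
{
    var lexer = new Compiler.Lexer(new InputString(input), Resources.getTokenGenerators());
    var currentToken = lexer.GetNextToken();
    int i = 0;
    while (currentToken.type != TokenType.EOF)
    {
        Assert.True(currentToken.type == expectedTypes[i]);
        Assert.True(currentToken.lexeme == expectedLexemes[i]);
        currentToken = lexer.GetNextToken();
        i++;
    }
    // Guard against the lexer producing fewer tokens than expected.
    Assert.True(i == expectedLexemes.Length);
}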
public Parser(Lexer lexer)
{
    this.lexer = lexer;
    varDictionary = new Dictionary<string, NodeIdentifier>();
}
public Parser(string expression)
{
    this.lexer = new Lexer(expression);
}
public Parser(Lexer lexer)
{
    this.lexer = lexer;
    this.token = lexer.GetNextToken();
    vars = new Dictionary<string, float>();
}
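// Assumed usage of the expression-oriented Parser above. The entry-point
// name (Parse) is hypothetical and may differ in the real class:
// var parser = new Parser(new Lexer("1 + 2 * x"));
// parser.Parse(); // walks tokens via lexer.GetNextToken(), reading/writing vars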
public string Compile(string source, string path, string tokensJson, int DebugLevel = 0)
{
    if (DebugLevel >= 1) { Console.WriteLine("Compiler running"); }
    DateTime tStart = DateTime.Now;
    DateTime t1 = DateTime.Now;

    // Lexing
    Lexer lexer = new Lexer(tokensJson);
    if (DebugLevel >= 1) { Console.WriteLine("Running Lexer"); }
    var tokens = lexer.Analyse(source);
    if (DebugLevel >= 1)
    {
        foreach (Token t in tokens) { Console.WriteLine(t.Name); }
        Console.WriteLine("Lexer on main: " + DateTime.Now.Subtract(t1).TotalMilliseconds + " ms");
    }
    t1 = DateTime.Now;
    if (DebugLevel >= 2)
    {
        Console.WriteLine(string.Join(" ", tokens.Select(t => t.Name)));
        Console.WriteLine("");
        Console.WriteLine("--------------------------------------");
        Console.WriteLine("");
    }

    // Preprocessing (resolves imports)
    if (DebugLevel >= 1) { Console.WriteLine("Running Preprocessor"); }
    Preprocessor preprocessor = new Preprocessor();
    tokens = preprocessor.Process(lexer, path, tokens);
    if (DebugLevel >= 1) { Console.WriteLine("Preprocessor: " + DateTime.Now.Subtract(t1).TotalMilliseconds + " ms"); }
    t1 = DateTime.Now;
    if (DebugLevel >= 2)
    {
        Console.WriteLine("Tokens with imports:");
        Console.WriteLine(string.Join(" ", tokens.Select(t => t.Name)));
        Console.WriteLine("");
        Console.WriteLine("--------------------------------------");
        Console.WriteLine("");
    }

    // Parsing
    if (DebugLevel >= 1) { Console.WriteLine("Running Parser"); }
    ProgramParser parser = new ProgramParser();
    var tokenEnumerator = tokens.Select(t => new Parsing.Data.Token()
    {
        Name = t.Name,
        Value = t.Value,
        FileName = t.FileName,
        Row = t.Row,
        Column = t.Column
    }).GetEnumerator();
    tokenEnumerator.MoveNext();
    var parseTree = parser.ParseProgram(tokenEnumerator);
    if (DebugLevel >= 1) { Console.WriteLine("Parser: " + DateTime.Now.Subtract(t1).TotalMilliseconds + " ms"); }
    t1 = DateTime.Now;
    var parseTreeLines = parseTree.Accept(new Parsing.Visitors.TreePrintVisitor());
    if (DebugLevel >= 2)
    {
        foreach (var line in parseTreeLines) { Console.WriteLine(line); }
        Console.WriteLine("");
        Console.WriteLine("--------------------------------------");
        Console.WriteLine("");
    }

    // Parse tree -> AST
    if (DebugLevel >= 1) { Console.WriteLine("Running Ast Translator"); }
    var astTranslator = new Translation.ProgramToAST.ProgramToASTTranslator();
    AST.Data.AST ast = astTranslator.TranslatetoAST(parseTree) as AST.Data.AST;
    if (ast == null)
    {
        throw new Translation.TranslationException(astTranslator.RuleError);
    }
    if (DebugLevel >= 1) { Console.WriteLine("tangToAST: " + DateTime.Now.Subtract(t1).TotalMilliseconds + " ms"); }
    t1 = DateTime.Now;
    var astLines = ast.Accept(new AST.Visitors.TreePrintVisitor());
    if (DebugLevel >= 2)
    {
        foreach (var line in astLines) { Console.WriteLine(line); }
        Console.WriteLine("");
        Console.WriteLine("--------------------------------------");
        Console.WriteLine("");
    }

    // AST -> C
    if (DebugLevel >= 1) { Console.WriteLine("Running C Translator"); }
    var cTranslator = new Translation.ASTToC.ASTToCTranslator();
    C.Data.C c = cTranslator.Translate(ast) as C.Data.C;
    if (DebugLevel >= 1) { Console.WriteLine("astToC: " + DateTime.Now.Subtract(t1).TotalMilliseconds + " ms"); }
    t1 = DateTime.Now;
    var cLines = c.Accept(new C.Visitors.TreePrintVisitor());
    if (DebugLevel >= 2)
    {
        foreach (var line in cLines) { Console.WriteLine(line); }
        Console.WriteLine("");
        Console.WriteLine("--------------------------------------");
        Console.WriteLine("");
    }

    // Emit C source text
    var cStr = c.Accept(new C.Visitors.TextPrintVisitor());
    if (DebugLevel >= 1)
    {
        foreach (var term in cStr) { Console.WriteLine(term); }
        Console.WriteLine();
    }
    if (DebugLevel >= 1) { Console.WriteLine("Compiler run-time: " + DateTime.Now.Subtract(tStart).TotalMilliseconds + " ms"); }
    return string.Join("\n", cStr);

    /* Legacy single-shot pipeline, kept for reference.
     * File path relative to where the debug file is located, which is in a land far, far away:
     * Lexer lexer = new Lexer(AppContext.BaseDirectory + "/../../../../../docs/tang.tokens.json");
     *
     * var tokens = lexer.Analyse(source);
     * Preprocessor preprocessor = new Preprocessor();
     * tokens = preprocessor.Process(lexer, tokens);
     * ProgramParser parser = new ProgramParser();
     * var tokenEnumerator = tokens.Select(t => new Parsing.Data.Token() { Name = t.Name, Value = t.Value }).GetEnumerator();
     * tokenEnumerator.MoveNext();
     * var parseTree = parser.ParseProgram(tokenEnumerator);
     * var astTranslator = new Translation.ProgramToAST.ProgramToASTTranslator();
     * AST.Data.AST ast = astTranslator.Translatep(parseTree) as AST.Data.AST;
     * var cTranslator = new Translation.ASTToC.ASTToCTranslator();
     * C.Data.C c = cTranslator.Translate(ast) as C.Data.C;
     * var cStr = c.Accept(new C.Visitors.TextPrintVisitor());
     *
     * return string.Join("\n", cStr);
     */
}