// Parses the Expressionz production: an optional trailing operator
// ("=", "and", "or", or a boolean/math symbol token) followed by the
// appropriate right-hand side. The production is nullable, so returning
// false is not an error.
private static bool Expressionz()
{
    Token token = Tokens.GetToken();
    if (token.lexeme == "=")
    {
        // Assignment: push the operator, then require a full
        // assignment expression on the right-hand side.
        if (semanticPass)
        {
            SemanticActions.oPush(token);
        }
        Tokens.NextToken();
        if (!AssignmentExpression())
        {
            SyntaxError(Tokens.GetToken(), "assignment expression");
        }
        return (true);
    }
    bool isBinaryOperator = token.lexeme == "and"
        || token.lexeme == "or"
        || token.type == Token.Type.BoolSymbol
        || token.type == Token.Type.MathSymbol;
    if (isBinaryOperator)
    {
        // Binary operator: push it and require another expression.
        if (semanticPass)
        {
            SemanticActions.oPush(token);
        }
        Tokens.NextToken();
        if (!Expression())
        {
            SyntaxError(Tokens.GetToken(), "expression");
        }
        return (true);
    }
    return (false);
}
// Parses an AssignmentExpression: "new" <type> NewDeclaration,
// the built-in conversions itoa/atoi "(" Expression ")", or a plain
// Expression. Returns false when none of the alternatives match.
private static bool AssignmentExpression()
{
    Token token = Tokens.GetToken();
    if (token.lexeme == "new")
    {
        // "new" <type> NewDeclaration : object or array construction.
        Tokens.NextToken();
        if (!type())
        {
            SyntaxError(Tokens.GetToken(), "a type");
            return (false);
        }
        currentType = Tokens.GetToken().lexeme;
        Tokens.NextToken();
        if (!NewDeclaration())
        {
            SyntaxError(Tokens.GetToken(), "new declaration");
        }
        return (true);
    }
    if (token.lexeme == "itoa" || token.lexeme == "atoi")
    {
        // Built-in conversion: remember which one before advancing.
        string conversion = token.lexeme;
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != "(")
        {
            SyntaxError(Tokens.GetToken(), "(");
        }
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
        }
        Tokens.NextToken();
        if (!Expression())
        {
            SyntaxError(Tokens.GetToken(), "expression");
        }
        if (Tokens.GetToken().lexeme != ")")
        {
            SyntaxError(Tokens.GetToken(), ")");
        }
        Tokens.NextToken();
        if (semanticPass && (conversion == "atoi"))
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.atoi();
        }
        else if (semanticPass && (conversion == "itoa"))
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.itoa();
        }
        return (true);
    }
    // Otherwise an assignment expression is just an ordinary expression.
    if (Expression())
    {
        return (true);
    }
    return (false);
}
// Parses an optional suffix after an identifier: a function call
// "(" Argument_List? ")" or an array index "[" Expression "]".
// Nullable production: returns false when neither suffix is present.
private static bool Fn_Arr_Member()
{
    string lexeme = Tokens.GetToken().lexeme;
    if (lexeme == "(")
    {
        // Function call suffix.
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
            SemanticActions.BAL();  // begin argument list
        }
        Tokens.NextToken();
        Argument_List();  // the argument list is optional
        if (Tokens.GetToken().lexeme != ")")
        {
            SyntaxError(Tokens.GetToken(), ")");
        }
        if (semanticPass)
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.EAL();  // end argument list
            SemanticActions.func();
        }
        Tokens.NextToken();
        return (true);
    }
    if (lexeme == "[")
    {
        // Array index suffix; "@" prefix marks the type as an array.
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
        }
        currentType = "@" + currentType;
        Tokens.NextToken();
        if (!Expression())
        {
            SyntaxError(Tokens.GetToken(), "an expression");
        }
        if (Tokens.GetToken().lexeme != "]")
        {
            SyntaxError(Tokens.GetToken(), "]");
        }
        if (semanticPass)
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.arr();
        }
        Tokens.NextToken();
        return (true);
    }
    return (false);
}
// Parses a NewDeclaration following "new" <type>: either constructor
// arguments "(" Argument_List? ")" or an array size "[" Expression "]".
private static bool NewDeclaration()
{
    string lexeme = Tokens.GetToken().lexeme;
    if (lexeme == "(")
    {
        // Constructor call.
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
            SemanticActions.BAL();  // begin argument list
        }
        Tokens.NextToken();
        Argument_List();  // arguments are optional
        if (Tokens.GetToken().lexeme != ")")
        {
            SyntaxError(Tokens.GetToken(), ")");
        }
        if (semanticPass)
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.EAL();  // end argument list
            SemanticActions.NewObj();
        }
        Tokens.NextToken();
        return (true);
    }
    if (lexeme == "[")
    {
        // Array allocation with a size expression.
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
        }
        Tokens.NextToken();
        if (!Expression())
        {
            SyntaxError(Tokens.GetToken(), "expression");
        }
        if (Tokens.GetToken().lexeme != "]")
        {
            SyntaxError(Tokens.GetToken(), "]");
        }
        if (semanticPass)
        {
            SemanticActions.ShuntYardAll();
            SemanticActions.new_arr();
        }
        Tokens.NextToken();
        return (true);
    }
    return (false);
}
// Parses a comma-separated Argument_List: Expression ("," Expression)*.
// Returns false when no leading expression is present (empty list).
private static bool Argument_List()
{
    // At least one expression is required for a non-empty list.
    if (!Expression())
    {
        return (false);
    }
    // Consume any number of ","-separated expressions.
    while (Tokens.GetToken().lexeme == ",")
    {
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
        }
        Tokens.NextToken();
        if (!Expression())
        {
            SyntaxError(Tokens.GetToken(), "an expression");
        }
    }
    return (true);
}
// Parses the Expression production: a parenthesized expression, a
// literal ("true"/"false"/"null"/"this", numeric, character), or an
// identifier with optional call/index (Fn_Arr_Member) and member
// (Member_Refz) suffixes. Each alternative may be followed by the
// nullable Expressionz tail.
private static bool Expression() {
    string lexeme = Tokens.GetToken().lexeme;
    if (lexeme == "(") {
        // "(" Expression ")" Expressionz
        if (semanticPass) { SemanticActions.oPush(Tokens.GetToken()); }
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "expression"); }
        if (Tokens.GetToken().lexeme != ")") { SyntaxError(Tokens.GetToken(), ")"); }
        // Closing parenthesis: collapse pending operators on the stacks.
        if (semanticPass) { SemanticActions.ShuntYardAll(); }
        Tokens.NextToken();
        Expressionz();
        return(true);
    } else if (lexeme == "+" || lexeme == "-" || lexeme == "true" || lexeme == "false" || lexeme == "null" || lexeme == "this" || Tokens.GetToken().type == Token.Type.Number || Tokens.GetToken().type == Token.Type.Character) {
        if (Tokens.GetToken().lexeme == "this") {
            // "this" Member_Refz Expressionz
            if (semanticPass) { SemanticActions.iPush(Tokens.GetToken(), scope); SemanticActions.iExist(); }
            Tokens.NextToken();
            Member_Refz();
            Expressionz();
            return(true);
        } else if (lexeme == "true" || lexeme == "false" || lexeme == "null") {
            // Boolean/null literal: synthesize a global literal symbol.
            string[] data = new string[2];
            data[0] = "returnType:bool";
            data[1] = "accessMod:public";
            Symbol symbol;
            if (lexeme == "true") {
                symbol = new Symbol("g", ("Btrue"), "1", "blit", data);
            } else if (lexeme == "false") {
                symbol = new Symbol("g", ("Bfalse"), "0", "blit", data);
            } else {
                // null gets its own return type and sentinel value.
                data[0] = "returnType:null";
                symbol = new Symbol("g", "null", "2018", "null", data);
            }
            // NOTE(review): Add() runs on every pass and on every literal
            // occurrence, so repeated literals create repeated entries -
            // confirm SymbolTable deduplicates or that this is intended.
            SymbolTable.Add(symbol);
            if (semanticPass) { SemanticActions.lPush(symbol, Tokens.GetToken(), scope); }
            Tokens.NextToken();
            Expressionz();
            return(true);
        } else if (numeric_literal()) {
            // Numeric literal (possibly preceded by "+"/"-" - handled
            // inside numeric_literal; TODO confirm).
            Tokens.NextToken();
            Expressionz();
            return(true);
        } else if (character_literal()) {
            Tokens.NextToken();
            Expressionz();
            return(true);
        }
        return(false);
    } else if (Tokens.GetToken().type == Token.Type.Identifier) {
        // identifier Fn_Arr_Member? Member_Refz Expressionz
        currentIdentifier = Tokens.GetToken().lexeme;
        if (semanticPass) { SemanticActions.iPush(Tokens.GetToken(), scope); }
        Tokens.NextToken();
        Fn_Arr_Member();
        // iExist is deliberately deferred until after the optional
        // call/index suffix has been parsed.
        if (semanticPass) { SemanticActions.iExist(); }
        Member_Refz();
        Expressionz();
        return(true);
    }
    return(false);
}
// Parses the Statement production: "{...}" block, if/else, while,
// return, cout, cin, spawn/set, block, lock, release, or a bare
// expression statement. On the semantic pass the if/else and while
// branches emit intermediate-code labels and jumps via ICode's
// label stacks.
private static bool Statement() {
    // "{" Statement* "}"
    if (Tokens.GetToken().lexeme == "{") {
        Tokens.NextToken();
        while (Statement()) { ; }
        if (Tokens.GetToken().lexeme != "}") { SyntaxError(Tokens.GetToken(), "}"); }
        Tokens.NextToken();
        return(true);
    }
    // "if" "(" Expression ")" Statement ("else" Statement)?
    if (Tokens.GetToken().lexeme == "if") {
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != "(") { SyntaxError(Tokens.GetToken(), "("); }
        if (semanticPass) { SemanticActions.oPush(Tokens.GetToken()); }
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "an expression"); }
        if (Tokens.GetToken().lexeme != ")") { SyntaxError(Tokens.GetToken(), ")"); }
        if (semanticPass) { SemanticActions.ShuntYardAll(); SemanticActions.if_(Tokens.GetToken()); }
        Tokens.NextToken();
        if (!Statement()) { SyntaxError(Tokens.GetToken(), "a statement"); }
        if (Tokens.GetToken().lexeme == "else") {
            Tokens.NextToken();
            if (semanticPass) {
                // End of the then-body: jump over the else-body, then
                // place the if's false-branch label here.
                string SKIPELSE = ICode.SKIPELSE + ICode.labelCounter++ + " ";
                ICode.JMP(SKIPELSE);
                ICode.StackElse(SKIPELSE);
                ICode.Print(ICode.StackIf());
            }
            if (!Statement()) { SyntaxError(Tokens.GetToken(), "a statement"); }
            // After the else-body: emit the skip-else label.
            if (semanticPass) { ICode.Print(ICode.StackElse()); }
        } else if (semanticPass) {
            // No else clause: the if's false-branch label lands here.
            ICode.Print(ICode.StackIf());
        }
        return(true);
    }
    // "while" "(" Expression ")" Statement
    if (Tokens.GetToken().lexeme == "while") {
        if (semanticPass) {
            // Label the loop entry so the end of the body can jump back.
            string BEGIN = ICode.BEGIN + ICode.labelCounter++ + " ";
            ICode.Print(BEGIN);
            ICode.StackWhile(BEGIN);
        }
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != "(") { SyntaxError(Tokens.GetToken(), "("); }
        if (semanticPass) { SemanticActions.oPush(Tokens.GetToken()); }
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "an expression"); }
        if (Tokens.GetToken().lexeme != ")") { SyntaxError(Tokens.GetToken(), ")"); }
        if (semanticPass) { SemanticActions.ShuntYardAll(); SemanticActions.while_(Tokens.GetToken()); }
        Tokens.NextToken();
        if (!Statement()) { SyntaxError(Tokens.GetToken(), "a statement"); }
        if (semanticPass) {
            // End of the body: jump back to the test, then emit the
            // loop-exit label popped from the end-while stack.
            string BEGIN = ICode.StackWhile();
            ICode.JMP(BEGIN);
            string ENDWHILE =
                ICode.StackEndWhile();
            ICode.Print(ENDWHILE);
        }
        return(true);
    }
    // "return" Expression? ";"
    if (Tokens.GetToken().lexeme == "return") {
        Tokens.NextToken();
        Expression();  // return value is optional
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.return_(Tokens.GetToken(), scope); }
        Tokens.NextToken();
        return(true);
    }
    // "cout" "<<" Expression ";"
    if (Tokens.GetToken().lexeme == "cout") {
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != "<<") { SyntaxError(Tokens.GetToken(), "<<"); }
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "an expression"); }
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.cout(); }
        Tokens.NextToken();
        return(true);
    }
    // "cin" ">>" Expression ";"
    if (Tokens.GetToken().lexeme == "cin") {
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != ">>") { SyntaxError(Tokens.GetToken(), ">>"); }
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "an expression"); }
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.cin(); }
        Tokens.NextToken();
        return(true);
    }
    // "spawn" Expression "set" identifier ";"
    if (Tokens.GetToken().lexeme == "spawn") {
        Tokens.NextToken();
        if (!Expression()) { SyntaxError(Tokens.GetToken(), "an expression"); }
        if (Tokens.GetToken().lexeme != "set") { SyntaxError(Tokens.GetToken(), "set"); }
        Tokens.NextToken();
        if (Tokens.GetToken().type != Token.Type.Identifier) { SyntaxError(Tokens.GetToken(), "an identifier"); }
        if (semanticPass) { SemanticActions.iPush(Tokens.GetToken(), scope); }
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.spawn(); }
        Tokens.NextToken();
        return(true);
    }
    // "block" ";"
    if (Tokens.GetToken().lexeme == "block") {
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.block(); }
        Tokens.NextToken();
        return(true);
    }
    // "lock" identifier ";"
    if (Tokens.GetToken().lexeme == "lock") {
        Tokens.NextToken();
        if
            (Tokens.GetToken().type != Token.Type.Identifier) { SyntaxError(Tokens.GetToken(), "an identifier"); }
        if (semanticPass) { SemanticActions.iPush(Tokens.GetToken(), scope); }
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.lock_(); }
        Tokens.NextToken();
        return(true);
    }
    // "release" identifier ";"
    if (Tokens.GetToken().lexeme == "release") {
        Tokens.NextToken();
        if (Tokens.GetToken().type != Token.Type.Identifier) { SyntaxError(Tokens.GetToken(), "an identifier"); }
        if (semanticPass) { SemanticActions.iPush(Tokens.GetToken(), scope); }
        Tokens.NextToken();
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.release(); }
        Tokens.NextToken();
        return(true);
    }
    // Expression ";"
    if (Expression()) {
        if (Tokens.GetToken().lexeme != ";") { SyntaxError(Tokens.GetToken(), ";"); }
        if (semanticPass) { SemanticActions.EOE(); }
        Tokens.NextToken();
        return(true);
    }
    return(false);
}
// Parses a local variable declaration:
//   <type> <identifier> ("[" "]")? ("=" AssignmentExpression)? ";"
// Registers the variable in the symbol table and, on the semantic
// pass, runs tExist/dup/vPush and closes the statement with EOE.
private static bool VariableDeclaration()
{
    // Commit only when the token after the type is an identifier.
    if (Tokens.PeekToken().type != Token.Type.Identifier)
    {
        return (false);
    }
    if (!type())
    {
        return (false);
    }
    if (semanticPass)
    {
        SemanticActions.tExist();
    }
    Tokens.NextToken();
    currentIdentifier = Tokens.GetToken().lexeme;
    identifierToken = Tokens.GetToken();
    Tokens.NextToken();
    if (Tokens.GetToken().lexeme == "[")
    {
        // "[]" suffix: the "@" prefix marks an array type.
        Tokens.NextToken();
        currentType = "@" + currentType;
        if (Tokens.GetToken().lexeme != "]")
        {
            SyntaxError(Tokens.GetToken(), "]");
        }
        Tokens.NextToken();
    }
    string[] metadata = new string[2];
    metadata[0] = "returnType:" + currentType;
    metadata[1] = "accessMod:" + accessMod;
    Symbol variable = new Symbol(scope, ("L" + uniqueCounter++), currentIdentifier, "lvar", metadata);
    identifierSymbol = variable;
    if (semanticPass)
    {
        SemanticActions.dup(identifierToken, scope);
        SemanticActions.vPush(variable, identifierToken, scope);
    }
    SymbolTable.Add(variable);
    if (Tokens.GetToken().lexeme == "=")
    {
        // Optional initializer.
        if (semanticPass)
        {
            SemanticActions.oPush(Tokens.GetToken());
        }
        Tokens.NextToken();
        if (!AssignmentExpression())
        {
            SyntaxError(Tokens.GetToken(), "assignment expression");
        }
    }
    if (Tokens.GetToken().lexeme != ";")
    {
        SyntaxError(Tokens.GetToken(), ";");
    }
    if (semanticPass)
    {
        SemanticActions.EOE();
    }
    Tokens.NextToken();
    return (true);
}
// Parses the remainder of a field declaration after <type> <identifier>:
// either a method "(" ParameterList? ")" MethodBody, or a member
// variable with optional "[" "]" suffix and "=" initializer, ending
// in ";". Returns false when neither form matches.
private static bool FieldDeclaration() {
    if (Tokens.GetToken().lexeme == "(") {
        // Method declaration.
        Tokens.NextToken();
        parameters = "";
        // currentType was set by the caller's type parse; capture it
        // before the parameter list can overwrite it.
        string methodType = currentType;
        offset = 0;
        sizeParameters = 0;
        ParameterList();
        methodSize = 0;
        string[] data = new string[2];
        data[0] = "returnType:" + methodType;
        data[1] = "accessMod:" + accessMod;
        Symbol symbol = new Symbol(scope, ("M" + uniqueCounter++), currentIdentifier, "method", data);
        symbol.parameters = parameters;
        currentMethodName = "M";
        if (semanticPass) { ICode.FUNC(symbol.symid); }
        if (Tokens.GetToken().lexeme != ")") { SyntaxError(Tokens.GetToken(), ")"); }
        Tokens.NextToken();
        if (!MethodBody()) { SyntaxError(Tokens.GetToken(), "method body"); }
        // The method's frame size is only known after its body parses.
        symbol.size = methodSize;
        SymbolTable.Add(symbol);
        offset = 0;
        methodSize = 0;
        return(true);
    } else {
        // Member variable: ("[" "]")? ("=" AssignmentExpression)? ";"
        // flag records that tokens were consumed, so a missing ";" is
        // reported as a syntax error rather than a silent mismatch.
        bool flag = false;
        if (Tokens.GetToken().lexeme == "[") {
            flag = true;
            if (Tokens.PeekToken().lexeme != "]") { return(false); }
            Tokens.NextToken();
            Tokens.NextToken();
        }
        // identifierSymbol/identifierToken were recorded by the caller.
        if (semanticPass) { SemanticActions.vPush(identifierSymbol, identifierToken, scope); }
        if (Tokens.GetToken().lexeme == "=") {
            flag = true;
            if (semanticPass) { SemanticActions.oPush(Tokens.GetToken()); }
            Tokens.NextToken();
            if (!AssignmentExpression()) { return(false); }
        }
        if ((Tokens.GetToken().lexeme == ";")) {
            if (semanticPass) { SemanticActions.EOE(); }
            Tokens.NextToken();
            return(true);
        }
        if (flag) { SyntaxError(Tokens.GetToken(), ";"); }
    }
    return(false);
}