internal ParenthesizedExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // ( TERN_EXPR )
  // ^
  bool parens = false;
  if (lexer.TokenType == TokenEnum.BRACKETOPEN)
  {
    parens = true;
    _scope = scope.Next;
    lexer.Next(); // BRACKETOPEN
  }
  else
  {
    _scope = scope;
  }

  _expression = new Expression(_scope, lexer).Get();

  if (parens)
  {
    if (lexer.TokenType != TokenEnum.BRACKETCLOSE)
    {
      throw new ParseException(lexer, TokenEnum.BRACKETCLOSE);
    }
    else
    {
      lexer.Next(); // BRACKETCLOSE
    }
  }
}
internal PrimaryExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  TokenEnum t = lexer.TokenType;
  switch (t)
  {
    case TokenEnum.BRACKETOPEN:
      _child = new ParenthesizedExpression(scope, lexer).Get();
      break;
    case TokenEnum.FUNCTION:
      _child = new Function(scope, lexer).Get();
      break;
    case TokenEnum.VARIABLE:
      _child = new Variable(scope, lexer).Get();
      break;
    case TokenEnum.NEW:
      _child = new NewObjectExpression(scope, lexer).Get();
      break;

    // Literals
    case TokenEnum.NULLLITERAL:
      _child = new NullLiteral(scope, lexer).Get();
      break;
    case TokenEnum.BOOLEANLITERAL:
      _child = new BoolLiteral(scope, lexer).Get();
      break;
    case TokenEnum.DECIMALINTEGERLITERAL:
      _child = new IntLiteral(scope, lexer).Get();
      break;
    case TokenEnum.HEXINTEGERLITERAL:
      _child = new HexLiteral(scope, lexer).Get();
      break;
    case TokenEnum.REALLITERAL:
      _child = new SingleLiteral(scope, lexer).Get();
      break;
    case TokenEnum.STRINGLITERAL:
      _child = new StringLiteral(scope, lexer).Get();
      break;
    case TokenEnum.BRACEOPEN:
      _child = new ArrayLiteral(scope, lexer).Get();
      break;

    default:
      throw new ParseException(lexer);
  }
}
internal LogicalAndExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // EQUALEXPR && EQUALEXPR ...
  _first = GetNext(scope, lexer);

  while (lexer.TokenType == TokenEnum.AMPAMP) // &&
  {
    lexer.Next(); // AMPAMP
    _set.Add(GetNext(scope, lexer));
  }
}
internal LogicalOrExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // ANDEXPR || ANDEXPR ...
  _first = GetNext(scope, lexer);

  while (lexer.TokenType == TokenEnum.PIPEPIPE) // ||
  {
    lexer.Next(); // PIPEPIPE
    _set.Add(GetNext(scope, lexer));
  }
}
internal IndexedExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // EXPR[EXPR,...][EXPR,...]...
  // ^
  // Multi-dimensional array example:
  //   float[,][,] f = new float[1, 1][,];
  //   f[0, 0][1, 2] = 1;
  _expression = GetNext(scope, lexer);

  if (lexer.TokenType != TokenEnum.SQBRACKETOPEN)
  {
    return;
  }

  // indexer
  if (lexer.TokenType == TokenEnum.SQBRACKETOPEN)
  {
    List<CExpression[]> indexlist = new List<CExpression[]>();
    while (lexer.TokenType == TokenEnum.SQBRACKETOPEN)
    {
      lexer.Next(); // SQBRACKETOPEN
      List<CExpression> innerlist = new List<CExpression>
      {
        new Expression(scope, lexer).Get()
      };

      while (lexer.TokenType == TokenEnum.COMMA)
      {
        lexer.Next(); // COMMA
        innerlist.Add(new Expression(scope, lexer).Get());
      }
      indexlist.Add(innerlist.ToArray());

      if (lexer.TokenType != TokenEnum.SQBRACKETCLOSE)
      {
        throw new ParseException(lexer, TokenEnum.SQBRACKETCLOSE);
      }
      lexer.Next(); // SQBRACKETCLOSE
    }

    _indices_expr = indexlist.ToArray();
    _indices = new int[_indices_expr.Length][];
    for (int i = 0; i < _indices_expr.Length; i++)
    {
      _indices[i] = new int[_indices_expr[i].Length];
    }
  }
}
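// Illustration (not part of the parser source; the class name is hypothetical): the jagged,
// multi-dimensional shape the indexer loop above accepts, written out in plain C#. An access
// such as f[0, 0][1, 2] yields two bracket groups, each with a comma-separated index list -
// exactly the SQBRACKETOPEN / COMMA / SQBRACKETCLOSE pattern collected into _indices_expr.
internal static class IndexedExpressionExample
{
  internal static void Run()
  {
    float[,][,] f = new float[1, 1][,]; // 1x1 outer array of 2-D float arrays
    f[0, 0] = new float[2, 3];          // give the single outer element an inner 2x3 array
    f[0, 0][1, 2] = 1;                  // first bracket group indexes the outer array,
                                        // the second indexes the inner one
  }
}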
internal AddExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // MULTIEXPR + MULTIEXPR ...
  // MULTIEXPR - MULTIEXPR ...
  _first = GetNext(scope, lexer);

  while (lexer.TokenType == TokenEnum.PLUS ||  // +
         lexer.TokenType == TokenEnum.MINUS    // -
        )
  {
    TokenEnum _type = lexer.TokenType;
    lexer.Next(); // PLUS / MINUS
    _set.Add(GetNext(scope, lexer), _type);
  }
}
internal MultiplyExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // UNARYEXPR * UNARYEXPR ...
  // UNARYEXPR / UNARYEXPR ...
  // UNARYEXPR % UNARYEXPR ...
  _first = GetNext(scope, lexer);

  while (lexer.TokenType == TokenEnum.ASTERISK ||  // *
         lexer.TokenType == TokenEnum.SLASH ||     // /
         lexer.TokenType == TokenEnum.PERCENT      // %
        )
  {
    TokenEnum _type = lexer.TokenType;
    lexer.Next(); // ASTERISK / SLASH / PERCENT
    _set.Add(GetNext(scope, lexer), _type);
  }
}
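// Note (illustrative, not from the source; the class name is hypothetical): because the additive
// level pulls its operands from the multiplicative level (MULTIEXPR + MULTIEXPR above), an input
// such as "1 + 2 * 3" groups as 1 + (2 * 3). The same grouping in plain C#, for reference:
internal static class PrecedenceExample
{
  internal static void Run()
  {
    System.Console.WriteLine(1 + 2 * 3); // prints 7, i.e. 1 + (2 * 3), not (1 + 2) * 3
  }
}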
internal TernaryExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // OREXPR ? EXPR : EXPR
  _question = GetNext(scope, lexer);

  if (lexer.TokenType == TokenEnum.QUESTIONMARK)
  {
    lexer.Next(); // QUESTIONMARK
    _true = new Expression(scope, lexer).Get();

    if (lexer.TokenType == TokenEnum.COLON)
    {
      lexer.Next(); // COLON
      _false = new Expression(scope, lexer).Get();
    }
    else
    {
      throw new ParseException(lexer, TokenEnum.COLON);
    }
  }
}
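// Note (illustrative, not from the source; the class name is hypothetical): since both branches
// above restart at Expression, conditionals nest to the right, so "a ? b : c ? d : e" reads as
// a ? b : (c ? d : e) - the same associativity C# itself uses:
internal static class TernaryExample
{
  internal static void Run()
  {
    int a = 0, b = 1, c = 2, d = 3, e = 4;
    System.Console.WriteLine(a != 0 ? b : c != 0 ? d : e); // prints 3: falls through to (c != 0 ? d : e)
  }
}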
internal EqualityExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // RELATEEXPR == RELATEEXPR ...
  // RELATEEXPR != RELATEEXPR ...
  _first = GetNext(scope, lexer);

  TokenEnum _type = lexer.TokenType;
  if (_type == TokenEnum.EQUAL) // ==
  {
    lexer.Next(); // EQUAL
    _second = GetNext(scope, lexer);
  }
  else if (_type == TokenEnum.NOTEQUAL) // !=
  {
    lexer.Next(); // NOTEQUAL
    isUnequal = true;
    _second = GetNext(scope, lexer);
  }
}
internal RelationalExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // ADDEXPR <  ADDEXPR
  // ADDEXPR >  ADDEXPR
  // ADDEXPR <= ADDEXPR
  // ADDEXPR >= ADDEXPR
  _first = GetNext(scope, lexer);

  _type = lexer.TokenType;
  if (_type == TokenEnum.LESSTHAN ||     // <
      _type == TokenEnum.GREATERTHAN ||  // >
      _type == TokenEnum.LESSEQUAL ||    // <=
      _type == TokenEnum.GREATEREQUAL    // >=
     )
  {
    lexer.Next(); // LESSTHAN / GREATERTHAN / LESSEQUAL / GREATEREQUAL
    _second = GetNext(scope, lexer);
  }
  else
  {
    _type = TokenEnum.NOTHING;
  }
}
internal UnaryExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  // + PRIMARY
  // - PRIMARY
  // ! PRIMARY
  // ~ PRIMARY  // not supported yet
  // TODO: ++/-- PRIMARY
  // ^
  _type = lexer.TokenType;
  if (_type == TokenEnum.PLUS ||   // +
      _type == TokenEnum.MINUS ||  // -
      _type == TokenEnum.NOT       // !
     )
  {
    lexer.Next(); // PLUS / MINUS / NOT
    _primary = GetNext(scope, lexer);
  }
  else
  {
    _primary = GetNext(scope, lexer);
    _type = TokenEnum.NOTHING;
  }
}
internal NewObjectExpression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  if (lexer.TokenType == TokenEnum.NEW)
  {
    lexer.Next(); // NEW
    _ttypeName = lexer.TokenContents;
    _type = Parser.TypeTokens[_ttypeName];
    if (_type == null)
    {
      throw new ParseException(lexer, "Type identifier expected, read '{0}' instead.".F(_ttypeName));
    }
    lexer.Next(); // DECL

    if (lexer.TokenType == TokenEnum.SQBRACKETOPEN)
    {
      lexer.Next(); // SQBRACKETOPEN
      List<int> dimlist = new List<int>();
      List<CExpression> innerlist = new List<CExpression>();

      // If there are no index expressions in the first rank, the first rank's dimensions
      // are to be determined by a deferred add operation.
      bool deferred_add = false;
      if (lexer.TokenType == TokenEnum.SQBRACKETCLOSE || lexer.TokenType == TokenEnum.COMMA)
      {
        deferred_add = true;
      }

      if (!deferred_add)
      {
        innerlist.Add(new Expression(scope, lexer).Get());
      }

      while (lexer.TokenType == TokenEnum.COMMA)
      {
        lexer.Next(); // COMMA
        if (!deferred_add)
        {
          innerlist.Add(new Expression(scope, lexer).Get());
        }
      }

      if (innerlist.Count > 0)
      {
        _first_indices = innerlist.ToArray();
      }
      dimlist.Add(innerlist.Count);

      if (lexer.TokenType != TokenEnum.SQBRACKETCLOSE)
      {
        throw new ParseException(lexer, TokenEnum.SQBRACKETCLOSE);
      }
      lexer.Next(); // SQBRACKETCLOSE

      while (lexer.TokenType == TokenEnum.SQBRACKETOPEN)
      {
        lexer.Next(); // SQBRACKETOPEN
        int count = 1;
        while (lexer.TokenType == TokenEnum.COMMA)
        {
          lexer.Next(); // COMMA
          count++;
        }
        dimlist.Add(count);

        if (lexer.TokenType != TokenEnum.SQBRACKETCLOSE)
        {
          throw new ParseException(lexer, TokenEnum.SQBRACKETCLOSE);
        }
        lexer.Next(); // SQBRACKETCLOSE
      }

      if (deferred_add)
      {
        _first_element = new ArrayLiteral(scope, lexer);
        dimlist[0] = _first_element.Dimensions.Length;
      }

      _dimensions = dimlist.ToArray();
    }
    else
    {
      if (lexer.TokenType != TokenEnum.BRACKETOPEN)
      {
        throw new ParseException(lexer, TokenEnum.BRACKETOPEN);
      }
      lexer.Next(); // BRACKETOPEN

      while (lexer.TokenType != TokenEnum.BRACKETCLOSE)
      {
        _param.Add(new Expression(scope, lexer).Get());
        while (lexer.TokenType == TokenEnum.COMMA)
        {
          lexer.Next(); // COMMA
          _param.Add(new Expression(scope, lexer).Get());
        }
      }
      lexer.Next(); // BRACKETCLOSE
    }

    // allocate once
    oparam = new object[_param.Count];

    if (_dimensions != null && _dimensions.Length > 0)
    {
      _first_dimensions = _first_indices != null ? new int[_first_indices.Length] : _first_element.Dimensions;
    }
  }
  else
  {
    _expr = GetNext(scope, lexer);
  }
}
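// Illustration (not from the source; the class name is hypothetical): the array-creation
// spellings the SQBRACKETOPEN branch above distinguishes, shown as their plain C# equivalents.
// With explicit sizes the first rank's dimensions come from the parsed index expressions; with
// an empty rank specifier they are deferred and recovered from the array literal that follows,
// which is what the deferred_add / ArrayLiteral path handles. The parenthesized form falls into
// the constructor-argument branch instead.
internal static class NewObjectExpressionExample
{
  internal static void Run()
  {
    float[,] a = new float[2, 3];                            // sizes given as expressions
    float[,] b = new float[,] { { 1, 2, 3 }, { 4, 5, 6 } };  // sizes deferred to the literal
    object c = new object();                                 // constructor call: '(' arguments ')'
    System.Console.WriteLine(a.Length + b.Length + (c != null ? 1 : 0)); // prints 13
  }
}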
internal Expression(ContextScope scope, Lexer lexer) : base(scope, lexer)
{
  _expr = GetNext(scope, lexer);
}
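// Sketch (hypothetical, not the project's code): the recursive-descent pattern the constructors
// above share, reduced to two precedence levels over space-separated integer tokens. Each level
// parses the next-tighter level first and then greedily consumes its own operators, mirroring
// how Expression chains down through Ternary, LogicalOr, LogicalAnd, Equality, Relational, Add,
// Multiply, Unary and Primary via GetNext. All names here are illustrative only.
internal static class MiniDescentSketch
{
  private static string[] _tokens;
  private static int _pos;

  // Example: Parse("1 + 2 * 3") returns 7.
  internal static int Parse(string expr)
  {
    _tokens = expr.Split(' ');
    _pos = 0;
    return ParseAdd();
  }

  // ADD := MUL (('+' | '-') MUL)*
  private static int ParseAdd()
  {
    int value = ParseMul();
    while (_pos < _tokens.Length && (_tokens[_pos] == "+" || _tokens[_pos] == "-"))
    {
      string op = _tokens[_pos++];
      int rhs = ParseMul();
      value = (op == "+") ? value + rhs : value - rhs;
    }
    return value;
  }

  // MUL := PRIMARY ('*' PRIMARY)*
  private static int ParseMul()
  {
    int value = ParsePrimary();
    while (_pos < _tokens.Length && _tokens[_pos] == "*")
    {
      _pos++;
      value *= ParsePrimary();
    }
    return value;
  }

  // PRIMARY := integer literal
  private static int ParsePrimary()
  {
    return int.Parse(_tokens[_pos++]);
  }
}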