/// <summary>
/// Having parsed a closing delimiter, eats tokens backwards until the
/// matching open delimiter is found.
/// </summary>
/// <param name="openDelimiter">Token id of the opening delimiter to match.</param>
/// <returns>True if a matching open delimiter was found before end of input.</returns>
private bool BackParseMatchingDelimiter(TokenID openDelimiter)
{
    TokenID closeDelimiter = CurrentToken.TokenID;
    int depth = 1;

    // The first PrevToken() eats the close delimiter itself.
    for (Token token = PrevToken(); token.TokenID != TokenID.EndOfInput; token = PrevToken())
    {
        if (token.TokenID == closeDelimiter)
        {
            depth++;
        }
        else if (token.TokenID == openDelimiter)
        {
            depth--;
            if (depth == 0)
            {
                PrevToken(); // eat the open delimiter
                break;
            }
        }
    }

    // Back parse was successful only if we matched all delimiters.
    return depth == 0;
}
/// <summary>
/// Maps a token string to its TokenID. The SKIP and COMMENT directives and
/// any "#"-prefixed token are treated as comments, as is any unknown token
/// (with a trace warning).
/// </summary>
/// <param name="token">Raw token text to classify.</param>
/// <returns>The matching TokenID, or TokenID.comment as a fallback.</returns>
private TokenID GetTokenID(string token)
{
    if (TokenTable == null)
    {
        InitTokenTable();
    }

    TokenID tokenID;
    // The table is keyed with ToLower() when it is built, so the lookup key must match.
    if (TokenTable.TryGetValue(token.ToLower(), out tokenID))
    {
        return tokenID;
    }

    // Use ordinal, case-insensitive comparison for the fixed directive names instead
    // of the culture-sensitive string.Compare(a, b, true) overload (CA1307/CA1310).
    // SKIP, COMMENT and "#..." all map to the same comment token, so fold them.
    if (token.Equals("SKIP", StringComparison.OrdinalIgnoreCase) ||
        token.Equals("COMMENT", StringComparison.OrdinalIgnoreCase) ||
        token.StartsWith("#", StringComparison.Ordinal))
    {
        return TokenID.comment;
    }

    TraceWarning("Skipped unknown token " + token);
    return TokenID.comment;
}
/// <summary>
/// Creates a member access expression joining a left expression to a
/// right member expression with the given qualifier token.
/// </summary>
/// <param name="left">Expression being accessed.</param>
/// <param name="right">Member expression being referenced.</param>
/// <param name="qualifierKind">Token id of the access qualifier used.</param>
public MemberAccessExpression(ExpressionNode left, ExpressionNode right, TokenID qualifierKind)
    : base(left.RelatedToken)
{
    this.qualifierKind = qualifierKind;
    this.left = left;
    this.right = right;
}
/// <summary>
/// Removes all tokens and related bookkeeping, then notifies listeners.
/// </summary>
public void Clear()
{
    Tokens.Clear();
    TokenID.Clear();
    TokenFrequency.Clear();
    InvokeChanged();
}
/// <summary>
/// Checks if the current token's id is equal to the given id,
/// if not an error will be generated.
/// </summary>
/// <param name="id">ID that you wish to check against the current token's id.</param>
private void CheckToken(TokenID id)
{
    if (_currentToken.ID == id)
    {
        return;
    }
    Error(ErrorCode.ExpectingToken, "Expecting \"" + Token.IdentFromID(id) + "\" token.");
}
/// <summary>
/// Scans a quoted name (which may include spaces) up to the closing single
/// quote and classifies it as a known token, a function reference (when
/// followed by '('), or a plain identifier.
/// </summary>
/// <returns>The classified token.</returns>
private Token IncludeSpaces()
{
    StringBuilder sString = new StringBuilder();
    sString.Append(_CurrentChar);

    // Accumulate characters until the closing single quote.
    // (Replaces the magic ASCII literal 39 with the character it denotes.)
    // NOTE(review): no end-of-input guard here — assumes the closing quote is
    // present; confirm what LA(1) returns at end of input.
    char c = (char)LA(1);
    while (c != '\'')
    {
        sString.Append(c);
        Consume();
        c = (char)LA(1);
    }
    Consume(); // eat the closing quote

    string sTemp = sString.ToString();
    if (_TokenName.ContainsKey(sTemp.ToLower()))
    {
        TokenID id = (TokenID)_TokenName[sTemp.ToLower()];
        TokenClass cls = (TokenClass)_TokenClass[id];
        return new Token(cls, id, sTemp);
    }

    // Not a known token: a following '(' marks a function reference.
    c = RemoveWhiteSpace();
    if (c == '(')
    {
        return new Token(TokenClass.CONTROL, TokenID.FUNCTION, sTemp);
    }
    return new Token(TokenClass.REFERENCE, TokenID.IDENTIFIER, sTemp);
}
/// <summary>
/// Creates a binary expression from an operator and its two operands.
/// </summary>
/// <param name="op">Operator token id.</param>
/// <param name="left">Left-hand operand.</param>
/// <param name="right">Right-hand operand; per the original note this is
/// expected to be a 'type' — confirm with parser usage.</param>
public BinaryExpression(TokenID op, ExpressionNode left, ExpressionNode right)
    : base(left.RelatedToken)
{
    this.left = left;
    this.right = right;
    this.op = op;
}
/// <summary>
/// Scans an identifier (letters, digits, underscores) and classifies it as
/// a known token, a function reference (when followed by '('), or a plain
/// identifier.
/// </summary>
/// <returns>The classified token.</returns>
private Token Identifier()
{
    StringBuilder buffer = new StringBuilder();
    buffer.Append(_CurrentChar);

    // Consume the remaining identifier characters.
    for (char c = (char)LA(1); char.IsLetterOrDigit(c) || c == '_'; c = (char)LA(1))
    {
        buffer.Append(c);
        Consume();
    }

    string name = buffer.ToString();
    string key = name.ToLower();
    if (_TokenName.ContainsKey(key))
    {
        TokenID id = (TokenID)_TokenName[key];
        TokenClass cls = (TokenClass)_TokenClass[id];
        return new Token(cls, id, name);
    }

    // Unknown name: a following '(' marks a function reference.
    char next = RemoveWhiteSpace();
    if (next == '(')
    {
        return new Token(TokenClass.CONTROL, TokenID.FUNCTION, name);
    }
    return new Token(TokenClass.REFERENCE, TokenID.IDENTIFIER, name);
}
/// <summary>
/// Verifies this block carries the expected id; traces a message otherwise.
/// </summary>
/// <param name="desiredID">The block id expected at this point.</param>
public void VerifyID(TokenID desiredID)
{
    if (ID != desiredID)
    {
        // String interpolation, for consistency with the other VerifyID
        // implementation in this source; produces the identical message.
        TraceInformation($"Expected block {desiredID}; got {ID}");
    }
}
/// <summary>
/// Scans a keyword or identifier at the current position. Letters-only
/// words not preceded by a '.' are first checked against the keyword map;
/// anything else becomes an identifier token.
/// </summary>
/// <returns>The token id that was scanned.</returns>
private TokenID ScanKeywordOrIdentifier()
{
    StringBuilder sb = new StringBuilder();
    bool hasLettersOnly;
    ScanIdentifier(sb, out hasLettersOnly);
    string strValue = sb.ToString();

    // A word directly after '.' is always a member name, never a keyword.
    if (hasLettersOnly && currentToken != TokenID.Dot)
    {
        KeywordInfo keyword;
        if (keywordMap.TryGetValue(strValue.ToLowerInvariant(), out keyword))
        {
            tokenValue = keyword.TokenValue;
            return keyword.Token;
        }
    }

    // Otherwise, it's an identifier.
    tokenValue = strValue;
    return TokenID.Identifier;
}
/// <summary>
/// Creates a token with the given id, no payload, and no position.
/// </summary>
/// <param name="id">Token id.</param>
public Token(TokenID id)
{
    ID = id;
    Data = -1; // no payload
    Line = 0;
    Col = 0;
}
/// <summary>
/// Reads a 2D size (width then height) from the given block.
/// </summary>
/// <param name="block">Block reader positioned at the size block.</param>
/// <param name="expectedToken">Token id the block is expected to carry.</param>
internal Size2D(SBR block, TokenID expectedToken)
{
    block.VerifyID(expectedToken);

    // Field order in the block is width followed by height.
    Width = block.ReadFloat();
    Height = block.ReadFloat();

    block.VerifyEndOfBlock();
}
/// <summary>
/// Reads a numeric range (lower then upper limit) from the given block.
/// </summary>
/// <param name="block">Block reader positioned at the range block.</param>
/// <param name="expectedToken">Token id the block is expected to carry.</param>
internal Range(SBR block, TokenID expectedToken)
{
    block.VerifyID(expectedToken);

    // Field order in the block is lower limit followed by upper limit.
    LowerLimit = block.ReadFloat();
    UpperLimit = block.ReadFloat();

    block.VerifyEndOfBlock();
}
/// <summary>
/// Creates a token with the given id and payload, at an unknown position.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="data">Payload value carried by the token.</param>
public Token(TokenID id, int data)
{
    ID = id;
    Data = data;
    Line = 0;
    Col = 0;
}
/// <summary>
/// Creates a token with the given id at the given line and column.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="line">Source line of the token.</param>
/// <param name="col">Source column of the token.</param>
public Token(TokenID id, int line, int col)
{
    ID = id;
    Data = -1; // no payload
    Line = line;
    Col = col;
}
/// <summary>
/// Verifies this block carries the expected id; traces a message otherwise.
/// </summary>
/// <param name="desiredID">The block id expected at this point.</param>
public void VerifyID(TokenID desiredID)
{
    if (ID == desiredID)
    {
        return;
    }
    TraceInformation($"Expected block {desiredID}; got {ID}");
}
/*------------------------------------------------------------------------*/
/// <summary>Creates a new NUMBER token.</summary>
/// <param name="line">Source line of the token.</param>
/// <param name="linePosition">Column of the token on its line.</param>
/// <param name="dval">Numeric value of the literal.</param>
public Token(int line, int linePosition, double dval)
{
    this.line = line;
    this.linePosition = linePosition;
    this.dval = dval;
    tokenID = TokenID.T_NUMBER;
    sval = "";
}
/*------------------------------------------------------------------------*/
/// <summary>Creates a new token with a string parameter.</summary>
/// <param name="line">Source line of the token.</param>
/// <param name="linePosition">Column of the token on its line.</param>
/// <param name="tokenID">Token id.</param>
/// <param name="name">String payload for the token.</param>
public Token(int line, int linePosition, TokenID tokenID, String name)
{
    this.line = line;
    this.linePosition = linePosition;
    this.tokenID = tokenID;
    sval = name;
    this.dval = 0.0;
}
/*------------------------------------------------------------------------*/
/// <summary>Creates a new token with no payload.</summary>
/// <param name="line">Source line of the token.</param>
/// <param name="linePosition">Column of the token on its line.</param>
/// <param name="tokenID">Token id.</param>
public Token(int line, int linePosition, TokenID tokenID)
{
    this.line = line;
    this.linePosition = linePosition;
    this.tokenID = tokenID;
    dval = 0.0;
    sval = "";
}
/*------------------------------------------------------------------------*/
/// <summary>Creates a new UNKNOWN token.</summary>
/// <param name="line">Source line of the token.</param>
/// <param name="linePosition">Column of the token on its line.</param>
public Token(int line, int linePosition)
{
    this.line = line;
    this.linePosition = linePosition;
    tokenID = TokenID.T_UNKNOWN;
    sval = "";
    dval = 0.0;
}
/*------------------------------------------------------------------------*/
/// <summary>Creates a new STRING token.</summary>
/// <param name="line">Source line of the token.</param>
/// <param name="linePosition">Column of the token on its line.</param>
/// <param name="sval">String literal value.</param>
public Token(int line, int linePosition, String sval)
{
    this.line = line;
    this.linePosition = linePosition;
    this.sval = sval;
    tokenID = TokenID.T_STRING_LIT;
    dval = 0.0;
}
/// <summary>
/// Creates a parse rule definition.
/// </summary>
/// <param name="ruleId">Id of the rule being defined.</param>
/// <param name="priority">Rule priority (semantics defined by the parser).</param>
/// <param name="rightToLeft">Whether the rule associates right-to-left.</param>
/// <param name="ids">Token ids the rule matches.</param>
/// <param name="rule">Callback converting matched tokens into a node.</param>
internal ParseRule(TokenID ruleId, int priority, bool rightToLeft, TokenID[] ids, TokenToNode rule)
{
    RuleId = ruleId;
    Priority = priority;
    RightToLeft = rightToLeft;
    Rule = rule;
    Tokens = ids;
}
/// <summary>
/// Initializes a new instance of this class.
/// </summary>
/// <param name="id">ID used to describe this token.</param>
/// <param name="ident">Identifier used to store extra data about this token.</param>
/// <param name="line">Line index this token was extracted from.</param>
/// <param name="offset">Offset on line this token was extracted from.</param>
/// <param name="file">File this token was extracted from.</param>
public Token(TokenID id, string ident, int line, int offset, string file)
{
    _id = id;
    _ident = ident;
    // Source location for error reporting.
    _file = file;
    _line = line;
    _offset = offset;
}
/// <summary>
/// Removes a token from all three tracking collections and notifies listeners.
/// </summary>
/// <param name="token">Token to remove.</param>
/// <returns>True only if the token was removed from every collection.</returns>
public Boolean RemoveToken(String token)
{
    // Use non-short-circuiting '&' so every collection is updated even when an
    // earlier Remove fails; the previous '&&' skipped the later Remove calls and
    // could leave TokenID/Tokens out of sync with TokenFrequency.
    Boolean output = TokenFrequency.Remove(token);
    output = output & TokenID.Remove(token);
    output = output & Tokens.Remove(token);
    InvokeChanged();
    return output;
}
/// <summary>
/// Creates a token spanning the characters [start, current - 1] of the
/// owning SQL text.
/// </summary>
/// <param name="owner">Tokenized SQL text this token belongs to.</param>
/// <param name="kind">Token kind classification.</param>
/// <param name="identifier">Token id.</param>
/// <param name="start">Start index (inclusive).</param>
/// <param name="current">Index one past the last character of the token.</param>
internal Token(TokenizedSQL owner, TokenKind kind, TokenID identifier, int start, int current)
{
    _owner = owner;
    ID = identifier;
    Kind = kind;
    Start = new TokenPosition(owner, start);
    // End is inclusive, hence current - 1.
    End = new TokenPosition(owner, current - 1);
}
/// <summary>
/// Creates a token-burn transaction body for a fungible token.
/// </summary>
/// <param name="token">Address of the token to burn.</param>
/// <param name="amount">Amount to burn; must be at least one.</param>
internal TokenBurnTransactionBody(Hashgraph.Address token, ulong amount) : this()
{
    if (amount < 1)
    {
        throw new ArgumentOutOfRangeException(nameof(amount), "The token amount must be greater than zero.");
    }
    Amount = amount;
    Token = new TokenID(token);
}
/// <summary>
/// Creates a token-burn transaction body for a single asset (serialized token).
/// </summary>
/// <param name="asset">Asset to burn; must not be null or empty.</param>
internal TokenBurnTransactionBody(Hashgraph.Asset asset) : this()
{
    bool missing = asset is null || asset == Hashgraph.Asset.None;
    if (missing)
    {
        throw new ArgumentOutOfRangeException(nameof(asset), "The asset cannot be null or empty.");
    }
    Token = new TokenID(asset);
    SerialNumbers.Add(asset.SerialNum);
}
/// <summary>
/// Advances past the next token when it matches the given id.
/// </summary>
/// <param name="token">Token id to test the lookahead against.</param>
/// <returns>True when the token matched and was consumed.</returns>
public bool AdvanceIfTokenIs(TokenID token)
{
    bool matches = Peek(1).TokenID == token;
    if (matches)
    {
        ReadNextToken();
    }
    return matches;
}
/// <summary>
/// Reads a value reference from the lexer, handling the "@", "#" and "?"
/// evaluated-reference qualifiers, then hands off to ReadRemainingReference
/// to build the rest of the reference tree.
/// </summary>
/// <returns>The parsed reference operator, or null when the next token is
/// not a reference.</returns>
public Operator ReadEvaluatedValueReference()
{
    // NOTE(review): IDRefType is assigned below but never read again in this
    // method — confirm whether it is intentionally unused.
    TokenID IDRefType = TokenID.ATSIGN;
    Operator oRef;
    Token oTok, oNextTok;

    // Only proceed when the lookahead token is a reference.
    oTok = _Lexer.PeekToken();
    if (oTok == null || oTok.Class != TokenClass.REFERENCE)
    {
        return(null);
    }
    else
    {
        oTok = _Lexer.GetToken();
    }

    //loop to construct an evaluating reference
    //that includes properties, and function references
    //Add these elements to the value reference as a tree...
    oNextTok = _Lexer.PeekToken();
    if (oNextTok == null)
    {
        // End of input: a bare value reference.
        oRef = new ValueReference(oTok.Text);
    }
    else
    {
        switch (oNextTok.ID)
        {
        case TokenID.ATSIGN:
        case TokenID.SHARP:
        case TokenID.QUESTIONMARK:
            // Qualified reference: consume the qualifier and build an
            // at-reference carrying the qualifier character.
            IDRefType = oNextTok.ID;
            oNextTok = _Lexer.GetToken();
            oRef = new ValueAtReference(oTok.Text, (char)oNextTok.Text[0]);
            oTok = _Lexer.PeekToken();
            if (oTok != null && oTok.ID != TokenID.PERIOD)
            {
                // If the qualifier is directly followed by a call or another
                // reference, synthesize the missing '.' so the remainder
                // parses as a member chain.
                if (oTok.ID == TokenID.LPAREN || oTok.ID == TokenID.FUNCTION || oTok.Class == TokenClass.REFERENCE)
                {
                    _Lexer.PutToken(new Token(TokenClass.REFERENCE, TokenID.PERIOD, "."));
                }
            }
            break;
        default:
            oRef = new ValueReference(oTok.Text);
            break;
        }
    }

    // Attach any trailing properties / calls to the reference.
    return(ReadRemainingReference(oRef));
}
/// <summary>
/// Creates a fee-schedule update body for the given token.
/// </summary>
/// <param name="token">Token whose fee schedule is updated.</param>
/// <param name="royalties">New royalty list; null or empty clears the existing fees.</param>
internal TokenFeeScheduleUpdateTransactionBody(Address token, IEnumerable<IRoyalty>? royalties) : this()
{
    TokenId = new TokenID(token);
    // Note: Null & Empty are Valid, they will clear the list of fees.
    if (royalties is not null)
    {
        CustomFees.AddRange(royalties.Select(r => new CustomFee(r)));
    }
}
/// <summary>
/// Creates a positioned token; flags it as a potential generic start and
/// nullable declaration by default.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="line">Source line of the token.</param>
/// <param name="col">Source column of the token.</param>
public Token(TokenID id, int line, int col)
{
    ID = id;
    Data = -1; // no payload
    Line = line;
    Col = col;
    GenericStart = true;
    NullableDeclaration = true;
}
/// <summary>
/// Creates an unpositioned token with a payload; flags it as a potential
/// generic start and nullable declaration by default.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="data">Payload value carried by the token.</param>
public Token(TokenID id, int data)
{
    ID = id;
    Data = data;
    Line = 0;
    Col = 0;
    GenericStart = true;
    NullableDeclaration = true;
}
/// <summary>
/// Creates a positioned token. GenericStart and LastCharWasGreater default
/// to false; NullableDeclaration defaults to true.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="line">Source line of the token.</param>
/// <param name="col">Source column of the token.</param>
public Token(TokenID id, int line, int col)
{
    this.ID = id;
    this.Data = -1; // no payload
    this.Line = line;
    this.Col = col;
    this.GenericStart = false;
    this.NullableDeclaration = true;
    this.LastCharWasGreater = false;
}
/// <summary>
/// Creates an unpositioned token with a payload. GenericStart and
/// LastCharWasGreater default to false; NullableDeclaration to true.
/// </summary>
/// <param name="id">Token id.</param>
/// <param name="data">Payload value carried by the token.</param>
public Token(TokenID id, int data)
{
    this.ID = id;
    this.Data = data;
    this.Line = 0;
    this.Col = 0;
    this.GenericStart = false;
    this.NullableDeclaration = true;
    this.LastCharWasGreater = false;
}
// Maps a type keyword token to a symbol type.
// Returns SymType.NONE (and asserts in debug builds) for any non-type token.
static SymType TokenToType(TokenID token)
{
    switch (token)
    {
        case TokenID.KINTEGER:    return SymType.INTEGER;
        case TokenID.KREAL:       return SymType.FLOAT;
        case TokenID.KDPRECISION: return SymType.DOUBLE;
        case TokenID.KLOGICAL:    return SymType.BOOLEAN;
        case TokenID.KCHARACTER:  return SymType.FIXEDCHAR;
        case TokenID.KCOMPLEX:    return SymType.COMPLEX;
        default:
            // BUG FIX: Debug.Assert(true, ...) can never fire — the condition
            // must be false for the assertion to report the invalid argument.
            Debug.Assert(false, "TokenToType called with invalid argument");
            return SymType.NONE;
    }
}
/// <summary>
/// Having parsed a closing delimiter, walks backwards through the token
/// stream, eating tokens, until the matching open delimiter is found.
/// </summary>
/// <param name="openDelimiter">Token id of the opening delimiter.</param>
/// <returns>True if all delimiters were matched before end of input.</returns>
private bool BackParseMatchingDelimiter(TokenID openDelimiter)
{
    TokenID closeDelimiter = this.CurrentToken.TokenID;
    int level = 1;

    Token token = this.PrevToken(); // eat the close delimiter
    while (token.TokenID != TokenID.EndOfInput)
    {
        if (token.TokenID == closeDelimiter)
        {
            level++;
        }
        else if (token.TokenID == openDelimiter)
        {
            level--;
            if (level == 0)
            {
                this.PrevToken(); // eat the open delimiter
                break;
            }
        }
        token = this.PrevToken();
    }

    // Success only when every close delimiter found its open counterpart.
    return level == 0;
}
/// <summary>
/// Checks if the current token's id is equal to the given id,
/// if not an error will be generated.
/// </summary>
/// <param name="id">ID that you wish to check against the current token's id.</param>
private void CheckToken(TokenID id)
{
    if (_currentToken.ID == id)
    {
        return;
    }
    Error(ErrorCode.ExpectingToken, "Expecting \"" + Token.IdentFromID(id) + "\" token.", false, 0);
}
/// <summary>
/// Scans and returns the next token from the input. Leading whitespace is
/// skipped; '\0' yields an EndOfInput token. Invalid characters raise a
/// RuleSyntaxException.
/// </summary>
/// <returns>The next token, with its start position and value recorded.</returns>
private Token NextToken()
{
    char ch = CurrentChar();
    ch = SkipWhitespace(ch);
    if (ch == '\0')
    {
        return new Token(TokenID.EndOfInput, currentPosition, null);
    }

    tokenStartPosition = currentPosition;
    tokenValue = null;

    TokenID id = TokenID.Unknown;
    if (char.IsDigit(ch))
    {
        id = ScanNumber();
    }
    else if (char.IsLetter(ch))
    {
        id = ScanKeywordOrIdentifier();
    }
    else
    {
        switch (ch)
        {
            case '_':
                // Identifiers may begin with an underscore.
                id = ScanKeywordOrIdentifier();
                break;
            case '!':
                id = TokenID.Not;
                if (NextChar() == '=')
                {
                    NextChar();
                    id = TokenID.NotEqual;
                }
                break;
            case '"':
                id = ScanStringLiteral();
                NextChar();
                break;
            case '%':
                id = TokenID.Modulus;
                NextChar();
                break;
            case '&':
                id = TokenID.BitAnd;
                if (NextChar() == '&')
                {
                    NextChar();
                    id = TokenID.And;
                }
                break;
            case '\'':
                id = ScanCharacterLiteral();
                NextChar();
                break;
            case '(':
                id = TokenID.LParen;
                NextChar();
                break;
            case ')':
                id = TokenID.RParen;
                NextChar();
                break;
            case '*':
                id = TokenID.Multiply;
                NextChar();
                break;
            case '+':
                id = TokenID.Plus;
                NextChar();
                break;
            case ',':
                id = TokenID.Comma;
                NextChar();
                break;
            case '-':
                id = TokenID.Minus;
                NextChar();
                break;
            case '.':
                id = TokenID.Dot;
                // ".5"-style decimals start with a dot followed by a digit.
                if (!char.IsDigit(PeekNextChar()))
                {
                    NextChar();
                }
                else
                {
                    id = ScanDecimal();
                }
                break;
            case '/':
                id = TokenID.Divide;
                NextChar();
                break;
            case ';':
                id = TokenID.Semicolon;
                NextChar();
                break;
            case '<':
            {
                id = TokenID.Less;
                char next = NextChar();
                if (next == '=')
                {
                    NextChar();
                    id = TokenID.LessEqual;
                }
                else if (next == '>')
                {
                    NextChar();
                    id = TokenID.NotEqual; // "<>" is the alternate inequality form
                }
                break;
            }
            case '=':
                id = TokenID.Assign;
                if (NextChar() == '=')
                {
                    NextChar();
                    id = TokenID.Equal;
                }
                break;
            case '>':
                id = TokenID.Greater;
                if (NextChar() == '=')
                {
                    NextChar();
                    id = TokenID.GreaterEqual;
                }
                break;
            case '@':
            {
                // '@' must introduce a verbatim string literal.
                char next = NextChar();
                if (next != '"')
                {
                    throw new RuleSyntaxException(0x17b,
                        string.Format(CultureInfo.CurrentCulture, Messages.Parser_InvalidCharacter, next),
                        tokenStartPosition);
                }
                id = ScanVerbatimStringLiteral();
                NextChar();
                break;
            }
            case '[':
                id = TokenID.LBracket;
                NextChar();
                break;
            case ']':
                id = TokenID.RBracket;
                NextChar();
                break;
            case '{':
                id = TokenID.LCurlyBrace;
                NextChar();
                break;
            case '|':
                id = TokenID.BitOr;
                if (NextChar() == '|')
                {
                    NextChar();
                    id = TokenID.Or;
                }
                break;
            case '}':
                id = TokenID.RCurlyBrace;
                NextChar();
                break;
            default:
                NextChar();
                throw new RuleSyntaxException(0x17b,
                    string.Format(CultureInfo.CurrentCulture, Messages.Parser_InvalidCharacter, ch),
                    tokenStartPosition);
        }
    }

    Token result = new Token(id, tokenStartPosition, tokenValue);
    currentToken = id;
    return result;
}
/// <summary>
/// Asserts the current token has the given id (invoking error recovery
/// when it does not), then advances to the next token.
/// </summary>
/// <param name="id">Token id expected at the current position.</param>
private void AssertAndAdvance(TokenID id)
{
    bool matches = curtok.ID == id;
    if (!matches)
    {
        RecoverFromError(id);
    }
    Advance();
}
/// <summary>
/// Parses a statement block. A block is usually a
/// block of statements surrounded with braces.
/// </summary>
/// <param name="startTokenID">Token opening the block (e.g. '{').</param>
/// <param name="endTokenID">Token closing the block (e.g. '}').</param>
private void ParseBlock(TokenID startTokenID, TokenID endTokenID)
{
    ExpectToken(startTokenID);

    // Check if we are missing a closing brace.
    if (LookAheadToken() != null)
    {
        // Check if we are parsing an empty block.
        if (LookAheadToken().ID == endTokenID)
        {
            NextToken();
            return;
        }
    }
    else
    {
        Error(ErrorCode.ExpectingToken, "Expecting \"}\" token.", false, 0);
    }

    // Keep parsing statements until the end of the block.
    while (LookAheadToken() != null && LookAheadToken().ID != endTokenID)
    {
        // Removed redundant '== true' comparison; added braces per house style.
        if (EndOfTokenStream())
        {
            CheckToken(endTokenID);
            return;
        }
        ParseStatement();
    }
    ExpectToken(endTokenID);
}
/// <summary>
/// Creates keyword info with no associated token value.
/// </summary>
/// <param name="token">Token id of the keyword.</param>
internal KeywordInfo(TokenID token)
    : this(token, null)
{
}
/// <summary>
/// Scans and returns the next token from the input. Leading whitespace is
/// skipped; '\0' yields an EndOfInput token. Invalid characters raise a
/// RuleSyntaxException.
/// </summary>
private Token NextToken()
{
    string message = null; // for any error messages.
    TokenID tokenID = TokenID.Unknown;

    char ch = CurrentChar();
    ch = SkipWhitespace(ch);
    if (ch == '\0')
        return new Token(TokenID.EndOfInput, currentPosition, null);

    // Remember where this token starts for position reporting.
    tokenStartPosition = currentPosition;
    tokenValue = null;

    if (char.IsDigit(ch))
    {
        tokenID = ScanNumber();
    }
    else if (char.IsLetter(ch))
    {
        tokenID = ScanKeywordOrIdentifier();
    }
    else
    {
        switch (ch)
        {
            case '_':
                // Identifiers may begin with an underscore.
                tokenID = ScanKeywordOrIdentifier();
                break;
            case '+':
                tokenID = TokenID.Plus;
                NextChar();
                break;
            case '-':
                tokenID = TokenID.Minus;
                NextChar();
                break;
            case '*':
                tokenID = TokenID.Multiply;
                NextChar();
                break;
            case '/':
                tokenID = TokenID.Divide;
                NextChar();
                break;
            case '%':
                tokenID = TokenID.Modulus;
                NextChar();
                break;
            case '&':
                tokenID = TokenID.BitAnd;
                if (NextChar() == '&')
                {
                    // "&&" is logical And.
                    NextChar();
                    tokenID = TokenID.And;
                }
                break;
            case '|':
                tokenID = TokenID.BitOr;
                if (NextChar() == '|')
                {
                    // "||" is logical Or.
                    NextChar();
                    tokenID = TokenID.Or;
                }
                break;
            case '=':
                tokenID = TokenID.Assign;
                if (NextChar() == '=')
                {
                    // It's "==", so the token is Equal
                    NextChar();
                    tokenID = TokenID.Equal;
                }
                break;
            case '!':
                tokenID = TokenID.Not;
                if (NextChar() == '=')
                {
                    NextChar();
                    tokenID = TokenID.NotEqual;
                }
                break;
            case '<':
                tokenID = TokenID.Less;
                ch = NextChar();
                if (ch == '=')
                {
                    NextChar();
                    tokenID = TokenID.LessEqual;
                }
                else if (ch == '>')
                {
                    // "<>" is the alternate inequality form.
                    NextChar();
                    tokenID = TokenID.NotEqual;
                }
                break;
            case '>':
                tokenID = TokenID.Greater;
                if (NextChar() == '=')
                {
                    NextChar();
                    tokenID = TokenID.GreaterEqual;
                }
                break;
            case '(':
                tokenID = TokenID.LParen;
                NextChar();
                break;
            case ')':
                tokenID = TokenID.RParen;
                NextChar();
                break;
            case '.':
                tokenID = TokenID.Dot;
                // ".5"-style decimals start with a dot followed by a digit.
                if (char.IsDigit(PeekNextChar()))
                    tokenID = ScanDecimal();
                else
                    NextChar(); // consume the '.'
                break;
            case ',':
                tokenID = TokenID.Comma;
                NextChar();
                break;
            case ';':
                tokenID = TokenID.Semicolon;
                NextChar();
                break;
            case '[':
                tokenID = TokenID.LBracket;
                NextChar();
                break;
            case ']':
                tokenID = TokenID.RBracket;
                NextChar();
                break;
            case '{':
                tokenID = TokenID.LCurlyBrace;
                NextChar();
                break;
            case '}':
                tokenID = TokenID.RCurlyBrace;
                NextChar();
                break;
            case '@':
                // '@' must introduce a verbatim string literal.
                ch = NextChar();
                if (ch == '"')
                {
                    tokenID = ScanVerbatimStringLiteral();
                }
                else
                {
                    message = string.Format(CultureInfo.CurrentCulture, Messages.Parser_InvalidCharacter, ch);
                    throw new RuleSyntaxException(ErrorNumbers.Error_InvalidCharacter, message, tokenStartPosition);
                }
                NextChar();
                break;
            case '"':
                tokenID = ScanStringLiteral();
                NextChar();
                break;
            case '\'':
                tokenID = ScanCharacterLiteral();
                NextChar();
                break;
            default:
                // Unrecognised character: consume it and report a syntax error.
                NextChar();
                message = string.Format(CultureInfo.CurrentCulture, Messages.Parser_InvalidCharacter, ch);
                throw new RuleSyntaxException(ErrorNumbers.Error_InvalidCharacter, message, tokenStartPosition);
        }
    }

    Token token = new Token(tokenID, tokenStartPosition, tokenValue);
    currentToken = tokenID;
    return token;
}
/// Check whether the next token is the one specified and skip it if so.
void SkipToken(TokenID id)
{
    SimpleToken token = _ls.GetToken();
    if (token.ID == id)
    {
        return; // consumed the expected token
    }
    _ls.BackToken(); // not the one we wanted; push it back
}
/// Ensure that the next token in the input is the one expected and report an error otherwise.
SimpleToken ExpectToken(TokenID expectedID)
{
    SimpleToken token = _ls.GetToken();
    if (token.KeywordID == expectedID)
    {
        return token;
    }

    // Mismatch: report, push the token back, and signal failure with null.
    _messages.Error(MessageCode.EXPECTEDTOKEN,
        String.Format("Expected '{0}' but saw '{1}' instead",
            Tokens.TokenIDToString(expectedID), Tokens.TokenIDToString(token.KeywordID)));
    _ls.BackToken();
    return null;
}
// Return whether the specified token marks the end of an inner IF block
bool IsEndOfIfBlock(TokenID id)
{
    return id == TokenID.KELSEIF
        || id == TokenID.KELSE
        || id == TokenID.KENDIF
        || id == TokenID.ENDOFFILE;
}
/// <summary>
/// Converts a keyword token to a data type.
/// </summary>
/// <param name="token">Token to convert.</param>
/// <returns>DataType representing keyword, or DataType.Invalid for
/// non-type keywords.</returns>
private DataType DataTypeFromKeywordToken(TokenID token)
{
    switch (token)
    {
        case TokenID.KeywordBool:   return DataType.Bool;
        case TokenID.KeywordByte:   return DataType.Byte;
        case TokenID.KeywordDouble: return DataType.Double;
        case TokenID.KeywordFloat:  return DataType.Float;
        case TokenID.KeywordInt:    return DataType.Int;
        case TokenID.KeywordLong:   return DataType.Long;
        case TokenID.KeywordShort:  return DataType.Short;
        case TokenID.KeywordString: return DataType.String;
        case TokenID.KeywordObject: return DataType.Object;
        case TokenID.KeywordVoid:   return DataType.Void;
        default:                    return DataType.Invalid;
    }
}
/// <summary>
/// Converts a type token to a data type.
/// </summary>
/// <param name="token">Token to convert.</param>
/// <returns>DataType representing type, or DataType.Invalid for
/// non-type tokens. (Unlike the keyword mapping, there is no Object case.)</returns>
private DataType DataTypeFromTypeToken(TokenID token)
{
    switch (token)
    {
        case TokenID.TypeBoolean: return DataType.Bool;
        case TokenID.TypeByte:    return DataType.Byte;
        case TokenID.TypeDouble:  return DataType.Double;
        case TokenID.TypeFloat:   return DataType.Float;
        case TokenID.TypeInteger: return DataType.Int;
        case TokenID.TypeLong:    return DataType.Long;
        case TokenID.TypeShort:   return DataType.Short;
        case TokenID.TypeString:  return DataType.String;
        case TokenID.TypeVoid:    return DataType.Void;
        default:                  return DataType.Invalid;
    }
}
/// <summary>
/// Creates an operator name whose textual name is the operator token's
/// enum name, visible to everyone with static scope.
/// </summary>
/// <param name="token">Operator token the name is derived from.</param>
/// <param name="context">Context the name is declared in.</param>
public OperatorName(TokenID token, Context context)
    : base(token.ToString(), NameVisibilityRestriction.Everyone, Scope.Static, context)
{
}
/// <summary>
/// Reads in the next token and advances the stream, it also checks
/// if the next token's id is equal to the given id if not an error
/// will be generated.
/// </summary>
/// <param name="id">ID that you wish to check against the current token's id.</param>
/// <returns>Next token in script.</returns>
private Token ExpectToken(TokenID id)
{
    NextToken();         // advance the stream
    CheckToken(id);      // report an error on mismatch
    return _currentToken;
}
/// <summary>
/// Creates a node representing a predefined (built-in) type keyword, using
/// the lower-cased enum name as the identifier text.
/// </summary>
/// <param name="type">Token id of the predefined type.</param>
/// <param name="relatedToken">Token the node was parsed from.</param>
public PredefinedTypeNode(TokenID type, Token relatedToken)
    : base(
        // ToLowerInvariant: the enum name → identifier mapping is machine-facing
        // and must not vary with the current culture (e.g. Turkish dotless I).
        new IdentifierExpression(type.ToString().ToLowerInvariant(), relatedToken)
        )
{
    this.type = type;
}
/// <summary>
/// Returns true if the given data type is valid for the given operator.
/// </summary>
/// <param name="type">Data type to check operator against.</param>
/// <param name="opToken">Operator to check validity for.</param>
/// <returns>True if the given data type is valid for the given operator.</returns>
private bool OperatorDataTypeValid(DataTypeValue type, TokenID opToken)
{
    if (type.DataType == DataType.Invalid)
        return false;

    // Arrays only support plain assignment and (in)equality comparison.
    if (type.IsArray && opToken != TokenID.OpAssign && opToken != TokenID.OpEqual && opToken != TokenID.OpNotEqual)
        return false;

    // Replaces the repeated '(cond ? false : true)' anti-pattern with direct
    // boolean expressions, grouping cases that share the same rule.
    switch (opToken)
    {
        // Valid for every data type.
        case TokenID.OpAssign:
        case TokenID.OpEqual:
        case TokenID.OpNotEqual:
            return true;

        // Addition: everything except objects (strings concatenate).
        case TokenID.OpAdd:
        case TokenID.OpAssignAdd:
            return type.DataType != DataType.Object;

        // Bitwise, shift and modulus operators require an integer.
        case TokenID.OpAssignBitwiseAnd:
        case TokenID.OpAssignBitwiseOr:
        case TokenID.OpAssignBitwiseNot:
        case TokenID.OpAssignBitwiseSHL:
        case TokenID.OpAssignBitwiseSHR:
        case TokenID.OpAssignBitwiseXOr:
        case TokenID.OpAssignModulus:
        case TokenID.OpBitwiseAnd:
        case TokenID.OpBitwiseOr:
        case TokenID.OpBitwiseSHL:
        case TokenID.OpBitwiseSHR:
        case TokenID.OpBitwiseXOr:
        case TokenID.OpBitwiseNot:
        case TokenID.OpModulus:
            return type.DataType == DataType.Int;

        // Logical operators require a boolean.
        case TokenID.OpLogicalAnd:
        case TokenID.OpLogicalOr:
        case TokenID.OpLogicalNot:
            return type.DataType == DataType.Bool;

        // Arithmetic, relational and increment/decrement operators are valid
        // for anything that is not an object, string or null.
        case TokenID.OpAssignDivide:
        case TokenID.OpAssignMultiply:
        case TokenID.OpAssignSub:
        case TokenID.OpDecrement:
        case TokenID.OpDivide:
        case TokenID.OpGreater:
        case TokenID.OpGreaterEqual:
        case TokenID.OpIncrement:
        case TokenID.OpLess:
        case TokenID.OpLessEqual:
        case TokenID.OpMultiply:
        case TokenID.OpSub:
            return type.DataType != DataType.Object
                && type.DataType != DataType.String
                && type.DataType != DataType.Null;

        default:
            return false;
    }
}
/// <summary>
/// Creates keyword info pairing a token id with its associated value.
/// </summary>
/// <param name="token">Token id of the keyword.</param>
/// <param name="tokenValue">Value associated with the keyword (may be null).</param>
internal KeywordInfo(TokenID token, object tokenValue)
{
    Token = token;
    TokenValue = tokenValue;
}
/// <summary>
/// Creates a binary expression with an operator and a left operand only;
/// the right operand is presumably attached later — confirm with parser usage.
/// </summary>
/// <param name="op">Operator token id.</param>
/// <param name="left">Left-hand operand.</param>
public BinaryExpression(TokenID op, ExpressionNode left)
    : base(left.RelatedToken)
{
    this.left = left;
    this.op = op;
}
// Ensure that the next token in the input is the one expected and report an error otherwise.
// BUGBUG: Remove when full tokeniser implemented!
void LocalExpectToken(TokenID expectedID)
{
    SimpleToken token = GetToken();
    if (token.KeywordID != expectedID)
    {
        // Render the actual token through TokenIDToString for a readable message,
        // consistent with ExpectToken (previously the raw enum value was printed).
        _messages.Error(MessageCode.EXPECTEDTOKEN,
            String.Format("Expected {0}, but saw {1} instead",
                Tokens.TokenIDToString(expectedID), Tokens.TokenIDToString(token.KeywordID)));
        BackToken();
    }
}
/// <summary>
/// Creates a binary expression node carrying only its operator; operands
/// are presumably attached later by the parser — confirm with usage.
/// </summary>
/// <param name="op">Operator token id.</param>
/// <param name="relatedtoken">Token the node is associated with.</param>
public BinaryExpression(TokenID op, Token relatedtoken)
    : base(relatedtoken)
{
    this.op = op;
}