/*****************************************************************************/
// Handles one character of a quoted-literal scan. Returns true when the
// character acted as an (unescaped) delimiter of the given literal type,
// either opening a new literal token or closing the one in progress.
private bool CheckStringLiteral(char chCheck, char ch, char? prev, Token.TokenType tokenType)
{
    // Not the delimiter character, or it is escaped: not handled here.
    if (ch != chCheck || prev == '\\')
    {
        return false;
    }

    if (this._token?.Type == tokenType)
    {
        // Closing delimiter: complete the literal and emit it.
        this.PushToken(new Token(this._token.Value + ch, Token.TokenType.Literal));
    }
    else
    {
        // Flush whatever token was being built, then open a new literal.
        this.PushToken();
        this._token = new Token { Value = ch.ToString(), Type = tokenType };
    }

    return true;
}
// Appends a new operator token of the given type to the token list.
private void AddOperatorToken(Token.TokenType type)
{
    TokenList.Add(new Token(type));
}
/// <summary>
/// Creates a tokenizer producing tokens of <paramref name="type"/>,
/// optionally constrained to follow a token of the given previous
/// type and/or one of the given previous names.
/// </summary>
public GeneralTokenizer(Token.TokenType type, string name, Token.TokenType previousType = Token.TokenType.NA, params string[] previousNames)
{
    Name = name;
    Type = type;
    PreviousNames = previousNames;
    PreviousType = previousType;
}
// Builds the token/player assignment for the configured player count and
// the human's selected token, hands both arrays to the GameMaster
// singleton, and then loads the gameplay scene.
public void OnPlay()
{
    var tokenPlayers = new Token.TokenPlayer[playerCount];
    var tokenTypes = new Token.TokenType[playerCount];

    for (int slot = 0; slot < playerCount; slot++)
    {
        tokenTypes[slot] = (Token.TokenType)slot;
        // Default every slot to the computer; the human takes the slot
        // whose token matches their selection.
        tokenPlayers[slot] = (tokenTypes[slot] == selectedToken)
            ? Token.TokenPlayer.Human
            : Token.TokenPlayer.Computer;
    }

    // Selected token lies outside the first playerCount slots: give the
    // human the last slot and force their token onto it.
    if ((int)selectedToken >= playerCount)
    {
        tokenPlayers[playerCount - 1] = Token.TokenPlayer.Human;
        tokenTypes[playerCount - 1] = selectedToken;
    }

    GameMaster gm = GameMaster.instance;
    gm.SelectedTokens = tokenTypes;
    gm.SelectedTokenPlayers = tokenPlayers;
    SceneManager.LoadScene("GamePlay");
}
// Verifies that a simple arithmetic assignment statement is lexed into
// the expected sequence of token types.
public void GenArithmetic()
{
    // Expected token stream for: "variable= 1 + 1 - 1 / 1 * 1"
    var comparison = new Token.TokenType[]
    {
        Token.TokenType.TOKEN_SYMBOL, Token.TokenType.TOKEN_EQU,
        Token.TokenType.TOKEN_INT, Token.TokenType.TOKEN_ADD,
        Token.TokenType.TOKEN_INT, Token.TokenType.TOKEN_SUB,
        Token.TokenType.TOKEN_INT, Token.TokenType.TOKEN_DIV,
        Token.TokenType.TOKEN_INT, Token.TokenType.TOKEN_MUL,
        Token.TokenType.TOKEN_INT, Token.TokenType.TOKEN_END
    };

    _lexer.AddStatement("variable= 1 + 1 - 1 / 1 * 1");

    // Bound the loop by the expected array rather than a magic 12.
    for (int i = 0; i < comparison.Length; i++)
    {
        var compare = comparison[i];
        var token = _lexer.GetTokens().ElementAt(i);
        // Assert.AreEqual takes (expected, actual); the original call had
        // them swapped, which yields misleading failure messages.
        Assert.AreEqual(compare, token.Type);
        Console.WriteLine(i + "| " + token.Type + " == " + compare);
    }
}
// Throws when the upcoming token is not of the required type.
private void Expect(Token.TokenType tokenType)
{
    if (IsNext(tokenType))
    {
        return;
    }
    throw new Exception($"Unexpected token {_lexer.Peek()} at position {_lexer.Position}");
}
// Fails fast when a token's actual type differs from the expected one.
public static void CheckTokenType(Token.TokenType expectedTokenType, Token.TokenType actualTokenType)
{
    if (actualTokenType == expectedTokenType)
    {
        return;
    }
    throw new Exception($"Wrong token type: Expected - {expectedTokenType}, Actual - {actualTokenType}");
}
// Raises a parser error when the token is not of the required type.
static void expectTokenType(Token t, Token.TokenType type)
{
    if (t.type == type)
    {
        return;
    }
    throw new ParserErrorExpected(type.ToString(), t.type.ToString(), t);
}
// Dispatches an AST node to the handler for its token type and returns
// the resulting value (null for node kinds that produce no value).
private ReturnValue execute(AST tree)
{
    Token.TokenType tokenType = tree.getToken().getTokenType();

    // Function declarations are skipped when encountered during execution.
    if (tokenType == Token.TokenType.FUNC_DECLARATION)
    {
        return new ReturnValue();
    }

#if WRITE_DEBUG_INFO
    Console.WriteLine("Executing " + tree.getTokenType() + " " + tree.getTokenString());
#endif

    ReturnValue returnValue = null;
    switch (tokenType)
    {
    case Token.TokenType.STATEMENT_LIST:
        executeAllChildNodes(tree);
        break;
    case Token.TokenType.FUNCTION_CALL:
        returnValue = functionCall(tree);
        break;
    case Token.TokenType.NAME:
        returnValue = name(tree);
        break;
    case Token.TokenType.NUMBER:
        returnValue = number(tree);
        break;
    case Token.TokenType.OPERATOR:
        returnValue = operation(tree);
        break;
    case Token.TokenType.QUOTED_STRING:
        returnValue = quotedString(tree);
        break;
    case Token.TokenType.IF:
        ifThenElse(tree);
        break;
    case Token.TokenType.VAR_DECLARATION:
        varDeclaration(tree);
        break;
    case Token.TokenType.ASSIGNMENT:
        assignment(tree);
        break;
    case Token.TokenType.RETURN:
        returnStatement(tree);
        break;
    default:
        throw new NotImplementedException("The interpreter hasn't got support for token type " + tokenType + " yet!");
    }

    return returnValue;
}
/// <summary>
/// Creates a tokenizer that matches a single symbol character,
/// optionally constrained to follow a token of the given previous type.
/// </summary>
public TextTokenizer(char symbol, Token.TokenType previousType = Token.TokenType.NA, Token.TokenType type = Token.TokenType.Value, string name = null)
{
    Name = name;
    Symbol = symbol;
    Type = type;
    PreviousType = previousType;
}
// assumes the next char in stream is '"'
/// <summary>
/// Scans a double-quoted string literal from the character stream and
/// returns it as a StringLiteral token with the surrounding quotes
/// stripped from its value. If the string is not terminated before a
/// newline or end of file, a syntax error is recorded and the partial
/// token read so far is returned so scanning can continue.
/// </summary>
private Token ScanStringToken()
{
    Token.TokenType type = Token.TokenType.StringLiteral;
    string value;
    int tokenCharPos = _charNumber; // start column of the token, for diagnostics
    StringBuilder valueBuilder = new StringBuilder();

    // Consume the opening quote.
    valueBuilder.Append(_charStream.Read());
    _charNumber++;

    while (_charStream.Peek() != '"')
    {
        if (_charStream.Peek() == '\r' || _charStream.Peek() == '\n' || _charStream.EOF()) // add an error if the string is not terminated before new line or EOF
        {
            _errorSummary.AddError(new SyntaxError($"Lexical error. Did you forget to terminate a string?", new SourceCodePosition(_lineNumber, _charNumber - 1, _fileName)));
            // Return what was read so far; the caller resumes scanning.
            return(new Token(type, valueBuilder.ToString(), new SourceCodePosition(_lineNumber, tokenCharPos, _fileName)));
        }
        valueBuilder.Append(_charStream.Read());
        _charNumber++;
    }

    // Consume the closing quote.
    valueBuilder.Append(_charStream.Read());
    _charNumber++;

    // NOTE(review): Trim strips ALL leading/trailing '"' characters, not
    // just one pair — harmless here because the loop above stops at the
    // first interior '"', so the buffer holds exactly one quote per end.
    value = valueBuilder.ToString().Trim(new char[] { '"' });
    return(new Token(type, value, new SourceCodePosition(_lineNumber, tokenCharPos, _fileName)));
}
// Consumes the next token when it has the expected type and returns it;
// otherwise raises a syntax error pointing at the offending token.
public virtual Token match(Token.TokenType expectedTokenType)
{
    Token matchedToken = lookAhead(1);

    // Mismatch: report and abort before consuming anything.
    if (lookAheadType(1) != expectedTokenType)
    {
#if WRITE_DEBUG_INFO
        Console.WriteLine("FAILED TO MATCH TOKEN OF TYPE " + expectedTokenType.ToString() + " ...FOUND " + lookAhead(1).getTokenString() + " (line " + lookAhead(1).LineNr + ")");
#endif
        throw new Error(
            "The code word '" + lookAhead(1).getTokenString() + "'" + " does not compute. Expected " + expectedTokenType,
            Error.ErrorType.SYNTAX, lookAhead(1).LineNr, lookAhead(1).LinePosition);
    }

#if WRITE_DEBUG_INFO
    Console.WriteLine("MATCHED TOKEN " + lookAhead(1).getTokenString() + " (line " + lookAhead(1).LineNr + ")");
#endif
    consumeCurrentToken();
    return matchedToken;
}
// Notifies subscribers (if any) that a token type was selected.
void CallTokenSelected(Token.TokenType type)
{
    onTokenTypeSelected?.Invoke(type);
}
/// <summary>
/// Asserts that a token exists and matches the expected type, character
/// index, original length, and value. Checks run in that order, so the
/// first mismatch reported is the non-null check, then position/length,
/// then type and value.
/// </summary>
private static void AssertToken(Token token, Token.TokenType type, int charIndex, int length, string value)
{
    Assert.IsNotNull(token);
    Assert.AreEqual(charIndex, token.CharacterIndex);
    Assert.AreEqual(length, token.OriginalLength);
    Assert.AreEqual(type, token.Type);
    Assert.AreEqual(value, token.Value);
}
/// <summary>
/// Returns true when position <paramref name="p"/> is inside the token
/// list and the token there has the given type.
/// </summary>
static private bool CheckToken(int p, Token.TokenType type, List <Token> tokens)
{
    return p < tokens.Count && tokens[p].type == type;
}
// returns true if the current token is of the given type
private bool Check(Token.TokenType type)
{
    return !IsAtEnd() && Peek().type == type;
}
// Toggles selection mode on every token of the given type.
void SetSelectionModeEnable(Token.TokenType type, bool enable)
{
    foreach (Token token in GetTokensOfType(type))
    {
        token.SelectionMode = enable;
    }
}
// True when input remains and the next token has the given type.
private bool check(Token.TokenType tokenType)
{
    return !isAtEnd() && peek().type == tokenType;
}
// Looks up the prefix-parse function registered for the token type;
// returns null when none is registered.
private Func <Expression> getPrefixParserFunction(Token.TokenType type)
{
    // TryGetValue performs a single dictionary lookup instead of the
    // ContainsKey + indexer double lookup.
    Func <Expression> parser;
    if (prefixParserMap.TryGetValue(type, out parser))
    {
        return parser;
    }
    return null;
}
// Returns the parsing priority registered for the token type, falling
// back to Priority.Lowest for unregistered types.
public static Priority QueryPriority(Token.TokenType type)
{
    // TryGetValue performs a single dictionary lookup instead of the
    // ContainsKey + indexer double lookup.
    Priority priority;
    return PriorityMap.TryGetValue(type, out priority) ? priority : Priority.Lowest;
}
// Returns the current token when it has the expected type; otherwise
// parsing cannot continue and a ParseException is thrown. Note the
// token is returned, not consumed — advancing is the caller's job.
private Token Match(Token.TokenType type)
{
    if (Current.Type == type)
    {
        return Current;
    }
    throw new ParseException();
}
/// <summary>
/// Inserts a token with its input representation into the token registry.
/// Registering the same input string twice is a programming error.
/// </summary>
/// <param name="str"> A string representation in input.</param>
/// <param name="type"> A token type. </param>
private static void RegisterToken(string str, Token.TokenType type)
{
    bool alreadyRegistered = tokenTypes.ContainsKey(str);
    if (alreadyRegistered)
    {
        throw new ArgumentException("TokenRegistry: Token Type already registered.");
    }
    tokenTypes.Add(str, type);
}
// Searches the markup container for an element with the given id and
// token type; returns the matched element, or null when nothing matched.
public IAbstractMarkupData GetTag(string elementID, IAbstractMarkupDataContainer objectToSearch, Token.TokenType tokenType)
{
    // Reset visitor state before walking the tree.
    ElementTypeToSearch = tokenType;
    ElementIdToSearch = elementID;
    _currentLockedContentId = 0;
    FoundElement = null;

    VisitChildren(objectToSearch);
    return FoundElement;
}
/// <summary>
/// Creates a tokenizer that matches a bracketed span delimited by the
/// given opening/closing characters, using the supplied inner tokenizers
/// (if any) for the bracketed content.
/// </summary>
public BracketTokenizer(char openingBracket, char closingBracket, Token.TokenType type = Token.TokenType.Value, Token.TokenType previousType = Token.TokenType.NA, string name = null, List <ITokenizer> tokenizers = null)
{
    Name = name;
    Type = type;
    PreviousType = previousType;
    OpeningBracket = openingBracket;
    ClosingBracket = closingBracket;
    this.tokenizers = tokenizers;
}
// Seals the current token when it is a number, then records the bracket
// as an operator token. (CurrentToken() is called twice on purpose to
// keep the original access pattern intact.)
public void Brackets(Token.TokenType type)
{
    if (CurrentToken().IsNumber())
    {
        CurrentToken().IsSealed = true;
    }

    AddOperatorToken(type);
}
// Helpers

// checks if the next token is of the expected type, and advances
private Token Consume(Token.TokenType type, string message)
{
    if (!Check(type))
    {
        throw Error(Peek(), message);
    }
    return Advance();
}
/*
 * Callback method. Invoked by DiceManager when the dice has finished
 * rolling.
 */
public void DiceRolled(int diceNum, Token.TokenType type)
{
    // A six increments the six-counter; reaching three forfeits the
    // extra roll and passes the turn.
    if (diceNum == 6)
    {
        diceSixCount++;
        if (diceSixCount >= 3)
        {
            dice.EnableUserInteraction = false;
            diceSixCount = 0;
            if (onSixThreeTimes != null)
            {
                onSixThreeTimes(GetCurrentTokenType());
            }
            NextTurn();
            StartCoroutine(StartTurn());
            return;
        }
    }

    lastDiceNum = diceNum;
    Token.TokenPlayer player = GetCurrentTokenPlayer();
    List <Token> movableTokens = tokenManager.GetMovableTokens(GetCurrentTokenType(), diceNum);

    // No movable token and no extra roll earned by a six: pass the turn.
    if (diceNum != 6 && movableTokens.Count <= 0)
    {
        NextTurn();
        StartCoroutine(StartTurn());
        return;
    }

    if (player == Token.TokenPlayer.Human)
    {
        dice.EnableUserInteraction = false;
        if (movableTokens.Count == 1)
        {
            // Single option: move it automatically; if the move fails,
            // the turn passes to the next player.
            if (!movableTokens [0].MoveToken(diceNum))
            {
                NextTurn();
                StartCoroutine(StartTurn());
            }
        }
        else if (movableTokens.Count > 1)
        {
            // Several options: put all candidates into selection mode so
            // the human can pick one.
            for (int i = 0; i < movableTokens.Count; i++)
            {
                movableTokens [i].SelectionMode = true;
            }
        }
    }
    else if (player == Token.TokenPlayer.Computer)
    {
        // Computer-controlled player: the opponent controller takes the move.
        opponentCtrl.Play(diceNum);
    }
}
// Initializes the default game setup: the four token colours, each
// controlled by the computer.
void Start()
{
    SelectedTokens = new Token.TokenType[] {
        Token.TokenType.Blue,
        Token.TokenType.Red,
        Token.TokenType.Green,
        Token.TokenType.Yellow
    };

    var players = new Token.TokenPlayer[SelectedTokens.Length];
    for (int i = 0; i < players.Length; i++)
    {
        players[i] = Token.TokenPlayer.Computer;
    }
    SelectedTokenPlayers = players;
}
/// <summary>
/// Creates a tokenizer driven by a regular expression that is anchored
/// to the start of the remaining input. <paramref name="first"/>
/// restricts which first character may begin a match; when omitted, any
/// character is allowed.
/// </summary>
public RegexTokenizer(string regex, Token.TokenType type, string name, CharRange?first = null, Token.TokenType previousType = Token.TokenType.NA, params string[] previousNames)
{
    Name = name;
    Type = type;
    PreviousType = previousType;
    PreviousNames = previousNames;

    // Anchor so the pattern only matches at the current scan position.
    Regex = new Regex("^" + regex);
    First = first ?? new CharRange { Min = char.MinValue, Max = char.MaxValue };
}
// Scans rightward from the cursor for the first token of the target
// type; returns a fresh INVALID token when none exists.
public Token RightFromCurrent(Token.TokenType targetType)
{
    int index = cursor + 1;
    while (index < mTokens.Count)
    {
        Token candidate = mTokens[index];
        if (candidate.type == targetType)
        {
            return candidate;
        }
        index++;
    }
    return new Token(Token.TokenType.INVALID);
}