/// <summary>
/// Creates a multi-assignment expression wrapping the supplied assignments.
/// </summary>
/// <param name="declare">Declaration flag (not read by this factory; kept for signature parity).</param>
/// <param name="exprs">The individual assignment expressions to group.</param>
/// <param name="token">Token used to set up the expression's context.</param>
/// <returns>The configured multi-assignment expression.</returns>
public static Expr AssignMulti(bool declare, List<AssignExpr> exprs, TokenData token)
{
    var multiExpr = new AssignMultiExpr { Assignments = exprs };
    SetupContext(multiExpr, token);
    return multiExpr;
}
/// <summary>
/// Creates an array expression from the supplied item expressions.
/// </summary>
/// <param name="items">Expressions that make up the array elements.</param>
/// <param name="token">Token used to set up the expression's context.</param>
/// <returns>The configured array expression.</returns>
public static Expr Array(List<Expr> items, TokenData token)
{
    var arrayExpr = new ArrayExpr { Exprs = items };
    SetupContext(arrayExpr, token);
    return arrayExpr;
}
/// <summary>
/// Creates a binary expression with symbol scope, context, script reference set.
/// </summary>
/// <param name="left">Left-hand operand expression.</param>
/// <param name="op">The binary operator.</param>
/// <param name="right">Right-hand operand expression.</param>
/// <param name="token">Token used to set up the expression's context.</param>
/// <returns>The configured binary expression.</returns>
public static Expr Binary(Expr left, Operator op, Expr right, TokenData token)
{
    var exp = new BinaryExpr();
    exp.Left = left;
    exp.Op = op;
    exp.Right = right;
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Creates an assignment expression with symbol scope, context, script reference set.
/// </summary>
/// <param name="declare">Whether this assignment is also a variable declaration.</param>
/// <param name="left">The target (variable) expression.</param>
/// <param name="right">The value expression being assigned.</param>
/// <param name="token">Token used to set up the expression's context.</param>
/// <returns>The configured assignment expression.</returns>
public static Expr Assign(bool declare, Expr left, Expr right, TokenData token)
{
    var exp = new AssignExpr();
    exp.IsDeclaration = declare;
    exp.VarExp = left;
    exp.ValueExp = right;
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Builds a literal date-time token by merging a date token and a time token.
/// The result clones the date token and carries both the combined text and the
/// combined <see cref="DateTime"/> value.
/// </summary>
/// <param name="date">Token whose value is a <see cref="DateTime"/> date.</param>
/// <param name="time">Token whose value is a <see cref="TimeSpan"/> time of day.</param>
/// <returns>A cloned token holding "&lt;date text&gt; &lt;time text&gt;" and the merged value.</returns>
public Token ToConstDateTimeToken(TokenData date, TokenData time)
{
    var datePart = (DateTime)date.Token.Value;
    var timePart = (TimeSpan)time.Token.Value;
    var combined = new DateTime(datePart.Year, datePart.Month, datePart.Day,
                                timePart.Hours, timePart.Minutes, timePart.Seconds);
    var combinedText = date.Token.Text + " " + time.Token.Text;
    var result = date.Token.Clone();
    result.SetTextAndValue(combinedText, combined);
    return result;
}
/// <summary>
/// Generates an encrypted access-token string from the supplied token data.
/// </summary>
/// <param name="data">The token data to encode; must not be null.</param>
/// <returns>The encrypted bytes rendered via the project's ToBase65 extension.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
public string GenerateAccessToken(TokenData data)
{
    // nameof is refactor-safe, unlike the previous magic string "data".
    if (data == null) throw new ArgumentNullException(nameof(data));
    var bytes = GetAccessTokenBytes(data);
    var encryptedBytes = _encryptor.Encrypt(bytes);
    // NOTE(review): ToBase65 appears to be a project-specific encoding extension
    // (also used by GenerateAuthorizationToken) — confirm the name is intentional
    // and not a typo for Base64.
    return encryptedBytes.ToBase65();
}
/// <summary>
/// Adds the counts from a collection.
/// Only tokens that exist in the destination will be added.
/// </summary>
/// <param name="pDest">Destination collection whose counts are the base.</param>
/// <param name="pSrc">Source collection whose counts are added for overlapping keys.</param>
/// <returns>A new collection; neither input is modified.</returns>
public static TokenCollection Add(TokenCollection pDest, TokenCollection pSrc)
{
    // Drop source tokens not present in the destination so only overlapping keys are summed.
    pSrc = Exclude(pDest._data.Tokens.Keys, pSrc);
    TokenData copy = new TokenData(pDest._data);
    foreach (KeyValuePair<string, int> pair in pSrc._data.Tokens)
    {
        // pair.Value replaces the redundant second lookup pSrc._data.Tokens[pair.Key].
        copy.Tokens[pair.Key] += pair.Value;
    }
    return new TokenCollection(copy);
}
/// <summary>
/// Parses the remainder of the current line into tokens: the token already
/// consumed by the lexer ("takeover" token) followed by a single token
/// covering the rest of the line (no quotes needed).
/// </summary>
/// <returns>Two tokens: the takeover token and the rest-of-line token.</returns>
public override Token[] Parse()
{
    // print no quotes needed!
    var takeoverToken = _lexer.LastTokenData;
    // Capture position BEFORE reading the line so the line token reports
    // where it started, not where reading finished.
    int line = _lexer.State.Line;
    int pos = _lexer.State.LineCharPosition;
    var lineToken = _lexer.ReadLine(false);
    var t = new TokenData() { Token = lineToken, Line = line, LineCharPos = pos };
    // Record both tokens so downstream consumers see them in source order.
    _lexer.ParsedTokens.Add(takeoverToken);
    _lexer.ParsedTokens.Add(t);
    return new Token[] { takeoverToken.Token, lineToken };
}
/// <summary>
/// Parses a function declaration statement.
/// This method is made public to allow other plugins to be used to allow different
/// words to represent "function" e.g. "def" instead of "function".
/// </summary>
/// <param name="token">The token data representing the starting token e.g. "function".</param>
/// <param name="expectToken">Whether or not to expect the token in tokenData.
/// If false, advances the token iterator.</param>
/// <returns>The parsed function declaration expression.</returns>
public Expr Parse(TokenData token, bool expectToken)
{
    var stmt = new FunctionDeclareExpr();
    _parser.SetupContext(stmt.Function, token);
    if (expectToken)
        _tokenIt.Expect(token.Token);
    else
        _tokenIt.Advance();

    // Function name.
    var name = _tokenIt.ExpectId(true, true);
    var aliases = new List<string>();
    var nextToken = _tokenIt.NextToken;
    List<string> argNames = null;

    // Option 1: Wild card — function matches trailing identifiers dynamically.
    if (nextToken.Token == Tokens.Multiply)
    {
        stmt.Function.Meta.HasWildCard = true;
        nextToken = _tokenIt.Advance();
    }
    // Option 2: Aliases — "function a, b, c" registers alternate names.
    else if (nextToken.Token == Tokens.Comma)
    {
        // Collect all function aliases
        while (nextToken.Token == Tokens.Comma)
        {
            _tokenIt.Advance();
            var alias = _tokenIt.ExpectId(true, true);
            aliases.Add(alias);
            nextToken = _tokenIt.NextToken;
        }
        if (aliases.Count > 0)
            stmt.Function.Meta.Aliases = aliases;
    }

    // Get the parameters (parenthesized list is optional).
    if (nextToken.Token == Tokens.LeftParenthesis)
    {
        _tokenIt.Expect(Tokens.LeftParenthesis);
        argNames = _parser.ParseNames();
        _tokenIt.Expect(Tokens.RightParenthesis);
    }
    stmt.Function.Meta.Init(name, argNames);

    // Now parse the function block.
    ParseBlock(stmt.Function);
    return stmt;
}
/// <summary>
/// Parses the remainder of the current line, returning the lexer's last
/// (takeover) token plus one token for the rest of the line.
/// </summary>
/// <param name="includeNewLine">Present for interface parity; the line is read
/// without the newline (ReadLine is called with false).</param>
/// <returns>Two tokens: the takeover token and the rest-of-line token.</returns>
protected Token[] ParseLine(bool includeNewLine)
{
    // print no quotes needed!
    var takeover = _lexer.LastTokenData;

    // Record the start position before consuming the line so the emitted
    // token data points at the beginning of the text.
    var startLine = _lexer.State.Line;
    var startCharPos = _lexer.State.LineCharPosition;

    // This stops on the last char before the newline. So move forward one.
    var restOfLine = _lexer.ReadLine(false);

    var lineTokenData = new TokenData
    {
        Token = restOfLine,
        Line = startLine,
        LineCharPos = startCharPos
    };
    _lexer.ParsedTokens.Add(takeover);
    _lexer.ParsedTokens.Add(lineTokenData);

    return new Token[] { takeover.Token, restOfLine };
}
/// <summary>
/// Issues an authorization token for the given consumer / resource-owner pair,
/// stamped with the current UTC time.
/// </summary>
/// <param name="consumerId">The consumer (client) identifier.</param>
/// <param name="resourceOwnerId">The resource owner (user) identifier.</param>
/// <param name="redirectUri">The redirect URI the code is bound to.</param>
/// <returns>A token carrying the encoded authorization code and its lifetime.</returns>
public Token GetAuthorizationToken(long consumerId, long resourceOwnerId, string redirectUri)
{
    var tokenData = new TokenData();
    tokenData.ConsumerId = consumerId;
    tokenData.Timestamp = DateTimeOffset.UtcNow.DateTime.Ticks;
    tokenData.ResourceOwnerId = resourceOwnerId;
    tokenData.RedirectUri = redirectUri;

    var token = new Token();
    token.AuthorizationToken = ServiceLocator.Issuer.GenerateAuthorizationToken(tokenData);
    token.ExpiresIn = ServiceLocator.Configuration.AuthorizationTokenExpirationLength;
    return token;
}
/// <summary>
/// Generates an encrypted authorization-token string. When a redirect URI is
/// present it is encrypted separately and appended after a '|' separator.
/// </summary>
/// <param name="data">The token data to encode; must not be null.</param>
/// <returns>The encoded token, optionally suffixed with the encoded redirect URI.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="data"/> is null.</exception>
public string GenerateAuthorizationToken(TokenData data)
{
    if (data == null) throw new ArgumentNullException("data");

    var tokenBytes = GetAuthorizationTokenBytes(data);
    var encoded = _encryptor.Encrypt(tokenBytes).ToBase65();

    if (data.RedirectUri.HasValue())
    {
        var redirectBytes = Encoding.ASCII.GetBytes(data.RedirectUri);
        encoded += "|" + _encryptor.Encrypt(redirectBytes).ToBase65();
    }
    return encoded;
}
/// <summary>
/// Rewrites a "#property ..." node into a typed variable declaration of the
/// form "&lt;type&gt; &lt;name&gt;=&lt;value&gt;;", resolving the declared type
/// via a matching attribute in the Mql2Fdk.Attributes assembly when possible.
/// </summary>
/// <param name="node">The parse node whose first child holds the #property text.</param>
public static void ParseData(ParseNode node)
{
    var tokenData = node.Children.First();
    // Strip the "#property " prefix, leaving "<name> <value>".
    var defineText = tokenData.Content.Remove(0, "#property ".Length);
    var lexer = new Mq4Lexer();
    var defineTokens = lexer.BuildTextTokens(defineText);
    var nodes = defineTokens.Select(token => token.BuildTerminalNode()).ToList();
    var states = new CleanupAstNodeStates(nodes);
    // Try to resolve an attribute type named after the property, with and
    // without the "Attribute" suffix (e.g. indicator_color1Attribute).
    var assemblyOfAttributes = typeof(indicator_color1Attribute).Assembly;
    var fullTypeName = string.Format("Mql2Fdk.Attributes.{0}Attribute", states[0].Content);
    var fullTypeNameReduced = string.Format("Mql2Fdk.Attributes.{0}", states[0].Content);
    var getTypeofAttribute = assemblyOfAttributes.GetType(fullTypeName) ?? assemblyOfAttributes.GetType(fullTypeNameReduced);
    bool needsQuotes = false;
    // Prefer the reflection-derived type name; otherwise infer it from the statement.
    var typeName = getTypeofAttribute != null
        ? ComputeTypenameFromReflection(getTypeofAttribute, states, ref needsQuotes)
        : ComputeTypeFromStatements(states, ref needsQuotes);
    var variableToken = states.MappedNodes[0];
    // When the property carries no explicit value, default it to integer 1.
    var valueToken = states.Count != 1 ? states.MappedNodes[1] : new TokenData(0, 0, TokenKind.Int, "1").BuildTerminalNode();
    if (needsQuotes)
    {
        // String-typed values must be emitted as quoted literals.
        var finalTokenData = valueToken.GetTokenData();
        finalTokenData.Token = TokenKind.QuotedString;
        finalTokenData.Content = string.Format("\"{0}\"", finalTokenData.Content);
    }
    // Rebuild the node's children as: <type> <name>=<value>;
    node.Children.Clear();
    var insertTokenType = new TokenData(0, 0, TokenKind.TypeName, typeName);
    var buildTerminalToken = insertTokenType.BuildTerminalNode();
    node.Add(buildTerminalToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Space, " "));
    node.Add(variableToken);
    node.AddTerminalToken(new TokenData(0, 0, TokenKind.Assign, "="));
    node.Add(valueToken);
    // NOTE(review): variable "colon" actually holds a semicolon token.
    var colon = new TokenData(0, 0, TokenKind.SemiColon, ";").BuildTerminalNode();
    node.Children.Add(colon);
}
/// <summary>
/// Authenticates the given credentials and issues a new 30-minute session token.
/// </summary>
/// <param name="username">The user name to authenticate.</param>
/// <param name="password">The user's password.</param>
/// <returns>Token data carrying the new session key.</returns>
public TokenData Authorize(string username, string password)
{
    // Presumably throws when credentials are invalid — verify BusinessPrincipal.Login's contract.
    BusinessPrincipal.Login(username, password);
    var result = new TokenData();
    var token = new Token();
    var tokenRepository = new TokenRepository();
    token.Key = Guid.NewGuid().ToString().ToUpper();
    token.UserName = username;
    // NOTE(review): uses local time (DateTime.Now) for creation/expiry; consider
    // DateTime.UtcNow to avoid DST/timezone skew — confirm how consumers compare dates.
    token.CreatedDate = DateTime.Now;
    token.ExpirationDate = DateTime.Now.AddMinutes(30);
    tokenRepository.AddToken(token);
    result.Key = token.Key;
    return result;
}
/// <summary>
/// Authorizes the incoming request via its access token, populating TokenData
/// on success and throwing <see cref="UnauthorizedAccessException"/> on any failure.
/// </summary>
/// <param name="filterContext">The MVC action-executing context (not read here).</param>
/// <exception cref="UnauthorizedAccessException">Thrown when authorization fails.</exception>
protected virtual void AuthorizeRequest(ActionExecutingContext filterContext)
{
    var isAuthorized = false;
    try
    {
        var resourceRequest = new ResourceRequest(new HttpRequestBaseRequest(Request), _serviceLocator);
        isAuthorized = resourceRequest.Authorize();
        TokenData = _serviceLocator.Issuer.DecodeAccessToken(resourceRequest.AccessToken);
    }
    catch (OAuthException)
    {
        // Deliberately swallowed: any OAuth failure leaves isAuthorized false,
        // which is converted to UnauthorizedAccessException below.
    }
    if (isAuthorized) return;
    throw new UnauthorizedAccessException();
}
/// <summary>
/// Authorizes an OAuth refresh-token request and, if every check passes,
/// mints a fresh access/refresh token pair for the same resource owner.
/// </summary>
/// <param name="request">The incoming OAuth request.</param>
/// <returns>A new token pair.</returns>
/// <exception cref="OAuthException">Thrown for any validation failure.</exception>
public Token Authorize(IOAuthRequest request)
{
    _logger.Debug("Authorizing refresh token request");
    if (request.ContentType != ContentType.FormEncoded)
        throw new OAuthException(ErrorCode.InvalidRequest, "Invalid content type.");
    //Make sure consumer is valid
    var consumer = _consumerRepository.GetByClientId(request.ClientId);
    if (consumer == null)
        throw new OAuthException(ErrorCode.InvalidClient, "Client credentials are invalid");
    if (consumer.Secret != request.ClientSecret)
        throw new OAuthException(ErrorCode.InvalidClient, "User credentials are invalid");
    var refreshToken = request.RefreshToken;
    if (string.IsNullOrWhiteSpace(refreshToken))
        throw new OAuthException(ErrorCode.InvalidRequest, "Refresh token is invalid");
    // The decoded refresh token must have been issued to this consumer.
    var tokenData = _issuer.DecodeRefreshToken(refreshToken);
    if (tokenData.ConsumerId != consumer.ConsumerId)
        throw new OAuthException(ErrorCode.UnauthorizedClient, "Refresh token is invalid");
    // The resource owner must still approve this consumer.
    if (!_resourceOwnerRepository.IsConsumerApproved(tokenData.ResourceOwnerId, tokenData.ConsumerId))
        throw new OAuthException(ErrorCode.UnauthorizedClient, "Unauthorized access");
    // Fresh timestamp so the new pair gets a full lifetime.
    var newTokenData = new TokenData { ConsumerId = consumer.ConsumerId, ResourceOwnerId = tokenData.ResourceOwnerId, Timestamp = DateTimeOffset.UtcNow.DateTime.Ticks };
    return new Token { AccessToken = _issuer.GenerateAccessToken(newTokenData), ExpiresIn = _configuration.AccessTokenExpirationLength, RefreshToken = _issuer.GenerateRefreshToken(newTokenData) };
}
/// <summary>
/// Verifies that two TokenData instances built from identical field values
/// compare equal and produce the same hash code.
/// </summary>
public void TokenDataEqualityTest()
{
    var sharedExpiry = DateTimeOffset.Now;

    var first = new TokenData { Client = "client", ExpiresAt = sharedExpiry, Token = "token" };
    var second = new TokenData { Client = "client", ExpiresAt = sharedExpiry, Token = "token" };

    Assert.AreEqual(first.GetHashCode(), second.GetHashCode());
    Assert.AreEqual(first, second);
}
/// <summary>
/// Authorizes an OAuth resource-owner password credentials request: validates
/// the consumer and the user's password, records the consumer as approved, and
/// issues an access/refresh token pair.
/// </summary>
/// <param name="request">The incoming OAuth request.</param>
/// <returns>The issued token pair.</returns>
/// <exception cref="OAuthException">Thrown for any validation failure.</exception>
public Token Authorize(IOAuthRequest request)
{
    _logger.Debug("Authorizing password token request");
    if (request.ContentType != ContentType.FormEncoded)
        throw new OAuthException(ErrorCode.InvalidRequest, "Invalid content type.");
    //Make sure consumer is valid
    var consumer = _serviceLocator.ConsumerRepository.GetByClientId(request.ClientId);
    if (consumer == null)
        throw new OAuthException(ErrorCode.InvalidClient, "Client credentials are invalid");
    if (consumer.Secret != request.ClientSecret)
        throw new OAuthException(ErrorCode.InvalidClient, "User credentials are invalid");
    //Make sure resource owner is valid
    var resourceOwner = _serviceLocator.ResourceOwnerRepository.GetByUsername(consumer.ConsumerId, request.Username);
    if (resourceOwner == null)
        throw new OAuthException(ErrorCode.InvalidClient, "User credentials are invalid");
    if (!_serviceLocator.PasswordHasher.CheckPassword(request.Password, resourceOwner.Password))
        throw new OAuthException(ErrorCode.InvalidClient, "User credentials are invalid");
    //Make sure consumer is approved by resource owner
    // NOTE(review): this unconditionally APPROVES the consumer rather than
    // checking approval — confirm auto-approval on password grant is intended.
    _serviceLocator.ResourceOwnerRepository.ApproveConsumer(resourceOwner.ResourceOwnerId, consumer.ConsumerId);
    var data = new TokenData { ConsumerId = consumer.ConsumerId, ResourceOwnerId = resourceOwner.ResourceOwnerId, Timestamp = DateTimeOffset.UtcNow.DateTime.Ticks };
    return new Token { AccessToken = _serviceLocator.Issuer.GenerateAccessToken(data), RefreshToken = _serviceLocator.Issuer.GenerateRefreshToken(data), ExpiresIn = _serviceLocator.Configuration.AccessTokenExpirationLength };
}
/// <summary>
/// Authorizes an OAuth client-credentials request: validates the consumer's
/// client id/secret and issues an access/refresh token pair.
/// </summary>
/// <param name="request">The incoming OAuth request.</param>
/// <returns>The issued token pair.</returns>
/// <exception cref="OAuthException">Thrown when client credentials are invalid.</exception>
public Token Authorize(IOAuthRequest request)
{
    _logger.Debug("Authorizing client credentials token request");

    //Make sure consumer is valid
    var consumer = _serviceLocator.ConsumerRepository.GetByClientId(request.ClientId);
    var credentialsValid = consumer != null
        && consumer.Secret.Equals(request.ClientSecret, StringComparison.OrdinalIgnoreCase);
    if (!credentialsValid)
        throw new OAuthException(ErrorCode.InvalidClient, "Client credentials are invalid");

    var data = new TokenData();
    data.ConsumerId = consumer.ConsumerId;
    data.Timestamp = DateTimeOffset.UtcNow.DateTime.Ticks;

    var issued = new Token();
    issued.AccessToken = _serviceLocator.Issuer.GenerateAccessToken(data);
    issued.RefreshToken = _serviceLocator.Issuer.GenerateRefreshToken(data);
    issued.ExpiresIn = _serviceLocator.Configuration.AccessTokenExpirationLength;
    return issued;
}
/// <summary>
/// Exchanges an authorization code for an access/refresh token pair, validating
/// grant type, code presence, code age, and the bound redirect URI.
/// </summary>
/// <param name="request">The incoming OAuth request.</param>
/// <returns>The issued token pair.</returns>
/// <exception cref="OAuthException">Thrown for any validation failure.</exception>
public Token Authorize(Request.IOAuthRequest request)
{
    _logger.Debug("Authorizing authorization code request");
    var grantType = request.GrantType;
    if (!grantType.HasValue())
        throw new OAuthException(ErrorCode.InvalidRequest, "Parameter grant_type is missing");
    if (request.GrantType != GrantType.AuthorizationCode)
        throw new OAuthException(ErrorCode.InvalidGrant, "The specified grant_type is not supported");
    var code = request.AuthorizationCode;
    if (!code.HasValue())
        throw new OAuthException(ErrorCode.InvalidRequest, "Parameter code is missing");
    var tokenData = _issuer.DecodeAuthorizationToken(code);
    // Authorization codes are short-lived; reject when older than the configured window (seconds).
    if ((DateTime.UtcNow - new DateTime(tokenData.Timestamp, DateTimeKind.Utc)).TotalSeconds > _configuration.AuthorizationTokenExpirationLength)
        throw new OAuthException(ErrorCode.InvalidRequest, "Authorization code has expired");
    // The redirect_uri must match the one the code was issued against.
    if (tokenData.RedirectUri != request.RedirectUri)
        throw new OAuthException(ErrorCode.InvalidRequest, "The specified redirect_uri is invalid");
    var newTokenData = new TokenData { ConsumerId = tokenData.ConsumerId, ResourceOwnerId = tokenData.ResourceOwnerId, Timestamp = DateTime.UtcNow.Ticks };
    // NOTE(review): "RedirectsUri" looks like a typo for "RedirectUri" on the Token type — confirm.
    return new Token { AccessToken = _issuer.GenerateAccessToken(newTokenData), ExpiresIn = _configuration.AccessTokenExpirationLength, RefreshToken = _issuer.GenerateRefreshToken(newTokenData), RedirectsUri = request.RedirectUri };
}
/// <summary>
/// Static constructor: builds the XSC.TokenType -> editor TokenData map used
/// for syntax classification and colorization.
/// </summary>
static Scanner()
{
    int xEnumMax = Enum.GetValues(typeof(XSC.TokenType)).GetUpperBound(0);
    mTokenMap = new TokenData[xEnumMax + 1];
    // Set Default values
    // NOTE(review): writing mTokenMap[i].Type directly implies TokenData is a
    // struct here (on a reference type the elements would still be null) — confirm.
    foreach (int i in Enum.GetValues(typeof(XSC.TokenType)))
    {
        mTokenMap[i].Type = TokenType.Unknown;
        mTokenMap[i].Color = TokenColor.Text;
    }
    mTokenMap[(int)XSC.TokenType.Comment] = new TokenData { Type = TokenType.LineComment, Color = TokenColor.Comment };
    mTokenMap[(int)XSC.TokenType.LiteralAsm] = new TokenData { Type = TokenType.Literal, Color = TokenColor.String };
    mTokenMap[(int)XSC.TokenType.AlphaNum] = new TokenData { Type = TokenType.Identifier, Color = TokenColor.Identifier };
    mTokenMap[(int)XSC.TokenType.ValueInt] = new TokenData { Type = TokenType.Literal, Color = TokenColor.Number };
    // Registers and keywords share the same classification/color.
    var xKeyword = new TokenData { Type = TokenType.Keyword, Color = TokenColor.Keyword };
    mTokenMap[(int)XSC.TokenType.Register] = xKeyword;
    mTokenMap[(int)XSC.TokenType.Keyword] = xKeyword;
    mTokenMap[(int)XSC.TokenType.Delimiter] = new TokenData { Type = TokenType.Delimiter, Color = TokenColor.Text };
    mTokenMap[(int)XSC.TokenType.Operator] = new TokenData { Type = TokenType.Operator, Color = TokenColor.Text };
    mTokenMap[(int)XSC.TokenType.WhiteSpace] = new TokenData { Type = TokenType.WhiteSpace, Color = TokenColor.Text };
    mTokenMap[(int)XSC.TokenType.Unknown] = new TokenData { Type = TokenType.Unknown, Color = TokenColor.Text };
}
/// <summary>
/// Creates an assignment expression via the Exprs factory and appends it
/// to the supplied declaration list.
/// </summary>
/// <param name="expectVar">Whether the assignment is a declaration.</param>
/// <param name="varExp">The target (variable) expression.</param>
/// <param name="valExp">The value expression.</param>
/// <param name="declarations">List that collects the created assignments.</param>
/// <param name="token">Token used to set up the expression's context.</param>
private void AddAssignment(bool expectVar, Expr varExp, Expr valExp, List<AssignExpr> declarations, TokenData token)
{
    declarations.Add((AssignExpr)Exprs.Assign(expectVar, varExp, valExp, token));
}
/// <summary>
/// Packs the timestamp, resource-owner id, and consumer id (8 bytes each)
/// into a 24-byte buffer, interleaving one byte of each per group of three:
/// [tick, owner, consumer, tick, owner, consumer, ...].
/// </summary>
/// <param name="data">Source token data.</param>
/// <returns>The 24-byte interleaved payload.</returns>
private static byte[] GetAccessTokenBytes(TokenData data)
{
    var ownerBytes = BitConverter.GetBytes(data.ResourceOwnerId);
    var tickBytes = BitConverter.GetBytes(data.Timestamp);
    var consumerBytes = BitConverter.GetBytes(data.ConsumerId);

    var packed = new byte[24];
    for (int i = 0; i < 8; i++)
    {
        packed[3 * i] = tickBytes[i];
        packed[3 * i + 1] = ownerBytes[i];
        packed[3 * i + 2] = consumerBytes[i];
    }
    return packed;
}
/// <summary>
/// Copy constructor: clones the token-count dictionary so the new instance
/// is independent of the source.
/// </summary>
/// <param name="pData">The instance to copy from.</param>
public TokenData(TokenData pData)
{
    Tokens = new Dictionary<string, int>(pData.Tokens);
}
/// <summary>
/// Constructor: wraps the supplied token-count dictionary in a new TokenData.
/// </summary>
/// <param name="pTokens">Token name to count mapping.</param>
public TokenCollection(Dictionary<string, int> pTokens)
{
    _data = new TokenData(pTokens);
}
/// <summary>
/// Constructor: stores the token data associated with this stack entry.
/// </summary>
/// <param name="tokenData">The token data to hold.</param>
public StackData(TokenData tokenData)
{
    this.tokenData = tokenData;
}
/// <summary>
/// Persists the token data. Implementations decide the storage mechanism.
/// </summary>
/// <param name="data">The token data to save.</param>
protected abstract void SaveTokenData(TokenData data);
/// <summary>
/// Event handler: applies a scale value to tokens depending on the event's
/// scale type — either every token of the first token's colour (TokenType),
/// or each listed token individually with a small random positional nudge
/// so tokens sharing a tile don't fully overlap (SharedTile).
/// </summary>
/// <param name="a_Event">Expected to be an EventTokenScaleFactor.</param>
private void TokenScaleFactor(IEventBase a_Event)
{
    EventTokenScaleFactor data = a_Event as EventTokenScaleFactor;
    if (data == null)
    {
        Debug.LogError("[TokenManager] Scale factor Value trigger null");
        return;
    }
    switch (data.EScaleType)
    {
        case eScaleType.None:
            break;
        case eScaleType.TokenType:
            // Colour is taken from the FIRST token in the event's list.
            TokenData refTokenData = data.LTokenGameObject[0].GetComponent<TokenData>();
            if (refTokenData == null)
            {
                Debug.LogError("[TokenManager] TokenData null, cannot set scale value");
                return;
            }
            ScaleTokenType(refTokenData);
            break;
        case eScaleType.SharedTile:
            // Scale each token and offset it by a random point within a
            // 0.15-radius circle (z preserved) to reduce overlap.
            for (int i = 0; i < data.LTokenGameObject.Count; i++)
            {
                Debug.Log("<color=green>[TokenManager][TokenScaleFactor] Shared Tile</color>");
                data.LTokenGameObject[i].transform.localScale = data.Vec2ScaleValue;
                Vector2 vec2RandomPosition = Random.insideUnitCircle * 0.15f;
                data.LTokenGameObject[i].transform.position += new Vector3(vec2RandomPosition.x, vec2RandomPosition.y, data.LTokenGameObject[i].transform.position.z);
            }
            break;
    }
    //Reseting scale to original size after highlighting them
    // Local function: applies data.Vec2ScaleValue to every token in the list
    // matching the given token's colour.
    void ScaleTokenType(TokenData a_refTokenData)
    {
        switch (a_refTokenData.EnumTokenType)
        {
            case eTokenType.None:
                break;
            case eTokenType.Blue:
                for (int i = 0; i < m_lstBlueToken.Count; i++)
                {
                    m_lstBlueToken[i].gameObject.transform.localScale = data.Vec2ScaleValue;
                }
                break;
            case eTokenType.Yellow:
                for (int i = 0; i < m_lstYellowToken.Count; i++)
                {
                    m_lstYellowToken[i].gameObject.transform.localScale = data.Vec2ScaleValue;
                }
                break;
            case eTokenType.Red:
                for (int i = 0; i < m_lstRedToken.Count; i++)
                {
                    m_lstRedToken[i].gameObject.transform.localScale = data.Vec2ScaleValue;
                }
                break;
            case eTokenType.Green:
                for (int i = 0; i < m_lstGreenToken.Count; i++)
                {
                    m_lstGreenToken[i].gameObject.transform.localScale = data.Vec2ScaleValue;
                }
                break;
        }
    }
}
/// <summary>
/// Reads an interpolated string in format "${variable} some text ${othervariable + 2}".
/// Literal segments and interpolation segments are collected as separate tokens;
/// if no interpolation occurs the whole buffer is returned as a plain literal.
/// </summary>
/// <param name="quote">The quote character terminating the string (when not reading a line).</param>
/// <param name="readLine">Whether or not to only read to the end of the line.</param>
/// <param name="includeNewLine">Whether or not to include the new line in parsing.</param>
/// <param name="setPositionAfterToken">Whether or not to set the position of the lexer after the token.</param>
/// <returns>A literal-string token, or an interpolated token holding all segments.</returns>
public Token ReadInterpolatedString(char quote, bool readLine = false, bool includeNewLine = false, bool setPositionAfterToken = true)
{
    var allTokens = new List<TokenData>();
    var interpolationCount = 0;
    // Only supporting following:
    // 1. id's abcd with "_"
    // 2. "."
    // 3. math ops ( + - / * %)
    // "name" 'name' "name\"s" 'name\'"
    var buffer = new StringBuilder();
    var curr = _scanner.ReadChar();
    var next = _scanner.PeekChar();
    var matched = false;
    var escapeChar = '\\';
    Token token = null;
    while (_scanner.State.Pos <= _scanner.LAST_POSITION)
    {
        // End string " or '
        if (!readLine && curr == quote)
        {
            matched = true;
            _scanner.MoveChars(1);
            break;
        }
        // End of line.
        if (readLine && (curr == '\r' || curr == '\n'))
        {
            matched = true;
            if (!includeNewLine) { break; }
            // Consume \r\n as one logical newline when present.
            var is2CharNewLine = _scanner.ScanNewLine(curr);
            var newline = is2CharNewLine ? "\r\n" : "\n";
            buffer.Append(newline);
            token = Tokens.NewLine;
            break;
        }
        // Interpolation start, e.g. "${".
        else if (curr == _interpolatedStartChar && next == '{')
        {
            // Keep track of interpolations and their start positions.
            interpolationCount++;
            int interpolatedStringStartPos = _scanner.State.LineCharPosition + 2;
            int interpolatedStringLinePos = _scanner.State.Line;
            // Add any existing text before the interpolation as a token.
            if (buffer.Length > 0)
            {
                string text = buffer.ToString();
                token = TokenBuilder.ToLiteralString(text);
                var t = new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line };
                allTokens.Add(t);
                buffer.Clear();
            }
            _scanner.MoveChars(1);
            var tokens = ReadInterpolatedTokens();
            token = TokenBuilder.ToInterpolated(string.Empty, tokens);
            var iTokenData = new TokenData() { Token = token, LineCharPos = interpolatedStringStartPos, Line = interpolatedStringLinePos };
            allTokens.Add(iTokenData);
        }
        // Not an \ for escaping so just append.
        else if (curr != escapeChar)
        {
            buffer.Append(curr);
        }
        // Escape \
        else if (curr == escapeChar)
        {
            var result = _scanner.ScanEscape(quote, false);
            buffer.Append(result.Text);
            _scanner.MoveChars(1);
        }
        curr = _scanner.ReadChar();
        next = _scanner.PeekChar();
    }
    // Error: Unterminated string constant.
    if (!matched && !readLine && _scanner.State.Pos >= _scanner.LAST_POSITION)
    {
        throw new LangException("Syntax Error", "Unterminated string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
    }
    // At this point the pos is already after token.
    // If matched and need to set at end of token, move back 1 char
    if (matched && !setPositionAfterToken)
    {
        _scanner.MoveChars(-1);
    }
    // No interpolations: the whole buffer is one literal string.
    if (interpolationCount == 0)
    {
        var text = buffer.ToString();
        return (TokenBuilder.ToLiteralString(text));
    }
    // Flush any trailing literal text after the last interpolation.
    if (buffer.Length > 0)
    {
        var text = buffer.ToString();
        token = TokenBuilder.ToLiteralString(text);
        allTokens.Add(new TokenData() { Token = token, LineCharPos = 0, Line = _scanner.State.Line });
    }
    return (TokenBuilder.ToInterpolated(string.Empty, allTokens));
}
/// <summary>
/// Stores the Ironclad token response on the user session under the
/// "IroncladTokenResponse" key.
/// </summary>
/// <param name="session">The user session to write to.</param>
/// <param name="tokens">The token response to persist.</param>
private void SaveTokensToUserSession(UserSession.UserSession session, TokenData tokens)
{
    _log.Info("Start saving tokens. TokenResponse:{TokenResponse}", tokens);
    session.Set("IroncladTokenResponse", tokens);
}
/// <summary>
/// Parses the fluent function call, collecting any trailing identifier tokens
/// as wildcard parts, then restructuring the call's arguments into
/// [full wildcard, wildcard parts array, original parameter array].
/// </summary>
/// <returns>The configured function-call expression.</returns>
public override Expr Parse()
{
    // 1. Is it a function call?
    var fnameToken = _tokenIt.NextToken;
    _tokenIt.Advance(_result.TokenCount);
    var remainderOfFuncName = string.Empty;
    var parts = new List<Expr>();
    TokenData firstPart = null;
    // NOTES:
    // Given: function "Find user by" *
    // And: called via Find use by name role
    // wildcard part 1: name
    // wildcard part 2: role
    // full wildcard: "name role"
    var partsToken = _tokenIt.NextToken;
    // 1. Capture all the remaining parts of the wild card.
    while (_tokenIt.NextToken.Token.Kind == TokenKind.Ident)
    {
        var part = _tokenIt.NextToken.Token.Text;
        // a. Store the token of the first wildcard part
        if (firstPart == null)
        {
            firstPart = _tokenIt.NextToken;
        }
        // b. Build up the full name from all wildcards
        remainderOfFuncName += " " + part;
        // c. Create a constant expr from the wildcard
        // as it will be part of an array of strings passed to function
        var partExp = Exprs.Const(new LString(part), _tokenIt.NextToken);
        parts.Add(partExp);
        // d. Move to the next token for another possible wildcard.
        _tokenIt.Advance();
        // e. Check for end of statement.
        if (_tokenIt.IsEndOfStmtOrBlock())
        {
            break;
        }
    }
    var exp = new FunctionCallExpr();
    exp.ParamListExpressions = new List<Expr>();
    exp.ParamList = new List<object>();
    remainderOfFuncName = remainderOfFuncName.Trim();
    var fullWildCard = Exprs.Const(new LString(string.Empty), fnameToken) as ConstantExpr;
    // 2. Create a constant expr representing the full wildcard
    if (!string.IsNullOrEmpty(remainderOfFuncName))
    {
        fullWildCard.Value = remainderOfFuncName;
        _parser.SetupContext(fullWildCard, firstPart);
    }
    var token = _tokenIt.NextToken.Token;
    // CASE 1: Parse parameters with parenthesis "("
    if (token == Tokens.LeftParenthesis)
    {
        _parser.ParseParameters(exp, true, false, false);
    }
    // CASE 2: Parse parameters with ":" until newline.
    else if (token == Tokens.Colon)
    {
        _tokenIt.Advance();
        _parser.ParseParameters(exp, false, false, true);
    }
    exp.NameExp = Exprs.Ident(_result.Name, fnameToken);
    // Have to restructure the arguments.
    // 1. const expr , fullwildcard, "name role"
    // 2. list<constexpr>, wildcard parts, ["name", "role"]
    // 3. list<expr>, args, "kishore", "admin"
    var args = new List<Expr>();
    args.Add(fullWildCard);
    args.Add(Exprs.Array(parts, partsToken));
    args.Add(Exprs.Array(exp.ParamListExpressions, fnameToken));
    // Finally reset the parameters expr on the function call.
    exp.ParamListExpressions = args;
    return (exp);
}
/// <summary>
/// Associates the refreshed tokens with the existing Lykke session token and
/// persists the session asynchronously.
/// </summary>
/// <param name="oldLykkeToken">The existing session's token key.</param>
/// <param name="tokens">The new token data to attach.</param>
/// <returns>The pending save operation.</returns>
private Task SaveLykkeSession(string oldLykkeToken, TokenData tokens)
{
    var lykkeSession = new LykkeSession.LykkeSession(oldLykkeToken, tokens);
    return (_lykkeSessionManager.SetAsync(lykkeSession));
}
/// <summary>
/// Calls the AAS authentication endpoint with the given token code and address
/// and returns the authenticated token data, or null on any failure (missing
/// inputs, missing configuration, HTTP error, or exception — errors are logged).
/// </summary>
/// <param name="tokenCode">The token code sent in the request header.</param>
/// <param name="address">The client address sent in the request header.</param>
/// <param name="param">Collector for messages/bug codes returned by the API.</param>
/// <returns>The token data, or null when authentication fails.</returns>
internal static TokenData GetAuthenticated(string tokenCode, string address, CommonParam param)
{
    TokenData result = null;
    try
    {
        if (String.IsNullOrWhiteSpace(tokenCode) || String.IsNullOrWhiteSpace(address))
        {
            return (null);
        }
        if (String.IsNullOrWhiteSpace(Config.BASE_URI))
        {
            LogSystem.Warn("Khong co cau hinh dia chi AAS");
            return (null);
        }
        // NOTE(review): creating an HttpClient per call can exhaust sockets under
        // load; consider a shared instance — confirm call frequency before changing.
        using (var client = new HttpClient())
        {
            client.BaseAddress = new Uri(Config.BASE_URI);
            client.DefaultRequestHeaders.Accept.Clear();
            client.Timeout = new TimeSpan(0, 0, Config.TIME_OUT);
            client.DefaultRequestHeaders.Add(HttpHeaderConstant.TOKEN_PARAM, tokenCode);
            client.DefaultRequestHeaders.Add(HttpHeaderConstant.ADDRESS_PARAM, address);
            HttpResponseMessage respenseMessage = client.GetAsync(AAS.URI.AasToken.GET_AUTHENTICATED).Result;
            LogSystem.Debug(string.Format("Request URI: {0}{1}", Config.BASE_URI, AAS.URI.AasToken.GET_AUTHENTICATED));
            if (respenseMessage.IsSuccessStatusCode)
            {
                string responseData = respenseMessage.Content.ReadAsStringAsync().Result;
                LogSystem.Debug(string.Format("Response data: {0}", responseData));
                ApiResultObject<TokenData> data = JsonConvert.DeserializeObject<ApiResultObject<TokenData>>(responseData);
                if (data != null && data.Data != null && data.Success)
                {
                    result = data.Data;
                }
                else
                {
                    LogSystem.Info(String.Format("Khong lay duoc TokenData. TokeCode = {0}, Address = {1}", tokenCode, address));
                    // BUG FIX: guard against data being null before dereferencing
                    // data.Param (previously threw NullReferenceException when
                    // deserialization produced null).
                    if (data != null && data.Param != null && data.Param.Messages != null && data.Param.Messages.Count > 0)
                    {
                        param.Messages.AddRange(data.Param.Messages);
                    }
                    if (data != null && data.Param != null && data.Param.BugCodes != null && data.Param.BugCodes.Count > 0)
                    {
                        param.BugCodes.AddRange(data.Param.BugCodes);
                    }
                }
            }
            else
            {
                throw new Exception(string.Format("Loi khi goi API: {0}{1}. StatusCode: {2}", Config.BASE_URI, AAS.URI.AasToken.GET_AUTHENTICATED, respenseMessage.StatusCode.GetHashCode()));
            }
        }
    }
    catch (Exception ex)
    {
        // Best-effort by design: failures are logged and surfaced as a null result.
        LogSystem.Error(ex);
        result = null;
    }
    return (result);
}
// Add a section of an ini file to game content
// name is from the ini file and must start with the type
// path is relative and is used for images or other paths in the content
//
// Each section below applies the same priority-merge pattern:
//   - new key: add the entry and record the pack id in its sets
//   - higher priority: replace the existing entry
//   - equal priority: just record the extra pack id
// "name.IndexOf(X.type) == 0" is a starts-with check on the type prefix.
// NOTE: an invalid entry (empty parsed name) returns from the WHOLE method,
// skipping all later sections for that name.
void AddContent(string name, Dictionary<string, string> content, string path, string packID)
{
    // Is this a "TileSide" entry?
    if (name.IndexOf(TileSideData.type) == 0)
    {
        TileSideData d = new TileSideData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!tileSides.ContainsKey(name))
        {
            tileSides.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (tileSides[name].priority < d.priority)
        {
            tileSides.Remove(name);
            tileSides.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (tileSides[name].priority == d.priority)
        {
            tileSides[name].sets.Add(packID);
        }
    }
    // Is this a "Hero" entry?
    if (name.IndexOf(HeroData.type) == 0)
    {
        HeroData d = new HeroData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!heros.ContainsKey(name))
        {
            heros.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (heros[name].priority < d.priority)
        {
            heros.Remove(name);
            heros.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (heros[name].priority == d.priority)
        {
            heros[name].sets.Add(packID);
        }
    }
    // Is this a "Item" entry?
    if (name.IndexOf(ItemData.type) == 0)
    {
        ItemData d = new ItemData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!items.ContainsKey(name))
        {
            items.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (items[name].priority < d.priority)
        {
            items.Remove(name);
            items.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (items[name].priority == d.priority)
        {
            items[name].sets.Add(packID);
        }
    }
    // Is this a "Monster" entry?
    if (name.IndexOf(MonsterData.type) == 0)
    {
        MonsterData d = new MonsterData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // Ignore monster activations (their type prefix overlaps Monster's,
        // so they are filtered out here and handled by the Activation section).
        if (name.IndexOf(ActivationData.type) != 0)
        {
            // If we don't already have one then add this
            if (!monsters.ContainsKey(name))
            {
                monsters.Add(name, d);
                d.sets.Add(packID);
            }
            // If we do replace if this has higher priority
            else if (monsters[name].priority < d.priority)
            {
                monsters.Remove(name);
                monsters.Add(name, d);
            }
            // items of the same priority belong to multiple packs
            else if (monsters[name].priority == d.priority)
            {
                monsters[name].sets.Add(packID);
            }
        }
    }
    // Is this a "Activation" entry?
    if (name.IndexOf(ActivationData.type) == 0)
    {
        ActivationData d = new ActivationData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!activations.ContainsKey(name))
        {
            activations.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (activations[name].priority < d.priority)
        {
            activations.Remove(name);
            activations.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (activations[name].priority == d.priority)
        {
            activations[name].sets.Add(packID);
        }
    }
    // Is this a "Attack" entry?
    if (name.IndexOf(AttackData.type) == 0)
    {
        AttackData d = new AttackData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!investigatorAttacks.ContainsKey(name))
        {
            investigatorAttacks.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (investigatorAttacks[name].priority < d.priority)
        {
            investigatorAttacks.Remove(name);
            investigatorAttacks.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (investigatorAttacks[name].priority == d.priority)
        {
            investigatorAttacks[name].sets.Add(packID);
        }
    }
    // Is this a "Evade" entry?
    if (name.IndexOf(EvadeData.type) == 0)
    {
        EvadeData d = new EvadeData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!investigatorEvades.ContainsKey(name))
        {
            investigatorEvades.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (investigatorEvades[name].priority < d.priority)
        {
            investigatorEvades.Remove(name);
            investigatorEvades.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (investigatorEvades[name].priority == d.priority)
        {
            investigatorEvades[name].sets.Add(packID);
        }
    }
    // Is this a "Horror" entry?
    if (name.IndexOf(HorrorData.type) == 0)
    {
        HorrorData d = new HorrorData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!horrorChecks.ContainsKey(name))
        {
            horrorChecks.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (horrorChecks[name].priority < d.priority)
        {
            horrorChecks.Remove(name);
            horrorChecks.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (horrorChecks[name].priority == d.priority)
        {
            horrorChecks[name].sets.Add(packID);
        }
    }
    // Is this a "Token" entry?
    if (name.IndexOf(TokenData.type) == 0)
    {
        TokenData d = new TokenData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!tokens.ContainsKey(name))
        {
            tokens.Add(name, d);
            d.sets.Add(packID);
        }
        // If we do replace if this has higher priority
        else if (tokens[name].priority < d.priority)
        {
            tokens.Remove(name);
            tokens.Add(name, d);
        }
        // items of the same priority belong to multiple packs
        else if (tokens[name].priority == d.priority)
        {
            tokens[name].sets.Add(packID);
        }
    }
    // Is this a "Peril" entry?
    // NOTE(review): unlike the sections above, perils are validated on
    // sectionName and do NOT track pack sets or merge on equal priority — confirm intended.
    if (name.IndexOf(PerilData.type) == 0)
    {
        PerilData d = new PerilData(name, content);
        // Ignore invalid entry
        if (d.sectionName.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!perils.ContainsKey(name))
        {
            perils.Add(name, d);
        }
        // If we do replace if this has higher priority
        else if (perils[name].priority < d.priority)
        {
            perils.Remove(name);
            perils.Add(name, d);
        }
    }
    // Is this a "Puzzle" entry?
    // NOTE(review): puzzles likewise do not track pack sets — confirm intended.
    if (name.IndexOf(PuzzleData.type) == 0)
    {
        PuzzleData d = new PuzzleData(name, content, path);
        // Ignore invalid entry
        if (d.name.Equals(""))
        {
            return;
        }
        // If we don't already have one then add this
        if (!puzzles.ContainsKey(name))
        {
            puzzles.Add(name, d);
        }
        // If we do replace if this has higher priority
        else if (puzzles[name].priority < d.priority)
        {
            puzzles.Remove(name);
            puzzles.Add(name, d);
        }
    }
}
/// <summary>
/// Constructor. Resolves the configured token settings from the options wrapper.
/// </summary>
/// <param name="tokenManagement">Options wrapper holding the token configuration.</param>
public AuthenticationService(IOptions <TokenData> tokenManagement)
{
    // Fail fast with a clear exception instead of a NullReferenceException
    // on the .Value dereference when DI hands us a null wrapper.
    if (tokenManagement == null)
    {
        throw new ArgumentNullException(nameof(tokenManagement));
    }
    _tokenManagement = tokenManagement.Value;
}
// When the dice has been rolled and the token has been selected this will be called.
private void TokenSelected(TokenData a_refTokenData, int a_iDiceValue)
{
    if (!a_refTokenData.BCanBeUsed)
    {
        Debug.LogError("[TokenManager] This Token cannot be moved");
        return;
    }

    // Resets all the tokens after checking their movable state, making sure
    // no user tried to move any other token.
    if (m_OnResetToken != null)
    {
        m_OnResetToken.Invoke();
    }

    Debug.Log("[TokenManager] Scale Effect Tween Paused: " + DOTween.Pause("ScaleEffect"));

    // Resolve the concrete token instance to move from the selected token's
    // color and per-color id.
    switch (a_refTokenData.EnumTokenType)
    {
        case GameUtility.Base.eTokenType.None:
            break;
        case GameUtility.Base.eTokenType.Blue:
            m_TokenToMove = m_lstBlueToken[a_refTokenData.ITokenID];
            break;
        case GameUtility.Base.eTokenType.Yellow:
            m_TokenToMove = m_lstYellowToken[a_refTokenData.ITokenID];
            break;
        case GameUtility.Base.eTokenType.Red:
            m_TokenToMove = m_lstRedToken[a_refTokenData.ITokenID];
            break;
        case GameUtility.Base.eTokenType.Green:
            m_TokenToMove = m_lstGreenToken[a_refTokenData.ITokenID];
            break;
        default:
            break;
    }

    m_lstTokengameobject.Add(a_refTokenData.gameObject);
    EventManager.Instance.TriggerEvent <EventTokenScaleFactor>(new EventTokenScaleFactor(m_lstTokengameobject, m_Vec3TokenOrginalScale, eScaleType.TokenType));
    m_lstTokengameobject.Clear();

    // Calling the coroutine.
    StartCoroutine(PlayerTurn(0.15f));

    // This coroutine is local to this method.
    IEnumerator PlayerTurn(float a_fDelay)
    {
        yield return new WaitForSeconds(a_fDelay);

        m_lstTokenMovePoints = PathManager.Instance.TokenStateUpdate(m_TokenToMove, a_iDiceValue);
        if (m_lstTokenMovePoints != null)
        {
            Debug.Log("<color=red>[TokenManager] m_TokenToMove: " + m_TokenToMove.ICurrentPathIndex + "</color>");
            // Tween the token through each path point, waiting for every
            // individual move tween to complete before the next one.
            for (int i = 0; i < m_lstTokenMovePoints.Count; i++)
            {
                m_bMoveTweenComplete = false;
                m_TokenToMove.transform.DOMove((Vector2)m_lstTokenMovePoints[i].transform.position, 5, false).SetSpeedBased(true).OnComplete(MoveTweenComplete);
                while (!m_bMoveTweenComplete)
                {
                    yield return null;
                }
            }
        }

        m_TokenToMove.Vec2PositionOnTile = m_TokenToMove.transform.position;
        GameManager.Instance.CurrentPlayer.m_ePlayerState = ePlayerState.PlayerRollDice;

        if (m_TokenToMove.EnumTokenState == eTokenState.InRoute || m_TokenToMove.EnumTokenState == eTokenState.InHideOut)
        {
            m_refCurrentToken = m_TokenToMove;
            Debug.Log("<color=red>[TokenManager] Current Token InRoute,check if other token present in same tile: m_refCurrentToken: " + m_refCurrentToken.ICurrentPathIndex + "TokenState: " + m_TokenToMove.EnumTokenState + "</color>");
            CheckIfTileContainsOtherTokens();
        }

        // Checks if all tokens are in heaven and the player has finished his game.
        if (m_refCurrentToken.EnumTokenState == eTokenState.InHeaven)
        {
            int iBlueTokensFinished = 0, iYellowTokensFinished = 0, iRedTokensFinished = 0, iGreenTokensFinished = 0;
            switch (m_refCurrentToken.EnumTokenType)
            {
                case eTokenType.Blue:
                    for (int i = 0; i < m_lstBlueToken.Count; i++)
                    {
                        if (m_lstBlueToken[i].EnumTokenState == eTokenState.InHeaven)
                        {
                            iBlueTokensFinished++;
                            if (iBlueTokensFinished >= TOKENSPERPLAYER)
                            {
                                EventManager.Instance.TriggerEvent <EventPlayerFinished>(new EventPlayerFinished(m_refCurrentToken));
                            }
                        }
                    }
                    break;

                case eTokenType.Yellow:
                    // BUGFIX: previously iterated m_lstBlueToken.Count (copy-paste
                    // error); iterate the yellow token list itself.
                    for (int i = 0; i < m_lstYellowToken.Count; i++)
                    {
                        if (m_lstYellowToken[i].EnumTokenState == eTokenState.InHeaven)
                        {
                            iYellowTokensFinished++;
                            if (iYellowTokensFinished >= TOKENSPERPLAYER)
                            {
                                EventManager.Instance.TriggerEvent <EventPlayerFinished>(new EventPlayerFinished(m_refCurrentToken));
                            }
                        }
                    }
                    break;

                case eTokenType.Red:
                    // BUGFIX: previously iterated m_lstBlueToken.Count (copy-paste error).
                    for (int i = 0; i < m_lstRedToken.Count; i++)
                    {
                        if (m_lstRedToken[i].EnumTokenState == eTokenState.InHeaven)
                        {
                            iRedTokensFinished++;
                            if (iRedTokensFinished >= TOKENSPERPLAYER)
                            {
                                EventManager.Instance.TriggerEvent <EventPlayerFinished>(new EventPlayerFinished(m_refCurrentToken));
                            }
                        }
                    }
                    break;

                case eTokenType.Green:
                    // BUGFIX: previously iterated m_lstBlueToken.Count (copy-paste error).
                    for (int i = 0; i < m_lstGreenToken.Count; i++)
                    {
                        if (m_lstGreenToken[i].EnumTokenState == eTokenState.InHeaven)
                        {
                            iGreenTokensFinished++;
                            if (iGreenTokensFinished >= TOKENSPERPLAYER)
                            {
                                EventManager.Instance.TriggerEvent <EventPlayerFinished>(new EventPlayerFinished(m_refCurrentToken));
                            }
                        }
                    }
                    break;
            }
        }

        GameManager.Instance.CheckPlayerChangeCondtion();
    }
}
/// <summary>
/// Records a parse error for the given token in the error collection.
/// </summary>
/// <param name="token">The token at which the error occurred.</param>
/// <param name="error">The error message to record.</param>
public void AddError(TokenData token, string error)
{
    this._parseErrors.Add(
        new LangException("Parse", error, this._scriptPath, token.Line, token.LineCharPos));
}
/// <summary>
/// Builds a language exception for an unexpected token at the token's position.
/// </summary>
/// <param name="unexpectedTokenText">The text of the unexpected token.</param>
/// <param name="token">The token causing the exception.</param>
/// <returns>A syntax-error <see cref="LangException"/> (not thrown here).</returns>
public LangException BuildSyntaxUnexpectedTokenException(string unexpectedTokenText, TokenData token)
{
    var message = string.Format("Unexpected token found {0}", unexpectedTokenText);
    return new LangException("Syntax Error", message, ScriptPath, token.Line, token.LineCharPos);
}
/// <summary>
/// Lexes the contents of an interpolated expression up to the closing '}'
/// (or end of input), producing one TokenData per recognized token.
/// </summary>
/// <returns>The tokens read from inside the interpolation.</returns>
private List <TokenData> ReadInterpolatedTokens()
{
    var c = _scanner.ReadChar();
    var n = _scanner.PeekChar();
    var tokens = new List <TokenData>();
    while (c != '}' && !_scanner.IsAtEnd())
    {
        // Remember the scanner position so we can detect whether the
        // token-read below consumed any characters.
        var pos = _scanner.State.Pos;
        // Variable
        if (_scanner.IsIdentStart(c)) { _lastToken = ReadWord(); }
        // Empty space.
        else if (c == ' ' || c == '\t') { _lastToken = Tokens.WhiteSpace; }
        else if (_scanner.IsOp(c) == true) { _lastToken = ReadOperator(); }
        else if (c == '(') { _lastToken = Tokens.LeftParenthesis; }
        else if (c == ')') { _lastToken = Tokens.RightParenthesis; }
        else if (c == '[') { _lastToken = Tokens.LeftBracket; }
        else if (c == ']') { _lastToken = Tokens.RightBracket; }
        else if (c == '.') { _lastToken = Tokens.Dot; }
        else if (c == ',') { _lastToken = Tokens.Comma; }
        else if (c == ':') { _lastToken = Tokens.Colon; }
        else if (_scanner.IsNumeric(c)) { _lastToken = ReadNumber(); }
        else if (c == '\r')
        {
            // NOTE(review): this branch advances the line counter but does NOT
            // reassign _lastToken, so the previous token is appended again at
            // the new position below — confirm this is intended.
            bool is2CharNewline = n == '\n';
            IncrementLine(is2CharNewline);
        }
        else
        {
            throw new LangException("syntax", "unexpected text in string", string.Empty, _scanner.State.Line, _scanner.State.LineCharPosition);
        }
        var t = new TokenData() { Token = _lastToken, Line = _scanner.State.Line, LineCharPos = _scanner.State.LineCharPosition, Pos = pos };
        tokens.Add(t);
        // Single char symbol - char advancement was not made.
        if ((t.Token.Kind == TokenKind.Symbol || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos)
        {
            _scanner.ReadChar();
        }
        c = _scanner.State.CurrentChar;
        n = _scanner.PeekChar();
    }
    return(tokens);
}
// NOTE(review): The body below is decompiler output of the compiler-generated
// async state machine for SendAsync — note the non-compilable "(^ this)"
// reference operations and the "\u003C...\u003E" escaped compiler-generated
// field names. It cannot be edited safely by hand and should be replaced with
// the original async/await source. Left byte-identical here.
// Observable intent (from the visible calls): serialize requests through
// _semaphore; if the stored token for _profile has expired, refresh it via
// IAuthDataService and re-clone the request with the new token before sending;
// throw NoInternetConnectionException on HttpRequestException while offline;
// release the semaphore in the finally block.
protected override async Task <HttpResponseMessage> SendAsync( HttpRequestMessage request, CancellationToken cancellationToken) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field int num = (^ this).\u003C\u003E1__state; HttpResponseMessage response; HttpResponseMessage result1; try { response = (HttpResponseMessage)null; try { TaskAwaiter awaiter1 = this._semaphore.WaitAsync().GetAwaiter(); if (!awaiter1.IsCompleted) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num = 0; TaskAwaiter taskAwaiter = awaiter1; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter, AuthMessageHandler.\u003CSendAsync\u003Ed__5>(ref awaiter1, this); return; } awaiter1.GetResult(); TaskAwaiter <HttpRequestMessage> taskAwaiter1; HttpRequestMessage result2; if (this._secureStore.GetTokenData(this._profile.Id).ExpiresAt <= DateTime.Now) { TaskAwaiter <TokenData> awaiter2 = ((IAuthDataService)Mvx.get_IoCProvider().Resolve <IAuthDataService>()).RefreshToken(this._profile).GetAwaiter(); if (!awaiter2.IsCompleted) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num = 1; TaskAwaiter <TokenData> taskAwaiter2 = awaiter2; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter <TokenData>, AuthMessageHandler.\u003CSendAsync\u003Ed__5>(ref awaiter2, this); return; } TokenData result3 = awaiter2.GetResult(); if (result3 == null) { result1 = (HttpResponseMessage)null; goto label_26; } else { this._profile.TokenInfo = (TokenInfo)JsonConvert.DeserializeObject <TokenInfo>(TokenDecoder.Decode(result3.AccessToken)); TaskAwaiter <HttpRequestMessage> awaiter3 = this.CloneRequest(request, result3).GetAwaiter(); if 
(!awaiter3.IsCompleted) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num = 2; taskAwaiter1 = awaiter3; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter <HttpRequestMessage>, AuthMessageHandler.\u003CSendAsync\u003Ed__5>(ref awaiter3, this); return; } result2 = awaiter3.GetResult(); } } else { TaskAwaiter <HttpRequestMessage> awaiter2 = this.CloneRequest(request, (TokenData)null).GetAwaiter(); if (!awaiter2.IsCompleted) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num = 3; taskAwaiter1 = awaiter2; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter <HttpRequestMessage>, AuthMessageHandler.\u003CSendAsync\u003Ed__5>(ref awaiter2, this); return; } result2 = awaiter2.GetResult(); } TaskAwaiter <HttpResponseMessage> awaiter4 = base.SendAsync(result2, cancellationToken).GetAwaiter(); if (!awaiter4.IsCompleted) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num = 4; TaskAwaiter <HttpResponseMessage> taskAwaiter2 = awaiter4; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter <HttpResponseMessage>, AuthMessageHandler.\u003CSendAsync\u003Ed__5>(ref awaiter4, this); return; } response = awaiter4.GetResult(); result1 = response; } catch (InvalidOperationException ex) { result1 = response; } catch (HttpRequestException ex) { if (!((IConnectivity)Mvx.get_IoCProvider().Resolve <IConnectivity>()).get_IsConnected()) { throw new NoInternetConnectionException().SetErrorCode 
<NoInternetConnectionException>("/Users/admin/myagent/macMiniBlack3/_work/2/s/eKreta.Mobile/eKreta.Mobile.Core.Standard/Services/Handlers/AuthMessageHandler.cs", 90); } throw; } finally { if (num < 0) { this._semaphore.Release(); } } } catch (Exception ex) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = -2; response = (HttpResponseMessage)null; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.SetException(ex); return; } label_26: // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = -2; response = (HttpResponseMessage)null; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.SetResult(result1); }
/// <summary>
/// Builds a syntax-error language exception at the given token's position.
/// </summary>
/// <param name="errorMessage">The error message.</param>
/// <param name="token">The token associated with the error.</param>
/// <returns>A syntax-error <see cref="LangException"/> (not thrown here).</returns>
public LangException BuildSyntaxException(string errorMessage, TokenData token)
{
    var ex = new LangException("Syntax Error", errorMessage, ScriptPath, token.Line, token.LineCharPos);
    return ex;
}
/// <summary>
/// TEST_CASE_2: a token type valid for 10 seconds, usable at most once,
/// with access logging disabled.
/// </summary>
public void TestCase2()
{
    // TEST_CASE_2: valid for 10 seconds, at most 1 use, no access log.
    string resultMsg = null;

    // ########## Part 1: verify the "at most 1 use" limit. ##########
    Dictionary <string, object> userData = new Dictionary <string, object>();
    userData.Add("UserData", "TEST_CASE_2_CREATE");
    // Create the token.
    Guid token1 = this.tokenManager.NewToken("TEST_CASE_2", userData, ref resultMsg);
    // Result must not be Guid.Empty.
    Assert.AreNotEqual(Guid.Empty, token1);
    // First access.
    TokenData tokenData = this.tokenManager.AccessToken(token1, null, ref resultMsg);
    // Result must not be null.
    Assert.IsNotNull(tokenData);
    // Second access.
    tokenData = this.tokenManager.AccessToken(token1, null, ref resultMsg);
    // Result must be null (usage limit reached).
    Assert.IsNull(tokenData);

    // ########## Part 2: verify the 10-second validity window. ##########
    Dictionary <string, object> userData2 = new Dictionary <string, object>();
    userData2.Add("UserData", "TEST_CASE_2_TIMEOUT");
    // Create the token.
    Guid token2 = this.tokenManager.NewToken("TEST_CASE_2", userData2, ref resultMsg);
    // Result must not be Guid.Empty.
    Assert.AreNotEqual(Guid.Empty, token2);
    // Sleep 11 seconds to let the token expire.
    System.Threading.Thread.Sleep(11000);
    // First access after expiry.
    tokenData = this.tokenManager.AccessToken(token2, null, ref resultMsg);
    // Result must be null (token expired).
    Assert.IsNull(tokenData);

    // ########## Part 3: verify that no access log was written. ##########
    var logList = this.tokenManager.GetTokenAccessLog(token1);
    // Result must not be null.
    Assert.IsNotNull(logList);
    // Log row count must be zero.
    Assert.AreEqual(0, logList.Count);
    logList = this.tokenManager.GetTokenAccessLog(token2);
    // Result must not be null.
    Assert.IsNotNull(logList);
    // Log row count must be zero.
    Assert.AreEqual(0, logList.Count);
}
// NOTE(review): As with SendAsync above, this body is decompiler output of the
// compiler-generated async state machine (non-compilable "(^ this)" reference
// operations, escaped "\u003C...\u003E" field names). It should be restored
// from the original async/await source rather than edited. Left byte-identical.
// Observable intent (from the visible calls): copy method, URI and headers of
// the request into a new HttpRequestMessage; when tokenData is supplied,
// replace the Authorization header with "Bearer <AccessToken>"; when the
// request has typed content, re-read it as a string and copy the content
// headers except Content-Type (already set via the StringContent constructor).
private async Task <HttpRequestMessage> CloneRequest( HttpRequestMessage request, TokenData tokenData = null) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field int num1 = (^ this).\u003C\u003E1__state; HttpRequestMessage result; HttpRequestMessage result1; try { result = new HttpRequestMessage(request.Method, request.RequestUri); IEnumerator <KeyValuePair <string, IEnumerable <string> > > enumerator1 = request.Headers.GetEnumerator(); try { while (enumerator1.MoveNext()) { KeyValuePair <string, IEnumerable <string> > current = enumerator1.Current; if (tokenData != null && current.Key == "Authorization") { result.Headers.Add(current.Key, string.Format("Bearer " + tokenData.AccessToken)); } else { result.Headers.Add(current.Key, current.Value); } } } finally { if (num1 < 0 && enumerator1 != null) { enumerator1.Dispose(); } } if (request.Content?.Headers.ContentType != null) { TaskAwaiter <string> awaiter = request.Content.ReadAsStringAsync().GetAwaiter(); if (!awaiter.IsCompleted) { int num2; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = num2 = 0; TaskAwaiter <string> taskAwaiter = awaiter; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.AwaitUnsafeOnCompleted <TaskAwaiter <string>, AuthMessageHandler.\u003CCloneRequest\u003Ed__6>(ref awaiter, this); return; } result.Content = (HttpContent) new StringContent(awaiter.GetResult(), Encoding.UTF8, request.Content.Headers.ContentType.MediaType); IEnumerator <KeyValuePair <string, IEnumerable <string> > > enumerator2 = request.Content.Headers.GetEnumerator(); try { while (enumerator2.MoveNext()) { KeyValuePair <string, IEnumerable <string> > current = enumerator2.Current; if (!current.Key.Equals("Content-Type", StringComparison.OrdinalIgnoreCase)) { result.Content.Headers.Add(current.Key, current.Value); } } } finally { if (num1 < 0 && 
enumerator2 != null) { enumerator2.Dispose(); } } } result1 = result; } catch (Exception ex) { // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = -2; result = (HttpRequestMessage)null; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.SetException(ex); return; } // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003E1__state = -2; result = (HttpRequestMessage)null; // ISSUE: explicit reference operation // ISSUE: reference to a compiler-generated field (^ this).\u003C\u003Et__builder.SetResult(result1); }
/// <summary>
/// Issues a signed JWT for the supplied login info. On failure (null input or
/// unknown user) returns a TokenData whose token field carries the error text
/// and whose expiration is today.
/// </summary>
/// <param name="loginInfo">Login credentials posted in the request body.</param>
/// <returns>TokenData with either the JWT and its expiry, or an error placeholder.</returns>
public TokenData ApiTokenOnly([FromBody] LoginInfo loginInfo)
{
    if (loginInfo == null)
    {
        return new TokenData { token = "Invalid user", expiration = DateTime.Today };
    }

    string userName = loginInfo.userName;

    // PERF/BUGFIX: the original ran the query twice (query.Any() followed by
    // query.Single()). SingleOrDefault executes it once, returns null when the
    // user is unknown, and still throws if the mapping is ambiguous.
    var thisUser = (from r in _context.HREditorialUserMap
                    where r.HumanReviewUserID == userName
                    select r).SingleOrDefault();

    if (thisUser != null)
    {
        // TODO: catch exceptions (Single throws if the role mapping is missing or duplicated).
        var rolequery = (from users in _context.AppUser
                         join roles in _context.AppUserInRole on users.AppUserID equals roles.AppUserID
                         join roletypes in _context.RoleType on roles.RoleTypeID equals roletypes.RoleTypeID
                         where users.AppUserID == thisUser.AppUserID
                         select new { roletypes.RoleTypeID, users.AppUserName }).Single();

        string TokenKey = _configuration.GetSection("TokenValues:key").Value;
        string TokenIssuer = _configuration.GetSection("TokenValues:issuer").Value;
        string TokenAudience = _configuration.GetSection("TokenValues:audience").Value;
        string TokenExpiresInMinutes = _configuration.GetSection("TokenValues:expiresInMinutes").Value;

        // Guaranteed by the where clause above; kept as a defensive check.
        if (thisUser.HumanReviewUserID == userName)
        {
            // TODO: Log information here
            var claimsData = new[]
            {
                new Claim(JwtRegisteredClaimNames.Sub, userName),                    // HR identity
                new Claim(JwtRegisteredClaimNames.GivenName, rolequery.AppUserName), // Editorial identity
                new Claim("rol", rolequery.RoleTypeID.ToString())                    // Editorial Role
            };

            var key = new SymmetricSecurityKey(System.Text.Encoding.UTF8.GetBytes(TokenKey));
            var signInCreds = new SigningCredentials(key, SecurityAlgorithms.HmacSha256);
            var token = new JwtSecurityToken(
                issuer: TokenIssuer,
                audience: TokenAudience,
                expires: DateTime.UtcNow.AddMinutes(Convert.ToInt32(TokenExpiresInMinutes)),
                claims: claimsData,
                signingCredentials: signInCreds
            );
            //token.Payload["IPa"] = UserIPa;

            var ApiToken = new JwtSecurityTokenHandler().WriteToken(token);
            var ApiTokenExpiration = token.ValidTo;
            return new TokenData { token = ApiToken, expiration = ApiTokenExpiration };
        }
    }

    return new TokenData { token = "Login failed, no token provided", expiration = DateTime.Today };
}
/// <summary>
/// Normalizes the text of a whitespace token and records a replacement for it.
/// </summary>
/// <param name="currentToken">Index of the whitespace token being processed.</param>
/// <param name="normalizeFunction">Delegate that produces the normalized whitespace.</param>
/// <param name="token">Token data providing the replacement's start index.</param>
private void ProcessWhitepace(int currentToken, NormalizeWhitespace normalizeFunction, TokenData token)
{
    string whitespace = GetTextForCurrentToken(currentToken);

    // When the previous token is an end-of-line comment on Windows, a leading
    // "\n" must be widened to the platform newline ("\r\n") before normalizing.
    if (HasPreviousToken(currentToken))
    {
        TokenData prior = PreviousTokenData(currentToken);
        if (prior.TokenId == FormatterTokens.LEX_END_OF_LINE_COMMENT
            && whitespace.StartsWith("\n", StringComparison.OrdinalIgnoreCase)
            && RuntimeInformation.IsOSPlatform(OSPlatform.Windows))
        {
            whitespace = Environment.NewLine + whitespace.Substring(1);
        }
    }

    string normalized = normalizeFunction(whitespace, Visitor.Context);
    // The replacement's "old text" is the token's original (unmodified) text.
    AddReplacement(new Replacement(token.StartIndex, GetTextForCurrentToken(currentToken), normalized));
}
/// <summary>
/// Returns true when the given token data represents a whitespace token.
/// </summary>
protected bool IsTokenWhitespace(TokenData tokenData) => TokenManager.IsTokenWhitespace(tokenData.TokenId);
/// <summary>
/// Constructor. Starts with an empty token data store.
/// </summary>
public TokenCollection() => _data = new TokenData();
/// <summary>
/// Debug-only sanity check that the given token is whitespace or a comment.
/// The message stays inside the Debug.Assert call so it is elided (along with
/// its argument evaluation) in non-DEBUG builds.
/// </summary>
/// <param name="td">The token data to check.</param>
/// <param name="tokenIndex">Index of the token, used to fetch its text for the message.</param>
protected void DebugAssertTokenIsWhitespaceOrComment(TokenData td, int tokenIndex)
{
    Debug.Assert(
        TokenManager.IsTokenComment(td.TokenId) || IsTokenWhitespace(td),
        string.Format(
            CultureInfo.CurrentCulture,
            "Unexpected token \"{0}\", expected whitespace or comment.",
            GetTextForCurrentToken(tokenIndex)));
}
/// <summary>
/// Constructor. Wraps an existing token data store.
/// </summary>
/// <param name="pData">The token data store to use.</param>
private TokenCollection(TokenData pData) => _data = pData;
/// <summary>
/// Joins the last-lexed token's text with the number following the current
/// '.' separator into a single literal "version" token (e.g. "1.2"-style
/// dotted numbers), records it in the lexer's parsed tokens and returns it.
/// </summary>
/// <returns>A single-element array holding the combined version token.</returns>
public override Token[] Parse()
{
    // http https ftp ftps www
    var previous = _lexer.LastTokenData;
    var startLine = _lexer.State.Line;
    var startPos = _lexer.State.LineCharPosition;

    // Consume the '.' separator, then read the numeric component after it.
    _lexer.Scanner.ReadChar();
    var numberPart = _lexer.ReadNumber();

    var combinedText = previous.Token.Text + "." + numberPart.Text;
    var versionToken = TokenBuilder.ToLiteralVersion(combinedText);
    _lexer.ParsedTokens.Add(new TokenData { Token = versionToken, Line = startLine, LineCharPos = startPos });
    return new[] { versionToken };
}
/// <summary>
/// Creates a member access expression with its context set from the token.
/// </summary>
/// <param name="nameExpr">Expression for the object whose member is accessed.</param>
/// <param name="memberName">Name of the member being accessed.</param>
/// <param name="isAssignment">Whether the access is the target of an assignment.</param>
/// <param name="token">Token used to set up the expression context.</param>
/// <returns>The configured member access expression.</returns>
public static Expr MemberAccess(Expr nameExpr, string memberName, bool isAssignment, TokenData token)
{
    var exp = new MemberAccessExpr
    {
        IsAssignment = isAssignment,
        VarExp = nameExpr,
        MemberName = memberName
    };
    SetupContext(exp, token);
    return exp;
}
/// <summary>
/// Handles a comment token: starts summary-comment collection when the comment
/// begins with "@summary", accumulates comments while in that mode, then
/// advances past the token.
/// </summary>
/// <param name="tokenData">Token data for the comment token.</param>
/// <param name="token">The comment token itself.</param>
protected void HandleComment(TokenData tokenData, Token token)
{
    var text = token.Text;
    bool startsSummary = text.StartsWith("@summary") || text.StartsWith(" @summary");
    if (startsSummary)
    {
        _hasSummaryComments = true;
        _lastCommentToken = tokenData;
    }

    if (_hasSummaryComments)
    {
        _comments.Add(token);
    }

    // Finally advance past the comment token.
    _tokenIt.Advance();
}
/// <summary>
/// Builds the 32-byte refresh-token payload by interleaving, four bytes per
/// round, the timestamp, consumer id, resource-owner id and 8 random bytes.
/// </summary>
/// <param name="data">Token data supplying the ids and timestamp.</param>
/// <returns>The 32-byte interleaved payload.</returns>
private static byte[] GetRefreshTokenBytes(TokenData data)
{
    var consumerBytes = BitConverter.GetBytes(data.ConsumerId);
    var userBytes = BitConverter.GetBytes(data.ResourceOwnerId);
    var tickBytes = BitConverter.GetBytes(data.Timestamp);
    var randomBytes = GetRandomBytes(8);

    var bytes = new byte[32];
    for (int i = 0; i < 8; i++)
    {
        // Round i fills the 4-byte group starting at i * 4.
        int offset = i * 4;
        bytes[offset] = tickBytes[i];
        bytes[offset + 1] = consumerBytes[i];
        bytes[offset + 2] = userBytes[i];
        bytes[offset + 3] = randomBytes[i];
    }
    return bytes;
}
/// <summary>
/// Accesses (consumes) a token: verifies it exists, is still within its
/// validity window and has not exceeded its usage limit, then increments its
/// access count and persists it. Depending on the token type, an access-log
/// entry is recorded in the finally block regardless of outcome.
/// </summary>
/// <param name="tokenID">Token ID.</param>
/// <param name="userData">Caller-supplied user data, stored as JSON in the access log (may be null).</param>
/// <param name="resultMsg">Receives the success message or the failure reason.</param>
/// <returns>The token data on success; null when the token is missing, expired, over its limit, or an error occurred.</returns>
public TokenData AccessToken(Guid tokenID, Dictionary <string, object> userData, ref string resultMsg)
{
    if (logger.IsDebugEnabled)
    {
        logger.DebugFormat(@"AccessToken(tokenID = {0}, userData = {1}) Start!", tokenID, userData);
    }
    // The token being accessed.
    TokenData result = null;
    // The token's type; declared here so the finally block can read it.
    TokenType tokenType = null;
    try
    {
        // Fetch the token.
        result = GetTokenData(tokenID);
        if (result == null)
        {
            resultMsg = "令牌不存在"; // message: "token does not exist"
            return(null);
        }
        // Fetch the token type.
        tokenType = GetTokenType(result.TokenTypeCode);
        // Still usable (not expired)?
        if (!result.IsUseable)
        {
            resultMsg = "令牌超时"; // message: "token timed out"
            return(null);
        }
        if (tokenType.AccessAbleTimesLimit > 0)
        {
            // The type caps how many times a single token may be accessed.
            if (result.AccessCount >= tokenType.AccessAbleTimesLimit)
            {
                resultMsg = "令牌使用次数超出限额"; // message: "token usage count exceeded"
                return(null);
            }
        }
        // Bump the access counter.
        result.AccessCount++;
        // Persist the updated token data.
        SaveTokenData(result);
        resultMsg = SUCCESS_MESSAGE;
        // Materialize the stored JSON user data, if any.
        if (!String.IsNullOrEmpty(result.UserData))
        {
            result.UserDataObject = JsonConvert.DeserializeObject <Dictionary <string, object> >(result.UserData);
        }
        return(result);
    }
    catch (Exception ex)
    {
        logger.Error(ex.Message, ex);
        resultMsg = ex.Message;
        return(null);
    }
    finally
    {
        if (tokenType != null && tokenType.IsRequireAccessLog)
        {
            // This token type requires an access-log entry.
            TokenAccessLog accessLog = new TokenAccessLog();
            // Token ID.
            accessLog.TokenID = tokenID;
            // Access time.
            accessLog.AccessTime = DateTime.Now;
            // User data, stored as JSON.
            if (userData != null)
            {
                accessLog.UserData = JsonConvert.SerializeObject(userData);
            }
            // Outcome of this access attempt.
            accessLog.AccessResult = resultMsg;
            // Persist the access log.
            SaveTokenAccessLog(accessLog);
        }
        if (logger.IsDebugEnabled)
        {
            logger.DebugFormat(@"AccessToken(tokenID = {0}, userData = {1}, resultMsg = {2}) Finish!", tokenID, userData, resultMsg);
        }
    }
}
/// <summary>
/// Script-engine callback: dispatches an NPC script function id to its handler.
/// Event-label functions (OnInit, OnClick, ...) register script labels; npc/warp
/// construct NPC spawn info; several commands are currently no-ops returning true.
/// </summary>
/// <param name="func">Function id (a Std enum value).</param>
/// <param name="parm">Arguments supplied by the script engine.</param>
/// <param name="ret">Return-value slot (not written by this implementation).</param>
/// <returns>The handler's result; true for no-op or unrecognized functions.</returns>
public override bool OnFunc(int func, Deque <TokenData> parm, ref TokenData ret)
{
    switch (func)
    {
        case (int)Std._return:
            return(true);
        case (int)Std.trace:
            return(FuncTrace(parm[0].GetStr()));
        case (int)Std.rand: // no-op: returns true without evaluating anything
            return(true);
        case (int)Std.lot: // lot
            return(FuncLot(parm[0].GetNum(), parm[1].GetNum()));

        // Script event labels: register the label for the corresponding NPC action.
        case (int)Std.OnInit: // OnInit
            return(FuncLabel(NPCACTION.ONINIT, parm));
        case (int)Std.OnClick: // OnClick
            return(FuncLabel(NPCACTION.ONCLICK, parm));
        case (int)Std.OnTouch: // OnTouch
            return(FuncLabel(NPCACTION.ONTOUCH, parm));
        case (int)Std.OnMyMobDead: // OnMyMobDead
            return(FuncLabel(NPCACTION.ONMYMOBDEAD, parm));
        case (int)Std.OnTimer: // OnTimer
            return(FuncLabel(NPCACTION.ONTIMER, parm));
        case (int)Std.OnCommand: // OnCommand
            return(FuncLabel(NPCACTION.ONCOMMAND, parm));
        case (int)Std.OnStartArena: // OnStartArena
            return(FuncLabel(NPCACTION.ONSTARTARENA, parm));
        case (int)Std.OnTouchNPC: // OnTouchNPC
            return(FuncLabel(NPCACTION.ONTOUCHNPC, parm));
        case (int)Std.OnTouch2: // OnTouch2
            return(FuncLabel(NPCACTION.ONTOUCH2, parm));
        case (int)Std.OnMoveNpcCmd_XXX: // OnMoveNpcCmd:_XXX
            return(FuncLabel(NPCACTION.ONMOVENPCCMD, parm));
        case (int)Std.OnCampCommand: // OnCampCommand
            return(FuncLabel(NPCACTION.ONCAMPCOMMAND, parm));
        case (int)Std.OnCampCommand2: // OnCampCommand2
            return(FuncLabel(NPCACTION.ONCAMPCOMMAND2, parm));
        case (int)Std.OnAgitInvest: // OnAgitInvest
            return(FuncLabel(NPCACTION.ONAGITINVEST, parm));
        case (int)Std.OnMyMobDead_NPC: // OnMyMobDead_NPC
            return(FuncLabel(NPCACTION.ONMYMOBDEAD_NPC, parm));
        case (int)Std.OnPlayerDead: // OnPlayerDead
            return(FuncLabel(NPCACTION.ONPLAYERDEAD, parm));

        // npc: spawn an NPC with zone/name/sprite/position/direction/size arguments.
        case (int)Std.npc:
        {
            var npcInfo = new NpcSvrNpcInfo
            {
                Zone = parm[0].GetStr(),
                Name = parm[1].GetStr(),
                Sprite = parm[2].GetNum(),
                X = parm[3].GetNum(),
                Y = parm[4].GetNum(),
                Direction = parm[5].GetNum(),
                W = parm[6].GetNum(),
                H = parm[7].GetNum()
            };
            return(FuncNpc(npcInfo));
        }
        case (int)Std.mob: // no-op
            return(true);
        // warp: like npc but without sprite/direction arguments.
        case (int)Std.warp:
        {
            var npcInfo = new NpcSvrNpcInfo
            {
                Zone = parm[0].GetStr(),
                Name = parm[1].GetStr(),
                X = parm[2].GetNum(),
                Y = parm[3].GetNum(),
                W = parm[4].GetNum(),
                H = parm[5].GetNum()
            };
            return(FuncNpc(npcInfo));
        }
        case (int)Std.dialog:
            Debug.Print($"dialog \"{parm[0].GetStr()}\"");
            return(true);
        case (int)Std.moveto: // no-op
            return(true);
        case (int)Std.wait:
            // Enter wait state; the resume time is set one minute ahead.
            _waitType = 1;
            _waitTime = DateTime.UtcNow.AddMinutes(1);
            return(true);
        case (int)Std.close: // no-op
            return(true);
        case (int)Std.checkpoint: // no-op
            return(true);
        case (int)Std.DisableItemMove:
            return(FuncDisableItemMove());
        case (int)Std.EnableItemMove:
            return(FuncEnableItemMove());
    }
    //bool result = false;
    //if (_npcId != null)
    //    result = DefaultNpcFunc(_npcId, parm, func, ret);
    //else
    //    result = GlobalFunc(func, parm, v4, ret);
    // Unrecognized function ids are logged and treated as handled.
    Debug.Print($"Missing function: {func.ToString()}");
    return(true);
}
/// <summary>
/// Script-engine callback that maps skill-definition functions onto the skill
/// currently being built (_currentSkill). SetSkill starts a new skill; the
/// remaining functions fill in its fields.
/// </summary>
/// <param name="func">Function id (a Functions enum value).</param>
/// <param name="parm">Arguments; the last element is a terminator and is never read.</param>
/// <param name="ret">Return-value slot required by the base signature (unused).</param>
/// <returns>Always true.</returns>
public override bool OnFunc(int func, Deque <TokenData> parm, ref TokenData ret)
{
    switch (func)
    {
        case (int)Functions.SetSkill:
            // Start a new skill definition; subsequent calls configure it.
            _currentSkill = new Skill { Id = parm[0].num };
            Skills.Add(_currentSkill);
            _onInit = false;
            break;
        case (int)Functions.SkillType:
            _currentSkill.Type = (byte)parm[0].num;
            break;
        case (int)Functions.SkillMaxLv:
            _currentSkill.MaxLevel = (byte)parm[0].num;
            break;
        case (int)Functions.SkillPattern:
            _currentSkill.Pattern = (byte)parm[0].num;
            break;
        case (int)Functions.SkillProperty:
            _currentSkill.Property = (byte)parm[0].num;
            break;
        case (int)Functions.SkillFlag:
            // Flags arrive as separate values that are summed; a zero total
            // means "no flags" and leaves the current value untouched.
            var flag = 0;
            for (var i = 0; i < parm.Count - 1; i++)
            {
                flag += parm[i].num;
            }
            if (flag > 0)
            {
                _currentSkill.Flag = flag;
            }
            break;
        case (int)Functions.SkillEvent:
            _currentSkill.Event = true;
            break;

        // Per-level integer arrays: each of these simply copies the integer
        // arguments into the corresponding field (deduplicated via ReadIntArgs).
        case (int)Functions.SkillSinglePostDelay:
            _currentSkill.Cooldown = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillGlobalPostDelay:
            _currentSkill.GlobalCooldown = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillCastStatDelay:
            _currentSkill.VariableCastTime = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillCastFixedDelay:
            _currentSkill.FixedCastTime = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillRange:
            _currentSkill.Range = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillSP:
            _currentSkill.Sp = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillSplash:
            _currentSkill.Splash = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillHitPer:
            _currentSkill.HitPer = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillAtkPer:
            _currentSkill.AtkPer = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData1:
            _currentSkill.Data1 = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData2:
            _currentSkill.Data2 = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData3:
            _currentSkill.Data3 = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData4:
            _currentSkill.Data4 = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData5:
            _currentSkill.Data5 = ReadIntArgs(parm);
            break;
        case (int)Functions.SkillData6:
            _currentSkill.Data6 = ReadIntArgs(parm);
            break;

        // Recognized but intentionally ignored.
        case (int)Functions.OnSuccess:
        case (int)Functions.OnUse:
        case (int)Functions.SkillHandicap:
        case (int)Functions.SkillResetHandicap:
        case (int)Functions.SkillNoDamage:
        case (int)Functions.SkillKnockBack:
        case (int)Functions.SkillDrainHP:
        case (int)Functions.SkillDrainSP:
        case (int)Functions.SkillPostDelayTime:
        case (int)Functions.SkillPreDelayTime:
        case (int)Functions.SkillDataString:
            break;
        case (int)Functions.OnInit:
            _onInit = true;
            break;
        case (int)Functions.Return:
            _onInit = false;
            break;
        default:
            Console.WriteLine($"Unknown Function {func}");
            break;
    }
    return true;
}

/// <summary>
/// Collects the integer arguments into an array; the trailing terminator entry
/// (the last element of parm) is excluded, matching the original loops' bound.
/// </summary>
private static int[] ReadIntArgs(Deque <TokenData> parm)
{
    var values = new List <int>(parm.Count > 0 ? parm.Count - 1 : 0);
    for (var i = 0; i < parm.Count - 1; i++)
    {
        values.Add(parm[i].num);
    }
    return values.ToArray();
}
/// <summary>
/// Applies the last collected doc-tag comments to the function statement,
/// copying argument descriptions/aliases onto the function metadata, then
/// clears the comment-collection state.
/// </summary>
/// <param name="stmt">The statement the doc tags precede; must be a function declaration.</param>
protected void ApplyDocTagsToFunction(Expr stmt)
{
    if (!_hasSummaryComments) return;

    if (!(stmt.IsNodeType(NodeTypes.SysFunctionDeclare)))
    {
        throw _tokenIt.BuildSyntaxUnexpectedTokenException(_lastCommentToken);
    }

    // Get the function associated w/ the declaration.
    // Parse the doc tags.
    // Apply the doc tags to the function.
    var func = ((FunctionDeclareExpr)stmt).Function;
    var tags = DocHelper.ParseDocTags(_comments);
    func.Meta.Doc = tags.Item1;

    // Associate all the argument specifications to the function metadata.
    foreach (var arg in tags.Item1.Args)
    {
        if (string.IsNullOrEmpty(arg.Name)) continue;

        // BUGFIX: the built exception was previously discarded (not thrown),
        // which let execution fall through to a KeyNotFoundException on the
        // indexer below.
        if (!func.Meta.ArgumentsLookup.ContainsKey(arg.Name))
            throw _tokenIt.BuildSyntaxException("Doc argument name : '" + arg.Name + "' does not exist in function : " + func.Name);

        var funcArg = func.Meta.ArgumentsLookup[arg.Name];
        funcArg.Alias = arg.Alias;
        funcArg.Desc = arg.Desc;
        funcArg.Examples = arg.Examples;
        funcArg.Type = arg.Type;

        // Now associate the alias to the arg names.
        // BUGFIX: only index the lookup dictionaries when the alias is
        // non-empty — previously a null alias was used as a dictionary key
        // before the IsNullOrEmpty guard, throwing ArgumentNullException.
        if (!string.IsNullOrEmpty(funcArg.Alias))
        {
            func.Meta.ArgumentsLookup[funcArg.Alias] = funcArg;
            func.Meta.ArgumentNames[funcArg.Alias] = funcArg.Alias;
        }
    }

    // Clear the comment state.
    _comments.Clear();
    _hasSummaryComments = false;
    _lastCommentToken = null;
}
/// <summary>
/// Get the next batch of tokens from the scanner, applying lexical plugins,
/// token replacements and token removals along the way.
/// </summary>
/// <param name="count">Maximum number of tokens to collect, or -1 to read to the end of the script.</param>
/// <returns>The list of collected tokens; the end token is always appended when reached.</returns>
public List<TokenData> GetTokenBatch(int count)
{
    _tokenIndex = -1;
    _tokens = new List<TokenData>();
    var hasPlugins = _ctx.Plugins.TotalLexical > 0;
    var hasJsPlugins = _ctx.PluginsMeta.TotalLex() > 0;
    TokenData last = null;
    while (true)
    {
        var token = NextToken();

        // Set the index position of the token in the script (whitespace is not counted).
        if (token.Token != Tokens.WhiteSpace)
        {
            _tokenIndex++;
            token.SetIndex(_tokenIndex);
        }

        // 1. End of script?
        if (token.Token == Tokens.EndToken)
        {
            _tokens.Add(token);
            break;
        }

        // 2. Null token? (e.g. a bare '\r' consumed by the lexer) — skip it.
        if (token.Token == null)
        {
            continue;
        }

        // Avoid storing white space tokens.
        if (token.Token != Tokens.WhiteSpace)
        {
            var isNewLine = token.Token == Tokens.NewLine;

            // 3a. Meta (script-defined) lexical plugins?
            if (!isNewLine && hasJsPlugins && _ctx.PluginsMeta.CanHandleLex(token.Token))
            {
                // BUGFIX: the result was assigned to an unused local; the call is
                // retained purely for its side effects on the token stream.
                var visitor = this.OnDemandEvaluator;
                _ctx.PluginsMeta.ParseLex(visitor);
            }
            // 3b. System lexical plugins?
            else if (!isNewLine && hasPlugins && _ctx.Plugins.CanHandleLex(token.Token))
            {
                var plugin = _ctx.Plugins.LastMatchedLexPlugin;
                plugin.Parse();
            }
            // 4. No replacements/removals configured — add directly.
            else if (!_hasReplacementsOrRemovals)
            {
                _tokens.Add(token);
            }
            // 5. Special handling cases.
            // Case 1: Replace token?
            else if (_replacements.ContainsKey(token.Token.Text))
            {
                var replaceVal = _replacements[token.Token.Text];

                // Replaces a known system token?
                if (Tokens.ContainsKey(replaceVal))
                {
                    var t = Tokens.GetToken(replaceVal);
                    token.Token = t;
                }
                else
                {
                    token.Token.SetText(replaceVal);
                }
                _tokens.Add(token);
            }
            // Case 2: Remove token? Only keep it when it is NOT marked for removal.
            else if (!_removals.ContainsKey(token.Token.Text))
            {
                _tokens.Add(token);
            }
        }

        // If only getting a limited number of tokens, stop at the requested count.
        if (count != -1 && _tokens.Count >= count)
        {
            break;
        }

        // Guard against an infinite loop: the lexer must make progress each iteration.
        if (last == token)
        {
            throw new LangException("Syntax Error", "Unexpected token", string.Empty,
                _scanner.State.Line, _scanner.State.LineCharPosition);
        }
        last = token;
    }
    return _tokens;
}
/// <summary>
/// Reads the next token from the reader.
/// </summary>
/// <returns> A token, or <c>null</c> if there are no more tokens. </returns>
public TokenData NextToken()
{
    // !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
    // LEXER ALWAYS READS NEXT CHAR
    // Capture the scanner position BEFORE consuming anything so the TokenData
    // records where the token started (pos/line/line-char-pos).
    var c = _scanner.State.CurrentChar;
    var n = _scanner.PeekChar();
    //var tokenLengthCalcMode = TokenLengthCalcMode.Direct;
    var pos = _scanner.State.Pos;
    var line = _scanner.State.Line;
    // NOTE(review): tokenLength is computed in several branches but never read in this
    // method as shown — presumably consumed elsewhere or vestigial; confirm before removing.
    var tokenLength = 0;
    var cpos = _scanner.State.LineCharPosition;
    // End of input.
    if (_scanner.IsEnded())
    {
        _lastToken = Tokens.EndToken;
    }
    // Empty space (spaces/tabs are collapsed into a single whitespace token).
    else if (c == ' ' || c == '\t')
    {
        _scanner.ConsumeWhiteSpace(false, true);
        _lastToken = Tokens.WhiteSpace;
        tokenLength = (_scanner.State.Pos - pos) + 1;
        //tokenLengthCalcMode = TokenLengthCalcMode.WhiteSpace;
    }
    // Variable / identifier / keyword.
    else if (_scanner.IsIdentStart(c))
    {
        _lastToken = ReadWord();
    }
    // Single line comment: "//" to end of line.
    else if (c == '/' && n == '/')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanToNewLine(false, true);
        //tokenLengthCalcMode = TokenLengthCalcMode.String;
        tokenLength = (_scanner.State.Pos - pos) + 1;
        _lastToken = TokenBuilder.ToComment(false, result.Text);
    }
    // Multi-line comment: "/*" ... "*/".
    else if (c == '/' && n == '*')
    {
        _scanner.MoveChars(2);
        var result = _scanner.ScanUntilChars(false, '*', '/', false, true);
        //tokenLengthCalcMode = TokenLengthCalcMode.MultilineComment;
        tokenLength = _scanner.State.LineCharPosition;
        _lastToken = TokenBuilder.ToComment(true, result.Text);
    }
    // Single '|' (pipe) — but not "||", which falls through to the operator branch.
    else if (c == '|' && n != '|')
    {
        _lastToken = Tokens.Pipe;
    }
    // Operator ( Math, Compare, Increment ) * / + -, < <= > >= != =
    else if (_scanner.IsOp(c) == true)
    {
        _lastToken = ReadOperator();
    }
    // Single-character symbols.
    else if (c == '(')
    {
        _lastToken = Tokens.LeftParenthesis;
    }
    else if (c == ')')
    {
        _lastToken = Tokens.RightParenthesis;
    }
    else if (c == '[')
    {
        _lastToken = Tokens.LeftBracket;
    }
    else if (c == ']')
    {
        _lastToken = Tokens.RightBracket;
    }
    else if (c == '.')
    {
        _lastToken = Tokens.Dot;
    }
    else if (c == ',')
    {
        _lastToken = Tokens.Comma;
    }
    else if (c == ':')
    {
        _lastToken = Tokens.Colon;
    }
    else if (c == '{')
    {
        _lastToken = Tokens.LeftBrace;
    }
    else if (c == '}')
    {
        _lastToken = Tokens.RightBrace;
    }
    else if (c == ';')
    {
        _lastToken = Tokens.Semicolon;
    }
    else if (c == '$')
    {
        _lastToken = Tokens.Dollar;
    }
    else if (c == '@')
    {
        _lastToken = Tokens.At;
    }
    else if (c == '#')
    {
        _lastToken = Tokens.Pound;
    }
    else if (c == '?')
    {
        _lastToken = Tokens.Question;
    }
    else if (c == '\\')
    {
        _lastToken = Tokens.BackSlash;
    }
    // String literal: double- or single-quoted.
    else if (c == '"' || c == '\'')
    {
        _lastToken = ReadString(c == '"');
        //tokenLengthCalcMode = TokenLengthCalcMode.String;
        if (_lastToken.Kind == TokenKind.Multi)
        {
            // Multi-line string: recover the raw text from the scanner buffer,
            // excluding the surrounding quote characters.
            tokenLength = (_scanner.State.Pos - pos) - 2;
            string text = _scanner.State.Text.Substring(pos + 1, tokenLength);
            _lastToken.SetText(text);
        }
        else
        {
            // +2 accounts for the opening and closing quotes.
            tokenLength = _lastToken.Text.Length + 2;
        }
    }
    // Numeric literal.
    else if (_scanner.IsNumeric(c))
    {
        _lastToken = ReadNumber();
    }
    // Carriage return: handle "\r\n" as a single 2-char newline.
    // NOTE(review): _lastToken is not reassigned here, so the produced TokenData
    // carries the previous token — presumably filtered out by the caller; confirm.
    else if (c == '\r')
    {
        bool is2CharNewline = n == '\n';
        IncrementLine(is2CharNewline);
    }
    else
    {
        _lastToken = Tokens.Unknown;
    }
    // Package the token with its starting position info.
    var t = new TokenData() { Token = _lastToken, Line = line, LineCharPos = cpos, Pos = pos };
    _lastTokenData = t;
    // Single char symbol - char advancement was not made.
    // (The symbol branches above only inspect the char; advance the scanner here.)
    if ((t.Token.Kind == TokenKind.Symbol
        || t.Token.Type == TokenTypes.Unknown
        || t.Token.Type == TokenTypes.WhiteSpace) && _scanner.State.Pos == pos)
    {
        _scanner.ReadChar();
    }
    return (t);
}
/// <summary>
/// Starts the AsyncGet coroutine for the given url.
/// </summary>
/// <param name="url">Address to request.</param>
/// <param name="client">Client passed through to the coroutine.</param>
/// <param name="token">Token data passed through to the coroutine.</param>
public void Get(string url, INetClient client, TokenData token)
{
    var request = AsyncGet(url, client, token);
    StartCoroutine(request);
}