/// <summary>
/// Schedules <paramref name="batch"/> for another delivery attempt by running the
/// private enqueue delegate through the configured retry policy.
/// </summary>
public override Task EnqueueForRetry(OutgoingMessageBatch batch)
{
    // NOTE(review): the CancellationToken supplied by the policy is not forwarded
    // to enqueueForRetry — confirm that is intentional.
    return _policy.ExecuteAsync(_ => enqueueForRetry(batch), _settings.Cancellation);
}
/// <summary>Runs the wrapped operation under the retry policy.</summary>
public async Task RunAsync()
{
    Task guarded = _retryPolicy.ExecuteAsync(_func);
    await guarded.ConfigureAwait(false);
}
/// <summary>Fetches every meeting from the meeting service under the retry policy.</summary>
public async Task<List<Meeting>> GetAllMeetings()
{
    List<Meeting> meetings = await policy.ExecuteAsync(async () => await meetingService.GetAllMeetings());
    return meetings;
}
/// <summary>Invokes the inner handler with <paramref name="args"/> under retry protection.</summary>
public async Task ExecuteAsync(TArgs args)
{
    Task RunHandler() => _handler.ExecuteAsync(args);

    await _retryPolicy
        .ExecuteAsync(async () => await RunHandler().ConfigureAwait(false))
        .ConfigureAwait(false);
}
/// <summary>Deletes the incoming envelope from persistence, retrying transient failures.</summary>
public Task MarkComplete() =>
    _policy.ExecuteAsync(() => _persistence.DeleteIncomingEnvelope(_envelope));
/// <summary>
/// Persists <paramref name="managedCertificate"/> to the SQLite store under the DB
/// mutex: bumps the version, logs (but does not block on) version conflicts with the
/// currently stored row, then upserts the serialized item inside a transaction.
/// Returns the (possibly id-assigned) certificate, or null when given null.
/// </summary>
public async Task<ManagedCertificate> Update(ManagedCertificate managedCertificate)
{
    if (managedCertificate == null)
    {
        return null;
    }

    // FIX: the original ignored WaitAsync's bool result, so on a timeout it would
    // proceed WITHOUT holding the mutex and still Release() it in the finally
    // block, corrupting the semaphore count. Acquire first; only enter the
    // try/finally once the mutex is actually held.
    var lockAcquired = await _dbMutex.WaitAsync(10 * 1000).ConfigureAwait(false);
    if (!lockAcquired)
    {
        throw new TimeoutException("Timed out waiting for database mutex while updating managed certificate.");
    }

    try
    {
        if (managedCertificate.Id == null)
        {
            managedCertificate.Id = Guid.NewGuid().ToString();
        }

        managedCertificate.Version++;
        if (managedCertificate.Version == long.MaxValue)
        {
            // rollover version, unlikely but accommodate it anyway
            managedCertificate.Version = -1;
        }

        await _retryPolicy.ExecuteAsync(async () =>
        {
            using (var db = new SQLiteConnection(_connectionString))
            {
                await db.OpenAsync();

                ManagedCertificate current = null;

                using (var tran = db.BeginTransaction())
                {
                    // get current version from DB to detect concurrent edits
                    using (var cmd = new SQLiteCommand("SELECT json FROM manageditem WHERE id=@id", db))
                    {
                        cmd.Parameters.Add(new SQLiteParameter("@id", managedCertificate.Id));

                        using (var reader = await cmd.ExecuteReaderAsync())
                        {
                            if (await reader.ReadAsync())
                            {
                                current = JsonConvert.DeserializeObject<ManagedCertificate>((string)reader["json"]);
                                current.IsChanged = false;
                            }

                            reader.Close();
                        }
                    }

                    if (current != null)
                    {
                        if (managedCertificate.Version != -1 && current.Version >= managedCertificate.Version)
                        {
                            // version conflict: logged only — the write below still wins
                            _log?.Error("Managed certificate DB version conflict - newer managed certificate version already stored. UI may have updated item while request was in progress.");
                        }
                    }

                    using (var cmd = new SQLiteCommand("INSERT OR REPLACE INTO manageditem (id, json) VALUES (@id,@json)", db))
                    {
                        cmd.Parameters.Add(new SQLiteParameter("@id", managedCertificate.Id));
                        cmd.Parameters.Add(new SQLiteParameter("@json", JsonConvert.SerializeObject(managedCertificate, new JsonSerializerSettings { Formatting = Formatting.Indented, NullValueHandling = NullValueHandling.Ignore })));
                        await cmd.ExecuteNonQueryAsync();
                    }

                    tran.Commit();
                }

                db.Close();
            }
        });
    }
    finally
    {
        // Safe: we only reach the try block after a successful acquisition.
        _dbMutex.Release();
    }

    return managedCertificate;
}
/// <summary>
/// Worker loop: dequeues WebDirectory items and processes them — FTP via FtpParser,
/// Google Drive via its indexer, everything else via HTTP under the retry policy
/// (with curl/Chrome User-Agent fallbacks and Calibre detection) — until the queue
/// drains with no other worker running, or the token is cancelled.
/// </summary>
private async Task WebDirectoryProcessor(ConcurrentQueue<WebDirectory> queue, string name, CancellationToken token)
{
    Logger.Debug($"Start [{name}]");

    do
    {
        // Count this worker as busy for the duration of one dequeue attempt.
        Interlocked.Increment(ref RunningWebDirectoryThreads);

        if (queue.TryDequeue(out WebDirectory webDirectory))
        {
            try
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo[name] = webDirectory;
                }

                // Each URL is processed at most once per session.
                if (!Session.ProcessedUrls.Contains(webDirectory.Url))
                {
                    Session.ProcessedUrls.Add(webDirectory.Url);

                    Logger.Info($"[{name}] Begin processing {webDirectory.Url}");

                    if (Session.Root.Uri.Scheme == "ftp")
                    {
                        // FTP sites get their own parser.
                        WebDirectory parsedWebDirectory = await FtpParser.ParseFtpAsync(name, webDirectory);
                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else if (Session.Root.Uri.Host == "drive.google.com")
                    {
                        // The Drive indexer rewrites the URL; restore the original afterwards.
                        string baseUrl = webDirectory.Url;
                        WebDirectory parsedWebDirectory = await GoogleDriveIndexer.IndexAsync(webDirectory);
                        parsedWebDirectory.Url = baseUrl;
                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else
                    {
                        // Only crawl URLs on the same host and under the root path.
                        if (webDirectory.Uri.Host == Session.Root.Uri.Host && webDirectory.Uri.LocalPath.StartsWith(Session.Root.Uri.LocalPath))
                        {
                            Logger.Debug($"[{name}] Start download '{webDirectory.Url}'");
                            Session.TotalHttpRequests++;

                            await RetryPolicy.ExecuteAsync(async () =>
                            {
                                webDirectory.StartTime = DateTimeOffset.UtcNow;
                                HttpResponseMessage httpResponseMessage = await HttpClient.GetAsync(webDirectory.Url);
                                string html = null;

                                if (httpResponseMessage.IsSuccessStatusCode)
                                {
                                    html = await GetHtml(httpResponseMessage);
                                }

                                // Fallback 1: retry the very first request with a curl-like User-Agent.
                                if (FirstRequest && !httpResponseMessage.IsSuccessStatusCode || httpResponseMessage.IsSuccessStatusCode && string.IsNullOrWhiteSpace(html))
                                {
                                    Logger.Warn("First request fails, using Curl fallback User-Agent");
                                    HttpClient.DefaultRequestHeaders.UserAgent.Clear();
                                    HttpClient.DefaultRequestHeaders.UserAgent.ParseAdd(UserAgent_Curl);
                                    httpResponseMessage = await HttpClient.GetAsync(webDirectory.Url);

                                    if (httpResponseMessage.IsSuccessStatusCode)
                                    {
                                        html = await GetHtml(httpResponseMessage);
                                        Logger.Warn("Yes, this Curl User-Agent did the trick!");
                                    }
                                }

                                // Fallback 2: try a Chrome User-Agent.
                                if (FirstRequest && !httpResponseMessage.IsSuccessStatusCode || httpResponseMessage.IsSuccessStatusCode && string.IsNullOrWhiteSpace(html))
                                {
                                    Logger.Warn("First request fails, using Chrome fallback User-Agent");
                                    HttpClient.DefaultRequestHeaders.UserAgent.Clear();
                                    HttpClient.DefaultRequestHeaders.UserAgent.ParseAdd(UserAgent_Chrome);
                                    httpResponseMessage = await HttpClient.GetAsync(webDirectory.Url);

                                    if (httpResponseMessage.IsSuccessStatusCode)
                                    {
                                        html = await GetHtml(httpResponseMessage);
                                        Logger.Warn("Yes, the Chrome User-Agent did the trick!");
                                    }
                                }

                                // Calibre detection: first via the Server header, then via a marker in the HTML.
                                bool calibreDetected = false;
                                string calibreVersionString = string.Empty;

                                if (httpResponseMessage.IsSuccessStatusCode)
                                {
                                    FirstRequest = false;

                                    List<string> serverHeaders = new List<string>();

                                    if (httpResponseMessage.Headers.Contains("Server"))
                                    {
                                        serverHeaders = httpResponseMessage.Headers.GetValues("Server").ToList();
                                        calibreDetected = serverHeaders.Any(h => h.Contains("calibre"));
                                    }

                                    if (calibreDetected)
                                    {
                                        string serverHeader = string.Join("/", serverHeaders);
                                        calibreVersionString = serverHeader;
                                    }
                                    else
                                    {
                                        if (html == null)
                                        {
                                            html = await GetHtml(httpResponseMessage);
                                        }

                                        // UNTESTED (cannot find or down Calibre with this issue)
                                        const string calibreVersionIdentifier = "CALIBRE_VERSION = \"";
                                        calibreDetected = html?.Contains(calibreVersionIdentifier) == true;

                                        if (calibreDetected)
                                        {
                                            int calibreVersionIdentifierStart = html.IndexOf(calibreVersionIdentifier);
                                            // NOTE(review): Substring's second argument is a LENGTH, but the
                                            // absolute index of the closing quote is passed here, and the
                                            // start index points at the identifier rather than the version
                                            // value — this looks wrong; verify against a live Calibre page.
                                            calibreVersionString = html.Substring(calibreVersionIdentifierStart, html.IndexOf("\"", ++calibreVersionIdentifierStart));
                                        }
                                    }
                                }

                                if (calibreDetected)
                                {
                                    // Calibre is indexed rate-limited so the server isn't overwhelmed.
                                    Version calibreVersion = CalibreParser.ParseVersion(calibreVersionString);
                                    Console.WriteLine($"Calibre {calibreVersion} detected! I will index it at max 100 books per 30 seconds, else it will break Calibre...");
                                    Logger.Info($"Calibre {calibreVersion} detected! I will index it at max 100 books per 30 seconds, else it will break Calibre...");
                                    await CalibreParser.ParseCalibre(HttpClient, httpResponseMessage.RequestMessage.RequestUri, webDirectory, calibreVersion);
                                    return;
                                }

                                Uri originalUri = new Uri(webDirectory.Url);
                                Logger.Debug($"[{name}] Finish download '{webDirectory.Url}'");

                                // Process only same site
                                if (httpResponseMessage.RequestMessage.RequestUri.Host == Session.Root.Uri.Host)
                                {
                                    // Track the HTTP status code distribution for the session.
                                    int httpStatusCode = (int)httpResponseMessage.StatusCode;

                                    if (!Session.HttpStatusCodes.ContainsKey(httpStatusCode))
                                    {
                                        Session.HttpStatusCodes[httpStatusCode] = 0;
                                    }

                                    Session.HttpStatusCodes[httpStatusCode]++;

                                    if (httpResponseMessage.IsSuccessStatusCode)
                                    {
                                        if (html == null)
                                        {
                                            html = await GetHtml(httpResponseMessage);
                                        }

                                        Session.TotalHttpTraffic += html.Length;

                                        WebDirectory parsedWebDirectory = await DirectoryParser.ParseHtml(webDirectory, html, HttpClient);
                                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                                    }
                                    else
                                    {
                                        Session.Errors++;
                                        webDirectory.Error = true;

                                        if (!Session.UrlsWithErrors.Contains(webDirectory.Url))
                                        {
                                            Session.UrlsWithErrors.Add(webDirectory.Url);
                                        }

                                        // Throwing here makes the retry policy retry the whole download.
                                        httpResponseMessage.EnsureSuccessStatusCode();
                                    }
                                }
                                else
                                {
                                    // Redirected off-site: record as skipped.
                                    Logger.Warn($"[{name}] Skipped result of '{webDirectory.Url}' which points to '{httpResponseMessage.RequestMessage.RequestUri}'");
                                    Session.Skipped++;
                                }
                            });
                        }
                        else
                        {
                            Logger.Warn($"[{name}] Skipped result of '{webDirectory.Url}' because it is not the same host or path");
                            Session.Skipped++;
                        }
                    }

                    Logger.Info($"[{name}] Finished processing {webDirectory.Url}");
                }
                else
                {
                    Logger.Warn($"[{name}] Skip, already processed: {webDirectory.Uri}");
                }
            }
            catch (Exception ex)
            {
                Logger.Error(ex, $"Error processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory.Url}'");
                Session.Errors++;

                if (!Session.UrlsWithErrors.Contains(webDirectory.Url))
                {
                    Session.UrlsWithErrors.Add(webDirectory.Url);
                }
            }
            finally
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo.Remove(name);
                }
            }
        }

        Interlocked.Decrement(ref RunningWebDirectoryThreads);

        // Needed!
        await Task.Delay(TimeSpan.FromMilliseconds(10));
    }
    while (!token.IsCancellationRequested && (!queue.IsEmpty || RunningWebDirectoryThreads > 0));

    Logger.Debug($"Finished [{name}]");
}
/// <summary>Queries the given user, retrying with exponential back-off.</summary>
public async Task<HttpResponseMessage> QueryUser(int userId)
{
    HttpResponseMessage response = await exponentialRetryPolicy.ExecuteAsync(() => DoUserQuery(userId));
    return response;
}
/// <summary>Publishes the prediction via the inner publisher under the HTTP retry policy.</summary>
private async Task<MqttClientPublishResult> PublishAsync(IPrediction prediction, string fileName, CancellationToken token)
{
    var publishResult = await _httpRetryPolicy
        .ExecuteAsync(() => _publisher.PublishAsync(prediction, fileName, token))
        .ConfigureAwait(false);

    return publishResult;
}
/// <summary>Sends the request through the base handler, wrapped in the retry policy.</summary>
protected override async Task<HttpResponseMessage> SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
{
    HttpResponseMessage response = await _policy.ExecuteAsync(async () => await base.SendAsync(request, cancellationToken));
    return response;
}
/// <summary>
/// Dapper-style ExecuteAsync wrapped in the shared retry policy; returns the number
/// of rows affected.
/// </summary>
public static Task<int> ExecuteWithRetryAsync(this IDbConnection conn, string sql, object param = null, IDbTransaction transaction = null, int? commandTimeout = null, CommandType? commandType = null)
{
    return RetryPolicy.ExecuteAsync(async () => await conn.ExecuteAsync(sql, param, transaction, commandTimeout, commandType));
}
/// <summary>
/// Builds a map of team name → repository connections (repository name + permission)
/// for the given GitHub organization, paging through teams (100 at a time) and
/// through each team's repositories via the abuse-rate-limit retry policy.
/// </summary>
public async Task<Dictionary<string, List<TeamRepositoryConnection>>> ReadTeamToRepositoriesMaps(string organization)
{
    var teamToRespositoriesMap = new Dictionary<string, List<TeamRepositoryConnection>>();
    var moreTeamsToRead = true;
    string teamAfterCursor = null;

    logger.LogDebug($"Reading team information for organziation {organization}");

    while (moreTeamsToRead)
    {
        // First page of repositories for up to 100 teams per request.
        var allTeamsRepositoriesQuery = @" query ($login: String!, $afterCursor: String) { organization(login: $login) { teams(first:100, after: $afterCursor, orderBy:{field:NAME, direction:ASC}){ nodes{ name, repositoryEdges: repositories(first:100){ edges{ permission repository : node { name } } pageInfo{ endCursor, hasNextPage } } }, pageInfo{ endCursor, hasNextPage } } } } ";
        var allTeamsRepositoriesVariables = new { login = organization, afterCursor = teamAfterCursor };

        var graphQLOrganization = await githubAbuseRateLimitPolicy.ExecuteAsync(() =>
        {
            return (graphQLClient.QueryAsync<Model.Github.GraphQL.Organization>(allTeamsRepositoriesQuery, allTeamsRepositoriesVariables));
        }).ConfigureAwait(false);

        if (graphQLOrganization.Teams.Nodes != null && graphQLOrganization.Teams.Nodes.Any())
        {
            foreach (var team in graphQLOrganization.Teams.Nodes)
            {
                var teamRepositoryConnections = new List<TeamRepositoryConnection>();

                foreach (var teamRepositoryEdge in team.RepositoryEdges.Edges)
                {
                    var teamRepositoryConnection = new TeamRepositoryConnection { RepositoryName = teamRepositoryEdge.Repository.Name, TeamPermissions = teamRepositoryEdge.Permission };
                    teamRepositoryConnections.Add(teamRepositoryConnection);
                }

                // Now get the additional pages of repositories if we need to
                bool moreTeamRepositoriesToRead = team.RepositoryEdges.PageInfo.HasNextPage;
                var afterCursor = team.RepositoryEdges.PageInfo.EndCursor;

                while (moreTeamRepositoriesToRead)
                {
                    logger.LogDebug($"Reading additional repository information for team {team.Name}");

                    var result = await GetAdditionalTeamRepositoriesAsync(team.Name, afterCursor).ConfigureAwait(false);
                    teamRepositoryConnections.AddRange(result.TeamRepositoryConnections);

                    // A null cursor from the local function signals the last page.
                    if (result.AfterCursor != null)
                    {
                        moreTeamRepositoriesToRead = true;
                        afterCursor = result.AfterCursor;
                    }
                    else
                    {
                        moreTeamRepositoriesToRead = false;
                    }
                }

                teamToRespositoriesMap.Add(team.Name, teamRepositoryConnections);
            }
        }

        moreTeamsToRead = graphQLOrganization.Teams.PageInfo.HasNextPage;

        if (moreTeamsToRead)
        {
            teamAfterCursor = graphQLOrganization.Teams.PageInfo.EndCursor;
            logger.LogDebug($"Reading additional team information for organziation {organization}");
        }
    }

    logger.LogDebug($"Finished reading team information for organziation {organization}");

    return (teamToRespositoriesMap);

    // Local function: fetches one extra page (100) of repositories for a single team.
    // NOTE(review): the query searches teams by name with first:1 and takes First() —
    // assumes the name search always returns the intended team first; confirm.
    async Task<(List<TeamRepositoryConnection> TeamRepositoryConnections, string AfterCursor)> GetAdditionalTeamRepositoriesAsync(string teamName, string afterCursor)
    {
        var teamRepositoriesQuery = @" query ($login: String!, $teamName: String!, $repositoriesAfter: String) { organization(login: $login) { teams(first: 1, query: $teamName) { nodes { name repositoryEdges: repositories(first:100, after: $repositoriesAfter){ edges{ permission repository : node { name } } pageInfo{ endCursor, hasNextPage } } } } } } ";
        var teamRepositoriesVariables = new { login = organization, teamName = teamName, repositoriesAfter = afterCursor };

        var graphQLOrganization = await githubAbuseRateLimitPolicy.ExecuteAsync(() =>
        {
            return (graphQLClient.QueryAsync<Model.Github.GraphQL.Organization>(teamRepositoriesQuery, teamRepositoriesVariables));
        }).ConfigureAwait(false);

        var teamRepositoryConnections = new List<TeamRepositoryConnection>();

        foreach (var repositoryEdge in graphQLOrganization.Teams.Nodes.First().RepositoryEdges.Edges)
        {
            var teamRepositoryConnection = new TeamRepositoryConnection { RepositoryName = repositoryEdge.Repository.Name, TeamPermissions = repositoryEdge.Permission };
            teamRepositoryConnections.Add(teamRepositoryConnection);
        }

        // Null cursor ⇒ caller stops paging.
        string nextAfterCursor = null;

        if (graphQLOrganization.Teams.Nodes.First().RepositoryEdges.PageInfo.HasNextPage)
        {
            nextAfterCursor = graphQLOrganization.Teams.Nodes.First().RepositoryEdges.PageInfo.EndCursor;
        }

        return (teamRepositoryConnections, nextAfterCursor);
    }
}
/// <summary>
/// Validates the payment request and routes it to the cheap (≤20), expensive
/// (20–500, with cheap fallback), or premium (>500, retried via the retry policy)
/// gateway. Returns BadRequest for invalid input, Ok(bool) with the gateway result,
/// or 500 on unexpected failure.
/// </summary>
public async Task<ActionResult> ProcessPayment(PaymentDataViewModel requestObj)
{
    try
    {
        // FIX: the original used '&&', which required the card number to be BOTH
        // empty AND regex-invalid — a non-empty number that failed the regex was
        // accepted. '||' rejects empty or malformed numbers.
        if (string.IsNullOrEmpty(requestObj.CreditCardNumber) || !expression.IsMatch(requestObj.CreditCardNumber))
        {
            return BadRequest("CreditCardNumber Is Invalid");
        }
        else if (string.IsNullOrEmpty(requestObj.CardHolder))
        {
            return BadRequest("CardHolder Is Invalid");
        }
        else if (requestObj.SecurityCode != null && requestObj.SecurityCode.Length > 3)
        {
            // (the original's 'Length > 0' check was redundant next to 'Length > 3')
            return BadRequest("SecurityCode Is Invalid");
        }
        else if (requestObj.ExpirationDate == null)
        {
            return BadRequest("ExpirationDate Is Invalid");
        }
        else if (requestObj.Amount <= 0)
        {
            return BadRequest("Amount Is Invalid");
        }

        bool response = false;
        var model = _mapper.Map<PaymentData>(requestObj);

        if (requestObj.Amount <= 20)
        {
            // Small amounts: cheap gateway only.
            response = await _repository.ICheapPaymentGateway.ProcessPayments(model, _context);
        }
        else if (requestObj.Amount > 20 && requestObj.Amount <= 500)
        {
            // Mid-range: expensive gateway first, cheap gateway as fallback.
            response = await _repository.IExpensivePaymentGateway.ProcessPayments(model, _context);

            if (!response)
            {
                response = await _repository.ICheapPaymentGateway.ProcessPayments(model, _context);
            }
        }
        else if (requestObj.Amount > 500)
        {
            // Premium service, retried on failure by the retry policy.
            await _retryPolicy.ExecuteAsync(async () =>
            {
                response = await _repository.IPremiumPaymentService.ProcessPayments(model, _context);
            });
        }

        return Ok(response);
    }
    catch (Exception ex)
    {
        // FIX: the original called int.Parse(HttpStatusCode.InternalServerError.ToString()),
        // which tries to parse the enum NAME "InternalServerError" and throws
        // FormatException at runtime; cast the enum instead.
        return StatusCode((int)HttpStatusCode.InternalServerError);
    }
}
/// <inheritdoc/>
public Task DeleteBatchJobAsync(string taskId)
{
    // Delete via the Azure proxy, retrying transient failures.
    return asyncRetryPolicy.ExecuteAsync(() => azureProxy.DeleteBatchJobAsync(taskId));
}
/// <summary>
/// Polls the vault API for pending wallet generation requests and processes each one:
/// generate the wallet (retried via the retry policy), then confirm; database errors
/// are left for the next poll, known/unknown failures are rejected with a reason.
/// </summary>
private async Task ProcessAsync()
{
    var response = await _vaultApiClient.Wallets.GetAsync(new GetWalletGenerationRequestRequest());

    if (response.BodyCase == GetWalletGenerationRequestResponse.BodyOneofCase.Error)
    {
        _logger.LogError("An error occurred while getting wallet generation requests. {@error}", response.Error);

        // Back off before the caller polls again.
        await Task.Delay(_delayOnError);

        return;
    }

    foreach (var walletGenerationRequest in response.Response.Requests)
    {
        // Correlation data attached to every log entry for this request.
        var context = new LoggingContext
        {
            WalletGenerationRequestId = walletGenerationRequest.Id,
            BlockchainId = walletGenerationRequest.BlockchainId,
            ProtocolCode = walletGenerationRequest.ProtocolCode,
            NetworkType = walletGenerationRequest.NetworkType,
            TenantId = walletGenerationRequest.TenantId,
            Group = walletGenerationRequest.Group
        };

        try
        {
            _logger.LogInformation("Wallet generation request processing. {@context}", context);

            var wallet = await _retryPolicy.ExecuteAsync(() => GenerateWalletAsync(walletGenerationRequest));

            if (await ConfirmAsync(wallet, context))
            {
                _logger.LogInformation("Wallet generation request confirmed. {@context}", context);
            }
        }
        catch (DbException exception)
        {
            // DB problems are transient: leave the request pending for the next poll.
            _logger.LogError(exception, "An error occurred while attempting to access the database. {@context}", context);

            // silently retry
        }
        catch (BlockchainIsNotSupportedException exception)
        {
            _logger.LogError(exception, "BlockchainId is not supported. {@context}", context);

            if (await RejectAsync(walletGenerationRequest.Id, RejectionReason.UnknownBlockchain, "BlockchainId is not supported", context))
            {
                _logger.LogInformation("Wallet generation request rejected. {@context}", context);
            }
        }
        catch (Exception exception)
        {
            // Anything else is a permanent failure for this request: reject it.
            _logger.LogError(exception, "An error occurred while processing wallet generation request. {@context}", context);

            if (await RejectAsync(walletGenerationRequest.Id, RejectionReason.Other, exception.Message, context))
            {
                _logger.LogInformation("Wallet generation request rejected. {@context}", context);
            }
        }
    }
}
/// <summary>
/// Downloads the resource at <paramref name="source"/> to its mirror path (unless
/// already on disk) and returns the local link; results are memoized per source in
/// <paramref name="mirrorCache"/>. Returns null for non-absolute sources.
/// </summary>
private async Task<string> DownloadAndReplaceAsync(string source, Dictionary<string, string> mirrorCache, IExecutionContext context)
{
    // Serve repeats from the cache.
    if (mirrorCache.TryGetValue(source, out string cachedValue))
    {
        return (cachedValue);
    }

    // Get the destination path and link
    if (source.StartsWith("//"))
    {
        // Protocol-relative URL: assume http.
        source = $"http:{source}";
    }

    if (!Uri.TryCreate(source, UriKind.Absolute, out Uri uri))
    {
        // Not absolute
        return (null);
    }

    FilePath path = _pathFunc(uri);

    if (path == null)
    {
        throw new ExecutionException($"Null resource mirror path for {source}");
    }

    string link = context.GetLink(path);

    // Download the resource, but only if we haven't already written it to disk
    IFile outputFile = context.FileSystem.GetOutputFile(path);

    if (!outputFile.Exists)
    {
        context.LogDebug($"Downloading resource from {uri} to {path.FullPath}");

        // Retry with exponential backoff links. This helps with websites like GitHub that will give us a 429 -- TooManyRequests.
        AsyncRetryPolicy<HttpResponseMessage> retryPolicy = Policy
            .Handle<HttpRequestException>()
            .OrResult<HttpResponseMessage>(r => r.StatusCode == TooManyRequests)
            .WaitAndRetryAsync(MaxAbsoluteLinkRetry, attempt =>
            {
                context.LogDebug($"Retry {attempt}");
                // 0.5s, 1s, 2s, 4s, ... per attempt.
                return (TimeSpan.FromSeconds(0.5 * Math.Pow(2, attempt)));
            });

        // NOTE(review): the HttpClient is disposed inside the lambda before the
        // response body is consumed below — this relies on the response being fully
        // buffered before SendAsync returns; confirm context.CreateHttpClient's
        // completion-option semantics.
        HttpResponseMessage response = await retryPolicy.ExecuteAsync(async () =>
        {
            using (HttpClient httpClient = context.CreateHttpClient())
            {
                return (await httpClient.SendAsync(new HttpRequestMessage(HttpMethod.Get, uri)));
            }
        });
        response.EnsureSuccessStatusCode();

        // Copy the result to output
        using (Stream outputStream = outputFile.OpenWrite())
        {
            await response.Content.CopyToAsync(outputStream);
        }
    }

    mirrorCache.Add(source, link);
    return (link);
}
/// <summary>Fetches the interest rate from the client under the retry policy.</summary>
public Task<double> GetJuros()
{
    return RetryPolicy.ExecuteAsync(async () => await client.GetJuros());
}
/// <summary>Reads the hello message from the repository through the retry policy.</summary>
public async Task<string> GetHelloMessage()
{
    // Access the repository via the retry policy.
    string message = await _retryPolicy.ExecuteAsync(async () => await _messageRepository.GetHelloMessage());
    return message;
}
/// <summary>
/// Forwards to the next pipeline stage under the retry policy, honouring the
/// context's cancellation token.
/// </summary>
public async Task Execute(IContext ctx, T value)
{
    await _retryPolicy.ExecuteAsync(async _ => await _next.Execute(ctx, value), ctx.Token);
}
/// <summary>
/// Long-running pump: drains queued messages into batches, hands each batch to the
/// callback under the callback retry policy, and records the highest handled offset
/// per topic/partition. Runs until the cancellation token fires, then signals
/// _messageHandlerStopped.
/// </summary>
async Task ProcessMessages()
{
    var cancellationToken = _cancellationTokenSource.Token;

    _logger.Info("Starting message handler");

    try
    {
        var messageBatch = new List<ReceivedLogicalMessage>(MaxQueueLength);

        while (!cancellationToken.IsCancellationRequested)
        {
            // Drain everything currently queued into one batch.
            while (_messages.TryDequeue(out var message))
            {
                messageBatch.Add(message);
            }

            if (!messageBatch.Any())
            {
                // Nothing pending; idle briefly before polling again.
                await Task.Delay(TimeSpan.FromMilliseconds(100), cancellationToken);
                continue;
            }

            try
            {
                await _callbackPolicy.ExecuteAsync(token => _callback(messageBatch, token), cancellationToken);

                // Highest offset per (topic, partition) in this batch...
                var maxPositionByPartition = messageBatch
                    .GroupBy(m => new { m.Position.Topic, m.Position.Partition })
                    .Select(a => new { Topic = a.Key.Topic, Partition = a.Key.Partition, Offset = a.Max(p => p.Position.Offset) })
                    .ToList();

                // ...is recorded as the new handled position.
                foreach (var max in maxPositionByPartition)
                {
                    _positions.GetOrAdd(max.Topic, _ => new ConcurrentDictionary<int, long>())[max.Partition] = max.Offset;
                }
            }
            catch (Exception exception)
            {
                // NOTE(review): a batch that still fails after the policy's retries is
                // logged and then DROPPED (cleared below) without advancing positions —
                // confirm this at-most-once behaviour is intended.
                _logger.Warn(exception, "Error when handling messages");
            }

            messageBatch.Clear();
        }
    }
    catch (OperationCanceledException) when (cancellationToken.IsCancellationRequested)
    {
        // we're exiting
        _logger.Info("Message handler stopped");
    }
    catch (Exception exception)
    {
        _logger.Error(exception, "Unhandled message handler exception");
    }
    finally
    {
        // Let the shutdown path know this loop has fully stopped.
        _messageHandlerStopped.Set();
    }
}
/// <summary>
/// Calls the gateway's "get all sub-GL" endpoint (forwarding the caller's
/// Authorization header) under the retry policy and maps the payload to a
/// <see cref="SubGlRespObj"/>, normalizing null/unsuccessful responses into
/// friendly error objects.
/// </summary>
public async Task<SubGlRespObj> GetAllSubGlAsync()
{
    var gosGatewayClient = _httpClientFactory.CreateClient("GOSDEFAULTGATEWAY");
    string authorization = _accessor.HttpContext.Request.Headers["Authorization"];
    gosGatewayClient.DefaultRequestHeaders.Add("Authorization", authorization);

    SubGlRespObj responseObj = new SubGlRespObj();

    return await _retryPolicy.ExecuteAsync(async () =>
    {
        // FIX: 'result' is now declared locally; the original assigned an
        // undeclared/ambient name inside the lambda.
        HttpResponseMessage result = await gosGatewayClient.GetAsync(ApiRoutes.SubGl.GET_ALL_SUBGL);

        if (!result.IsSuccessStatusCode)
        {
            // FIX: the original deserialized the error body, then built a second
            // fallback object that was silently discarded (an orphaned
            // 'new SubGlRespObj {...};' statement) and re-read the content again.
            // Use the fallback when the error body doesn't deserialize.
            var errorPayload = await result.Content.ReadAsStringAsync();
            responseObj = JsonConvert.DeserializeObject<SubGlRespObj>(errorPayload)
                ?? new SubGlRespObj { Status = new APIResponseStatus { Message = new APIResponseMessage { FriendlyMessage = result.ReasonPhrase } } };
        }
        else
        {
            var data = await result.Content.ReadAsStringAsync();
            responseObj = JsonConvert.DeserializeObject<SubGlRespObj>(data);
        }
        // FIX: the original's 'catch (Exception ex) { throw ex; }' reset the stack
        // trace while adding nothing; it has been removed — exceptions now propagate
        // unchanged to the retry policy.

        if (responseObj == null)
        {
            return new SubGlRespObj
            {
                Status = new APIResponseStatus
                {
                    IsSuccessful = false,
                    Message = new APIResponseMessage { FriendlyMessage = "System Error!! Please contact Administrator" }
                }
            };
        }

        if (!responseObj.Status.IsSuccessful)
        {
            // Propagate the gateway's own failure status.
            return new SubGlRespObj
            {
                Status = new APIResponseStatus { IsSuccessful = responseObj.Status.IsSuccessful, Message = responseObj.Status.Message }
            };
        }

        return new SubGlRespObj
        {
            subGls = responseObj.subGls,
            Status = new APIResponseStatus { IsSuccessful = responseObj.Status.IsSuccessful, Message = responseObj.Status.Message }
        };
    });
}
/// <summary>Runs the supplied operation under the retry policy, flowing the token through.</summary>
public async Task ExecuteAsync(Func<CancellationToken, Task> operation, CancellationToken cancellationToken)
{
    await _retryPolicyAsync.ExecuteAsync(ct => operation(ct), cancellationToken);
}
/// <summary>Delegates the GET to the wrapped client under the retry policy.</summary>
public async Task<T> GetAsync<T>(Uri uri, IDictionary<string, string> headers, JsonConverter[] converters = null)
{
    var payload = await _retryPolicy.ExecuteAsync(async () => await _wrapped.GetAsync<T>(uri, headers, converters));
    return (T)payload;
}
/// <summary>Binds to the given address/port, retrying with exponential back-off.</summary>
public new Task<IChannel> BindAsync(IPAddress ipAddress, int port)
{
    return _exponentialBackOffRetryPolicy.ExecuteAsync(() => base.BindAsync(ipAddress, port));
}
/// <summary>
/// Polls the vault API for pending transfer signing requests and processes each one:
/// sign the transaction (retried via the retry policy), then confirm; database errors
/// are left for the next poll, known/unknown failures are rejected with a reason.
/// </summary>
private async Task ProcessAsync()
{
    var response = await _vaultApiClient.TransferSigningRequests.GetAsync(new GetTransferSigningRequestsRequest());

    if (response.BodyCase == GetTransferSigningRequestsResponse.BodyOneofCase.Error)
    {
        _logger.LogError("An error occurred while getting transfers signing requests. {@error}", response.Error);

        // Back off before the caller polls again.
        await Task.Delay(_delayOnError);

        return;
    }

    foreach (var transferSigningRequest in response.Response.Requests)
    {
        // Correlation data attached to every log entry for this request.
        var context = new LoggingContext
        {
            TransactionSigningRequestId = transferSigningRequest.Id,
            BlockchainId = transferSigningRequest.BlockchainId,
            DoubleSpendingProtectionType = transferSigningRequest.DoubleSpendingProtectionType,
            NetworkType = transferSigningRequest.NetworkType
        };

        try
        {
            _logger.LogInformation("Transaction signing request processing. {@context}", context);

            var transaction = await _retryPolicy.ExecuteAsync(() => SignTransactionAsync(transferSigningRequest));

            if (await ConfirmAsync(transaction, context))
            {
                _logger.LogInformation("Transaction signing request confirmed. {@context}", context);
            }
        }
        catch (DbException exception)
        {
            // DB problems are transient: leave the request pending for the next poll.
            _logger.LogError(exception, "An error occurred while attempting to access the database. {@context}", context);

            // silently retry
        }
        catch (BlockchainIsNotSupportedException exception)
        {
            _logger.LogError(exception, "BlockchainId is not supported. {@context}", context);

            if (await RejectAsync(transferSigningRequest.Id, TransferSigningRequestRejectionReason.UnknownBlockchain, "BlockchainId is not supported", context))
            {
                _logger.LogInformation("Transaction signing request rejected. {@context}", context);
            }
        }
        catch (Exception exception)
        {
            // Anything else is a permanent failure for this request: reject it.
            _logger.LogError(exception, "An error occurred while processing transaction signing request. {@context}", context);

            if (await RejectAsync(transferSigningRequest.Id, TransferSigningRequestRejectionReason.Other, exception.Message, context))
            {
                _logger.LogInformation("Transaction signing request rejected. {@context}", context);
            }
        }
    }
}
/// <summary>
/// Worker loop: dequeues WebDirectory items and processes FTP/FTPS (with
/// max-connections handling), Google Drive, or HTTP directories (under the retry
/// policy with a 5-minute cap) until the queue drains with no busy worker,
/// cancellation is requested, or the FTP server reports its connection limit.
/// </summary>
private async Task WebDirectoryProcessor(ConcurrentQueue<WebDirectory> queue, string name, CancellationToken cancellationToken)
{
    Logger.Debug($"Start [{name}]");

    // Set when the FTP server refuses further connections; ends this worker's loop.
    bool maxConnections = false;

    do
    {
        Interlocked.Increment(ref RunningWebDirectoryThreads);

        if (queue.TryDequeue(out WebDirectory webDirectory))
        {
            try
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo[name] = webDirectory;
                }

                // Each URL is processed at most once per session.
                if (!Session.ProcessedUrls.Contains(webDirectory.Url))
                {
                    Session.ProcessedUrls.Add(webDirectory.Url);
                    webDirectory.StartTime = DateTimeOffset.UtcNow;

                    Logger.Info($"[{name}] Begin processing {webDirectory.Url}");

                    if (Session.Root.Uri.Scheme == Constants.UriScheme.Ftp || Session.Root.Uri.Scheme == Constants.UriScheme.Ftps)
                    {
                        WebDirectory parsedWebDirectory = await FtpParser.ParseFtpAsync(name, webDirectory, OpenDirectoryIndexerSettings.Username, OpenDirectoryIndexerSettings.Password);

                        if (webDirectory?.CancellationReason == Constants.Ftp_Max_Connections)
                        {
                            // Server hit its connection limit: requeue the item, drop
                            // this worker's FTP client, and let the worker wind down.
                            webDirectory.CancellationReason = null;
                            maxConnections = true;

                            if (webDirectory.Name == Constants.Root)
                            {
                                // Can't even list the root: abort this worker entirely.
                                webDirectory.Error = true;
                                Interlocked.Decrement(ref RunningWebDirectoryThreads);
                                throw new Exception("Error checking FTP because maximum connections reached");
                            }

                            // Requeue
                            Session.ProcessedUrls.Remove(webDirectory.Url);
                            queue.Enqueue(webDirectory);

                            try
                            {
                                await FtpParser.FtpClients[name].DisconnectAsync(cancellationToken);

                                lock (FtpParser.FtpClients)
                                {
                                    FtpParser.FtpClients.Remove(name);
                                }
                            }
                            catch (Exception exFtpDisconnect)
                            {
                                Logger.Error(exFtpDisconnect, "Error disconnecting FTP connection.");
                            }
                        }

                        if (parsedWebDirectory != null)
                        {
                            DirectoryParser.CheckParsedResults(parsedWebDirectory);
                            AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                        }
                    }
                    else if (Session.Root.Uri.Host == Constants.GoogleDriveDomain)
                    {
                        // The Drive indexer rewrites the URL; restore the original afterwards.
                        string baseUrl = webDirectory.Url;

                        WebDirectory parsedWebDirectory = await GoogleDriveIndexer.IndexAsync(webDirectory);
                        parsedWebDirectory.Url = baseUrl;

                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else
                    {
                        if (Session.Root.Uri.Host == Constants.BlitzfilesTechDomain || SameHostAndDirectory(Session.Root.Uri, webDirectory.Uri))
                        {
                            Logger.Debug($"[{name}] Start download '{webDirectory.Url}'");
                            Session.TotalHttpRequests++;

                            // Hard 5-minute cap per directory download.
                            // NOTE(review): this CancellationTokenSource is never disposed.
                            CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
                            cancellationTokenSource.CancelAfter(TimeSpan.FromMinutes(5));

                            // Context travels into the retry policy's callbacks.
                            Context pollyContext = new Context
                            {
                                { "Processor", name },
                                { "WebDirectory", webDirectory },
                                { "CancellationTokenSource", cancellationTokenSource }
                            };

                            await RetryPolicy.ExecuteAsync(async (context, token) => { await ProcessWebDirectoryAsync(name, webDirectory, cancellationTokenSource.Token); }, pollyContext, cancellationTokenSource.Token);
                        }
                        else
                        {
                            Logger.Warn($"[{name}] Skipped result of '{webDirectory.Url}' because it is not the same host or path");
                            Session.Skipped++;
                        }
                    }

                    Logger.Info($"[{name}] Finished processing {webDirectory.Url}");
                }
                else
                {
                    //Logger.Warn($"[{name}] Skip, already processed: {webDirectory.Uri}");
                }
            }
            catch (Exception ex)
            {
                if (ex is TaskCanceledException taskCanceledException)
                {
                    // Timed out / cancelled: record the error and move on.
                    Session.Errors++;
                    webDirectory.Error = true;

                    if (!Session.UrlsWithErrors.Contains(webDirectory.Url))
                    {
                        Session.UrlsWithErrors.Add(webDirectory.Url);
                    }

                    if (webDirectory.ParentDirectory?.Url != null)
                    {
                        Logger.Error($"Skipped processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory.Url}'");
                    }
                    else
                    {
                        Logger.Error($"Skipped processing Url: '{webDirectory.Url}'");
                        Session.Root.Error = true;
                    }
                }
                else
                {
                    Logger.Error(ex, $"Error processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory?.Url}'");
                }
            }
            finally
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo.Remove(name);
                }

                // Requeued items (CancellationReason still set) are not marked finished.
                if (string.IsNullOrWhiteSpace(webDirectory.CancellationReason))
                {
                    webDirectory.Finished = true;
                    webDirectory.FinishTime = DateTimeOffset.UtcNow;
                }
            }
        }

        Interlocked.Decrement(ref RunningWebDirectoryThreads);

        // Needed, because of the TryDequeue, no waiting in ConcurrentQueue!
        if (queue.IsEmpty)
        {
            // Don't hog the CPU when queue < threads
            await Task.Delay(TimeSpan.FromMilliseconds(1000), cancellationToken);
        }
        else
        {
            await Task.Delay(TimeSpan.FromMilliseconds(10), cancellationToken);
        }
    }
    while (!cancellationToken.IsCancellationRequested && (!queue.IsEmpty || RunningWebDirectoryThreads > 0) && !maxConnections);

    Logger.Debug($"Finished [{name}]");
}
/// <summary>
/// Worker loop: dequeues WebDirectory items and processes FTP, Google Drive, or
/// same-host HTTP directories (under the retry policy with a 5-minute cap per
/// download) until the queue drains with no busy worker, or cancellation is requested.
/// </summary>
private async Task WebDirectoryProcessor(ConcurrentQueue<WebDirectory> queue, string name, CancellationToken cancellationToken)
{
    Logger.Debug($"Start [{name}]");

    do
    {
        Interlocked.Increment(ref RunningWebDirectoryThreads);

        if (queue.TryDequeue(out WebDirectory webDirectory))
        {
            try
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo[name] = webDirectory;
                }

                // Each URL is processed at most once per session.
                if (!Session.ProcessedUrls.Contains(webDirectory.Url))
                {
                    Session.ProcessedUrls.Add(webDirectory.Url);
                    webDirectory.StartTime = DateTimeOffset.UtcNow;

                    Logger.Info($"[{name}] Begin processing {webDirectory.Url}");

                    if (Session.Root.Uri.Scheme == "ftp")
                    {
                        WebDirectory parsedWebDirectory = await FtpParser.ParseFtpAsync(name, webDirectory);
                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else if (Session.Root.Uri.Host == Constants.GoogleDriveDomain)
                    {
                        // The Drive indexer rewrites the URL; restore the original afterwards.
                        string baseUrl = webDirectory.Url;

                        WebDirectory parsedWebDirectory = await GoogleDriveIndexer.IndexAsync(webDirectory);
                        parsedWebDirectory.Url = baseUrl;

                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else
                    {
                        if (SameHostAndDirectory(Session.Root.Uri, webDirectory.Uri))
                        {
                            Logger.Debug($"[{name}] Start download '{webDirectory.Url}'");
                            Session.TotalHttpRequests++;

                            // Hard 5-minute cap per directory download.
                            // NOTE(review): this CancellationTokenSource is never disposed.
                            CancellationTokenSource cancellationTokenSource = new CancellationTokenSource();
                            cancellationTokenSource.CancelAfter(TimeSpan.FromMinutes(5));

                            // Context travels into the retry policy's callbacks.
                            Context pollyContext = new Context
                            {
                                { "Processor", name },
                                { "WebDirectory", webDirectory },
                                { "CancellationTokenSource", cancellationTokenSource }
                            };

                            await RetryPolicy.ExecuteAsync(async (context, token) => { await ProcessWebDirectoryAsync(name, webDirectory, cancellationTokenSource.Token); }, pollyContext, cancellationTokenSource.Token);
                        }
                        else
                        {
                            Logger.Warn($"[{name}] Skipped result of '{webDirectory.Url}' because it is not the same host or path");
                            Session.Skipped++;
                        }
                    }

                    Logger.Info($"[{name}] Finished processing {webDirectory.Url}");
                }
                else
                {
                    //Logger.Warn($"[{name}] Skip, already processed: {webDirectory.Uri}");
                }
            }
            catch (Exception ex)
            {
                if (ex is TaskCanceledException taskCanceledException)
                {
                    // Timed out / cancelled: log and move on.
                    if (webDirectory.ParentDirectory?.Url != null)
                    {
                        Logger.Warn($"Skipped processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory.Url}'");
                    }
                    else
                    {
                        Logger.Warn($"Skipped processing Url: '{webDirectory.Url}'");
                        Session.Root.Error = true;
                    }
                }
                else
                {
                    Logger.Error(ex, $"Error processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory?.Url}'");
                }

                Session.Errors++;

                if (!Session.UrlsWithErrors.Contains(webDirectory.Url))
                {
                    Session.UrlsWithErrors.Add(webDirectory.Url);
                }
            }
            finally
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo.Remove(name);
                }

                webDirectory.Finished = true;
                webDirectory.FinishTime = DateTimeOffset.UtcNow;
            }
        }

        Interlocked.Decrement(ref RunningWebDirectoryThreads);

        // Needed!
        await Task.Delay(TimeSpan.FromMilliseconds(10));
    }
    while (!cancellationToken.IsCancellationRequested && (!queue.IsEmpty || RunningWebDirectoryThreads > 0));

    Logger.Debug($"Finished [{name}]");
}
/// <summary>
/// Fetches heroes from the API, retrying transient failures with exponential back-off
/// (2s, 4s, 8s) and a 5 second pessimistic timeout per attempt.
/// References:
/// https://dev.to/rickystam/net-core-use-httpclientfactory-and-polly-to-build-rock-solid-services-2edh
/// https://www.davidbritch.com/2017/07/transient-fault-handling-in.html
/// https://nodogmablog.bryanhogan.net/2017/12/using-the-polly-timeout-when-making-a-http-request/
/// </summary>
/// <param name="limite">Maximum number of results requested from the API (passed to <c>GetUrl</c>).</param>
/// <returns>
/// The deserialized results, or a single-default-element sequence when the API does not answer 200 OK.
/// </returns>
public async Task<IEnumerable<Result>> GetHeroes(string limite)
{
    // Retry on the configured transient status codes, on Polly timeouts, or on any exception.
    _httpRetryPolicy = Policy.HandleResult<HttpResponseMessage>(r => httpStatusCodesToRetry.Contains(r.StatusCode))
        .Or<TimeoutRejectedException>()
        .Or<Exception>()
        .WaitAndRetryAsync
        (
            // Number of attempts after the first failure.
            retryCount: 3,
            // Exponential back-off between attempts.
            sleepDurationProvider: retryAttempt => TimeSpan.FromSeconds(Math.Pow(2, retryAttempt)),
            // FIX: use the onRetryAsync overload. The original passed an async lambda to the
            // Action-typed 'onRetry' parameter, producing an async-void delegate whose awaits
            // and exceptions were unobservable.
            onRetryAsync: async (response, time, retryCount, context) =>
            {
                if (response.Exception != null)
                {
                    Console.WriteLine($"Ocorreu um erro ao baixar os dados: {response.Exception.Message}, Tentando novamente em {time}, tentativa número: {retryCount}");
                }
                else
                {
                    var rawResponse = await response.Result.Content.ReadAsStringAsync();
                    var json = JsonConvert.DeserializeAnonymousType(rawResponse, new { code = "", status = "" });
                    Console.WriteLine($"Ocorreu um erro ao baixar os dados: {json.status}, Tentando novamente em {time}, tentativa número: {retryCount}");
                }
            }
        );

    // Pessimistic: enforced even if the delegate never observes the cancellation token.
    _timeoutPolicy = Policy.TimeoutAsync(5, TimeoutStrategy.Pessimistic);

    // Retry wraps timeout, so every attempt gets its own 5 second budget.
    HttpResponseMessage response = await _httpRetryPolicy.ExecuteAsync(
        () => _timeoutPolicy.ExecuteAsync(async c => await _client.GetAsync(GetUrl(limite), c), CancellationToken.None));

    if (response.StatusCode != HttpStatusCode.OK)
    {
        // FIX: the original also deserialized the body here via an inverted ternary
        // (it only deserialized when the body WAS empty) into an unused local; that
        // dead, inverted code is removed. Return an "empty" sequence (DefaultIfEmpty
        // yields a single default element, preserving the original contract).
        return new List<Result>().DefaultIfEmpty();
    }

    // Deserialize with the project's contract resolver. The original also ran two
    // discarded benchmark deserializations (Stopwatch + stream reader) whose results
    // were never used; they are removed.
    var contractResolver = new CustomContractResolver();
    var hero = JsonConvert.DeserializeObject<Hero>(
        await response.Content.ReadAsStringAsync(),
        new JsonSerializerSettings { ContractResolver = contractResolver });

    return hero.data.results.EmptyIfNull();

    // FIX: the original wrapped everything in 'catch (Exception ex) { throw ex; }',
    // which only destroyed the stack trace; the useless try/catch is removed so
    // exceptions propagate unmangled.
}
/// <summary>
/// Worker loop: repeatedly dequeues a <see cref="WebDirectory"/> and processes it
/// (FTP parse, Google Drive index, or HTTP download with retry) until the queue is
/// empty AND no other worker is still running, or cancellation is requested.
/// </summary>
/// <param name="queue">Shared work queue of directories still to be crawled.</param>
/// <param name="name">Worker name, used for logging and the processor-info map.</param>
/// <param name="token">Stops the loop at the next iteration boundary.</param>
private async Task WebDirectoryProcessor(ConcurrentQueue<WebDirectory> queue, string name, CancellationToken token)
{
    Logger.Debug($"Start [{name}]");

    do
    {
        Interlocked.Increment(ref RunningWebDirectoryThreads);

        if (queue.TryDequeue(out WebDirectory webDirectory))
        {
            try
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo[name] = webDirectory;
                }

                if (!Session.ProcessedUrls.Contains(webDirectory.Url))
                {
                    Session.ProcessedUrls.Add(webDirectory.Url);

                    Logger.Info($"[{name}] Begin processing {webDirectory.Url}");

                    if (Session.Root.Uri.Scheme == "ftp")
                    {
                        WebDirectory parsedWebDirectory = await FtpParser.ParseFtpAsync(name, webDirectory);
                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else if (Session.Root.Uri.Host == "drive.google.com")
                    {
                        // The indexer rewrites the URL while crawling; restore the original
                        // afterwards so the result stays linked to the queued entry.
                        string baseUrl = webDirectory.Url;
                        WebDirectory parsedWebDirectory = await GoogleDriveIndexer.IndexAsync(webDirectory);
                        parsedWebDirectory.Url = baseUrl;
                        AddProcessedWebDirectory(webDirectory, parsedWebDirectory);
                    }
                    else
                    {
                        // Only follow links that stay on the same host and under the root path.
                        if (webDirectory.Uri.Host == Session.Root.Uri.Host && webDirectory.Uri.LocalPath.StartsWith(Session.Root.Uri.LocalPath))
                        {
                            Logger.Debug($"[{name}] Start download '{webDirectory.Url}'");
                            Session.TotalHttpRequests++;

                            Context pollyContext = new Context();
                            pollyContext.Add("Processor", name);
                            pollyContext.Add("WebDirectory", webDirectory);

                            await RetryPolicy.ExecuteAsync(ctx => ProcessWebDirectoryAsync(name, webDirectory), pollyContext);
                        }
                        else
                        {
                            Logger.Warn($"[{name}] Skipped result of '{webDirectory.Url}' because it is not the same host or path");
                            Session.Skipped++;
                        }
                    }

                    Logger.Info($"[{name}] Finished processing {webDirectory.Url}");
                }
                else
                {
                    Logger.Warn($"[{name}] Skip, already processed: {webDirectory.Uri}");
                }
            }
            catch (Exception ex)
            {
                // FIX: ParentDirectory is null for the root entry; the original used
                // 'webDirectory.ParentDirectory.Url' here, so a root failure threw a
                // NullReferenceException inside this catch block and masked the real error.
                Logger.Error(ex, $"Error processing Url: '{webDirectory.Url}' from parent '{webDirectory.ParentDirectory?.Url}'");

                Session.Errors++;

                if (!Session.UrlsWithErrors.Contains(webDirectory.Url))
                {
                    Session.UrlsWithErrors.Add(webDirectory.Url);
                }
            }
            finally
            {
                lock (WebDirectoryProcessorInfoLock)
                {
                    WebDirectoryProcessorInfo.Remove(name);
                }
            }
        }

        Interlocked.Decrement(ref RunningWebDirectoryThreads);

        // Needed! Yields to other workers and prevents a hot spin when the queue is empty.
        await Task.Delay(TimeSpan.FromMilliseconds(10));
    }
    while (!token.IsCancellationRequested && (!queue.IsEmpty || RunningWebDirectoryThreads > 0));

    Logger.Debug($"Finished [{name}]");
}