private async Task DoTimer(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    var balance = await _b2C2RestClient.BalanceAsync(ct);

    using (var context = CreateContext())
    {
        var ts = DateTime.UtcNow;
        var items = new List<BalanceEntity>();

        foreach (var assetBalance in balance)
        {
            var assetName = assetBalance.Key;

            // Map external asset names to internal ones
            foreach (var assetMapping in _assetMappings)
            {
                assetName = assetName.Replace(assetMapping.Key, assetMapping.Value);
            }

            var item = new BalanceEntity
            {
                Asset = assetName,
                Timestamp = ts,
                Balance = assetBalance.Value
            };

            items.Add(item);
        }

        context.Balances.AddRange(items);
        await context.SaveChangesAsync(ct);
    }
}
public DepositWalletsBalanceProcessingPeriodicalHandler(
    ILogFactory logFactory,
    TimeSpan period,
    int batchSize,
    string blockchainType,
    IBlockchainApiClientProvider blockchainApiClientProvider,
    ICqrsEngine cqrsEngine,
    IAssetsServiceWithCache assetsService,
    IEnrolledBalanceRepository enrolledBalanceRepository,
    IHotWalletsProvider hotWalletsProvider,
    ICashinRepository cashinRepository,
    IDepositWalletLockRepository depositWalletLockRepository,
    IChaosKitty chaosKitty)
{
    _logFactory = logFactory;
    _batchSize = batchSize;
    _blockchainType = blockchainType;
    _blockchainApiClient = blockchainApiClientProvider.Get(blockchainType);
    _cqrsEngine = cqrsEngine;
    _assetsService = assetsService;
    _enrolledBalanceRepository = enrolledBalanceRepository;
    _hotWalletsProvider = hotWalletsProvider;
    _cashinRepository = cashinRepository;
    _depositWalletLockRepository = depositWalletLockRepository;
    _chaosKitty = chaosKitty;

    _timer = new TimerTrigger(
        $"{nameof(DepositWalletsBalanceProcessingPeriodicalHandler)} : {blockchainType}",
        period,
        _logFactory);

    _timer.Triggered += ProcessBalancesAsync;
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var checkDate = await _lastMomentRepo.GetLastEventMomentAsync(_settings.AssetId) ?? _startedAt;

    // Run a heartbeat cashout only if no cashout has been registered for too long
    // and there is no active cashout lock
    if (DateTime.UtcNow - checkDate > _settings.MaxCashoutInactivePeriod &&
        !await _cashoutLockRepository.IsLockedAsync(_settings.AssetId))
    {
        var opId = Guid.NewGuid();

        _log.Info("Starting heartbeat cashout", context: new { opId, checkDate, _settings.AssetId });

        _cqrsEngine.SendCommand(new StartHeartbeatCashoutCommand
        {
            Amount = _settings.Amount,
            AssetId = _settings.AssetId,
            OperationId = opId,
            ToAddress = _settings.ToAddress,
            ToAddressExtension = _settings.ToAddressExtension,
            MaxCashoutInactivePeriod = _settings.MaxCashoutInactivePeriod,
            ClientId = _settings.ClientId,
            FeeCashoutTargetClientId = _settings.FeeCashoutTargetClientId,
            ClientBalance = _settings.ClientBalance
        },
        HeartbeatCashoutBoundedContext.Name,
        HeartbeatCashoutBoundedContext.Name);
    }
}
private async Task TimerHandlerAsync(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    _log.Info("Timer handler started...");

    try
    {
        // Refresh CoinMarketCap data and rebuild constituents if needed
        bool rebuildNeeded;
        lock (_sync)
        {
            var lastRebuildWasYesterday = _lastRebuild.Date < DateTime.UtcNow.Date;
            var itIsTimeToRebuild = DateTime.UtcNow.TimeOfDay > Settings.RebuildTime;
            rebuildNeeded = _rebuildNeeded || lastRebuildWasYesterday && itIsTimeToRebuild;
        }

        if (rebuildNeeded)
        {
            await RefreshCoinMarketCapDataAsync();
            await RebuildTopAssets();
            await CheckForNewAssetsAsync();
        }

        // Calculate new index
        await CalculateThenSaveAndPublishAsync();
    }
    catch (Exception e)
    {
        _log.Warning("Something went wrong in timer handler.", e);
    }

    _log.Info("Timer handler finished.");
}
private async Task MaintainTickerOnTriggered(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    // TODO: This approach is fine while a single service instance is running,
    // TODO: but the code below will cause problems once multiple instances are needed.
    await TruncateCacheAsync();
    await ReloadCacheIfNeededAsync();
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    _log.Info("Starting checking and updating stakes");

    var processExpiredReferralStakesTask = _referralStakesService.ProcessExpiredReferralStakes();
    var processWarningsForReferralStakesTask = _referralStakesService.ProcessWarningsForReferralStakes();

    await Task.WhenAll(processExpiredReferralStakesTask, processWarningsForReferralStakesTask);

    _log.Info("Finished checking and updating stakes");
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var paymentsTask = _paymentsService.MarkPaymentsAsExpiredAsync(_paymentsExpirationPeriod);
    var requestsTask = _paymentsService.MarkRequestsAsExpiredAsync();

    _log.Info("Starting checking and marking partners' payments as expired");

    await Task.WhenAll(paymentsTask, requestsTask);

    _log.Info("Finished checking and marking partners' payments as expired");
}
private async Task DoTimer(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    foreach (var client in _cryptoIndexClientManager.GetAll())
    {
        await LoadCryptoIndex(client.Key, client.Value);
    }

    await LoadMarketCupData();
    await LoadPriceHistory();
}
public async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    try
    {
        await Execute();
    }
    catch (Exception ex)
    {
        _log.Error(ex);
    }
}
private async Task ReconnectIfNeeded(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    try
    {
        ReconnectIfNeeded();
    }
    catch (Exception e)
    {
        _log.Info("Error during ReconnectIfNeeded.", exception: e);
    }
}
private Task HandleTimerTriggered(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    if (DateTime.UtcNow > _currentBatchExpirationMoment)
    {
        _batchBlock.TriggerBatch();
        ExtendBatchExpiration();
    }

    return Task.CompletedTask;
}
private async Task Handler(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    try
    {
        await DoTimer();
    }
    catch (Exception ex)
    {
        Log.Error(ex, context: $"Metainfo: {Metainfo}");
    }
}
private async Task DoTimer(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    if (!StartWork())
    {
        return;
    }

    try
    {
        using (var context = CreateContext())
        {
            var ledgerRequest = new LedgersRequest { Limit = 10 };
            var data = await _b2C2RestClient.GetLedgerHistoryAsync(ledgerRequest, ct);
            var added = 0;

            // Page through the ledger history until a page yields no new records
            do
            {
                added = 0;

                foreach (var log in data.Data)
                {
                    // Map external asset names to internal ones
                    foreach (var assetMapping in _assetMappings)
                    {
                        log.Currency = log.Currency.Replace(assetMapping.Key, assetMapping.Value);
                    }

                    var item = await context.Ledgers.FirstOrDefaultAsync(
                        e => e.TransactionId == log.TransactionId, ct);

                    if (item != null)
                    {
                        continue;
                    }

                    item = new LedgerEntity(log);
                    context.Ledgers.Add(item);
                    added++;
                }

                await context.SaveChangesAsync(ct);

                ledgerRequest.Cursor = data.Next;
                data = await _b2C2RestClient.GetLedgerHistoryAsync(ledgerRequest, ct);
            } while (added > 0);
        }
    }
    finally
    {
        StopWork();
    }
}
private Task CheckStreams(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var streamsToRemove = _streamList
        .Where(x => x.CancelationToken.HasValue && x.CancelationToken.Value.IsCancellationRequested)
        .ToList();

    foreach (var streamData in streamsToRemove)
    {
        RemoveStream(streamData);
    }

    return Task.CompletedTask;
}
private async Task ForceReconnect(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    _log.Info("Force reconnect by timer.");

    try
    {
        ForceReconnect();
    }
    catch (Exception e)
    {
        _log.Info("Error during ForceReconnect.", exception: e);
    }
}
public Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    try
    {
        Execute();
    }
    catch (Exception ex)
    {
        _log.Error(ex);
    }

    return Task.CompletedTask;
}
private async Task TimerTriggeredEventHandler(
    ITimerTrigger timer,
    TimerTriggeredHandlerArgs args,
    CancellationToken cancellationToken)
{
    try
    {
        _settingsValue = await DownloadSettingsAsync();
    }
    catch (Exception e)
    {
        _log.Warning(e.Message, e);
    }
}
private async Task DoTime(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    foreach (var service in _services.OrderBy(e => e.Order, OrderByDirection.Ascending))
    {
        try
        {
            await service.CalculateMarket();
        }
        catch (Exception ex)
        {
            _log.Error(ex, process: nameof(DoTime), context: service.Name);
        }
    }
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var limitsReached = await _limitsService.GetAllLimitReachedAsync();

    if (limitsReached.Count == 0)
    {
        return;
    }

    var clientIds = limitsReached.Select(x => x.ClientId).Distinct().ToArray();

    var clients = (await _clientAccountClient.ClientAccountInformation.GetClientsByIdsAsync(
        new ClientIdsRequest { Ids = clientIds })).ToList();

    var personalDatas = (await _personalDataService.GetAsync(clientIds)).ToList();

    foreach (var limit in limitsReached)
    {
        var client = clients.FirstOrDefault(x => x.Id == limit.ClientId);
        var pd = personalDatas.FirstOrDefault(x => x.Id == limit.ClientId);

        if (client == null || pd == null)
        {
            _log.Warning("Client or personal data not found", context: limit.ClientId);
            continue;
        }

        var currentLimitSettingsTask = _limitsService.GetClientLimitSettingsAsync(limit.ClientId, client.Tier, pd.CountryFromPOA);
        var checkAmountTask = _limitsService.GetClientDepositAmountAsync(limit.ClientId);

        await Task.WhenAll(currentLimitSettingsTask, checkAmountTask);

        if (currentLimitSettingsTask.Result?.MaxLimit == null)
        {
            continue;
        }

        var checkAmount = checkAmountTask.Result;

        // Clear the "limit reached" flag once the client's deposit amount is below the max limit again
        if (checkAmount < currentLimitSettingsTask.Result.MaxLimit.Value)
        {
            await _limitsService.RemoveLimitReachedAsync(limit.ClientId);
            _log.Info("Limit reached removed", context: limit.ClientId);
        }
    }
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var stopWatch = new Stopwatch();
    stopWatch.Start();

    await _operationOrchestratorService.ProcessHangedOperationsAsync();

    stopWatch.Stop();

    if (stopWatch.Elapsed > _idlePeriod)
    {
        _log.Warning("Processing of hung operations takes more time than the idle period of the handler.");
    }
}
public Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    try
    {
        _log.Info("Arbitrage Detector timer started...");
        Execute();
    }
    catch (Exception ex)
    {
        _log.Error(ex);
    }

    _log.Info("Arbitrage Detector timer finished.");

    return Task.CompletedTask;
}
private Task Ping(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    foreach (var streamData in _streamList)
    {
        var instance = streamData.LastSentData ?? Activator.CreateInstance<T>();

        try
        {
            // Remove the stream if writing the ping message fails
            streamData.Stream.WriteAsync(instance)
                .ContinueWith(t => RemoveStream(streamData), TaskContinuationOptions.OnlyOnFaulted);
        }
        catch
        {
        }
    }

    return Task.CompletedTask;
}
private async Task DoTimer(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    if (!StartWork())
    {
        return;
    }

    var offset = 0;
    var page = 100;

    try
    {
        using (var context = CreateContext())
        {
            // Update trades from last time
            var data = await _b2C2RestClient.GetTradeHistoryAsync(offset, page, ct);
            var added = 0;

            // Page through the trade history until a page yields no new records
            do
            {
                added = 0;

                foreach (var log in data)
                {
                    var item = await context.Trades.FirstOrDefaultAsync(e => e.TradeId == log.TradeId, ct);

                    if (item != null)
                    {
                        continue;
                    }

                    item = new TradeEntity(log);
                    context.Trades.Add(item);
                    added++;
                }

                await context.SaveChangesAsync(ct);

                offset += data.Count;
                data = await _b2C2RestClient.GetTradeHistoryAsync(offset, page, ct);
            } while (added > 0);
        }
    }
    finally
    {
        StopWork();
    }
}
public virtual async Task Execute(
    ITimerTrigger timer,
    TimerTriggeredHandlerArgs args,
    CancellationToken cancellationToken)
{
    try
    {
        await _timePublisher.PublishAsync(new TimeEvent { TimeStamp = DateTime.UtcNow });
    }
    catch (Exception exception)
    {
        _log.Warning("An error occurred while publishing the event", exception);
    }
}
private Task Job(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    _lock.EnterReadLock();

    List<StreamData<T>> streams;

    try
    {
        streams = _streamList
            .Where(x => !x.CancelationToken?.IsCancellationRequested ?? true)
            .ToList();
    }
    finally
    {
        _lock.ExitReadLock();
    }

    return ProcesJobAsync(streams);
}
public HeartbeatCashoutTimeoutFinisherPeriodicalHandler(
    HeartbeatCashoutPeriodicalHandlerSettings settings,
    TimeSpan timerPeriod,
    ILogFactory logFactory,
    ICqrsEngine cqrsEngine,
    ICashoutLockRepository cashoutLockRepository)
{
    _settings = settings;
    _cqrsEngine = cqrsEngine;
    _cashoutLockRepository = cashoutLockRepository;
    _log = logFactory.CreateLog(this);

    _timer = new TimerTrigger(
        $"{nameof(HeartbeatCashoutStarterPeriodicalHandler)} : {settings.AssetId}",
        timerPeriod,
        logFactory);

    _timer.Triggered += Execute;
}
private async Task DoTimer(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken ct)
{
    var balance = await _b2C2RestClient.BalanceAsync(ct);

    using (var context = CreateContext())
    {
        var ts = DateTime.UtcNow;

        var items = balance.Select(e => new BalanceEntity
        {
            Asset = e.Key,
            Timestamp = ts,
            Balance = e.Value
        }).ToList();

        context.Balances.AddRange(items);
        await context.SaveChangesAsync(ct);
    }
}
public AzureTableLogPersistenceQueue(
    [NotNull] INoSQLTableStorage<LogEntity> storage,
    [NotNull] string logName,
    [NotNull] ILogFactory lastResortLogFactory,
    TimeSpan maxBatchLifetime,
    int batchSizeThreshold)
{
    if (string.IsNullOrEmpty(logName))
    {
        throw new ArgumentException("Should be a non-empty string", nameof(logName));
    }
    if (maxBatchLifetime < TimeSpan.Zero)
    {
        throw new ArgumentOutOfRangeException(nameof(maxBatchLifetime), maxBatchLifetime, "Should be a positive time span");
    }
    if (batchSizeThreshold < 1)
    {
        throw new ArgumentOutOfRangeException(nameof(batchSizeThreshold), batchSizeThreshold, "Should be a positive number");
    }

    _maxBatchLifetime = maxBatchLifetime;
    _storage = storage ?? throw new ArgumentNullException(nameof(storage));

    // Dataflow pipeline: batch incoming entries, group each batch, then persist the groups with limited parallelism
    _batchBlock = new BatchBlock<LogEntity>(batchSizeThreshold);
    _groupBatchBlock = new TransformManyBlock<LogEntity[], IGrouping<string, LogEntity>>(batch => GroupEntriesBatch(batch));
    _persistGroupBlock = new ActionBlock<IGrouping<string, LogEntity>>(
        // ReSharper disable once ConvertClosureToMethodGroup
        group => PersistEntriesGroup(group),
        new ExecutionDataflowBlockOptions { MaxDegreeOfParallelism = 2 });

    _batchBlock.LinkTo(_groupBatchBlock);
    _groupBatchBlock.LinkTo(_persistGroupBlock);

    ExtendBatchExpiration();

    _timer = new TimerTrigger(logName, TimeSpan.FromMilliseconds(50), lastResortLogFactory)
        .DisableTelemetry();
    _timer.Triggered += HandleTimerTriggered;
    _timer.Start();
}
private async Task Execute(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var date = DateTime.UtcNow - _marketDataInterval;
    double to = date.ToUnixTime();

    var assetPairIds = (await _marketProfileClient.ApiMarketProfileGetAsync(cancellationToken))
        .Select(x => x.AssetPair)
        .ToList();

    var tasks = new List<Task>();

    // Trim entries older than the market data interval from each sorted set
    foreach (var assetPairId in assetPairIds)
    {
        tasks.Add(_database.SortedSetRemoveRangeByScoreAsync(RedisService.GetMarketDataBaseVolumeKey(assetPairId), 0, to, Exclude.Stop, CommandFlags.FireAndForget));
        tasks.Add(_database.SortedSetRemoveRangeByScoreAsync(RedisService.GetMarketDataQuoteVolumeKey(assetPairId), 0, to, Exclude.Stop, CommandFlags.FireAndForget));
        tasks.Add(_database.SortedSetRemoveRangeByScoreAsync(RedisService.GetMarketDataPriceKey(assetPairId), 0, to, Exclude.Stop, CommandFlags.FireAndForget));
    }

    await Task.WhenAll(tasks);
}
private async Task ProcessBalancesAsync(ITimerTrigger timer, TimerTriggeredHandlerArgs args, CancellationToken cancellationToken)
{
    var assets = (await _assetsService.GetAllAssetsAsync(false, cancellationToken))
        .Where(a => a.BlockchainIntegrationLayerId == _blockchainType)
        .ToDictionary(
            a => a.BlockchainIntegrationLayerAssetId,
            a => a);

    var blockchainAssets = await _blockchainApiClient.GetAllAssetsAsync(_batchSize);

    var balanceProcessor = new BalanceProcessor(
        _blockchainType,
        _logFactory,
        _hotWalletsProvider,
        _blockchainApiClient,
        _cqrsEngine,
        _enrolledBalanceRepository,
        assets,
        blockchainAssets);

    await balanceProcessor.ProcessAsync(_batchSize);
}