/// <summary>
/// Recalculates pool, miner and worker hashrates from the share accumulations of the
/// last <c>HashrateCalculationWindow</c> seconds, persists the samples and broadcasts
/// updates on the message bus.
/// </summary>
private async Task UpdatePoolHashratesAsync()
{
    var start = clock.Now;
    var target = start.AddSeconds(-HashrateCalculationWindow);

    // reused across all pools/miners/workers; only the varying fields are overwritten below
    var stats = new MinerWorkerPerformanceStats
    {
        Created = start
    };

    foreach (var poolId in pools.Keys)
    {
        stats.PoolId = poolId;
        logger.Info(() => $"Updating hashrates for pool {poolId}");
        var pool = pools[poolId];

        // fetch per-miner/worker share accumulation for the calculation window
        var result = await readFaultPolicy.ExecuteAsync(() =>
            cf.Run(con => shareRepo.GetHashAccumulationBetweenCreatedAsync(con, poolId, target, start)));

        var byMiner = result.GroupBy(x => x.Miner).ToArray();

        if (result.Length > 0)
        {
            // calculate pool stats over the actually observed share window
            var windowActual = (result.Max(x => x.LastShare) - result.Min(x => x.FirstShare)).TotalSeconds;

            if (windowActual >= MinHashrateCalculationWindow)
            {
                var poolHashesAccumulated = result.Sum(x => x.Sum);
                var poolHashesCountAccumulated = result.Sum(x => x.Count);
                var poolHashrate = pool.HashrateFromShares(poolHashesAccumulated, windowActual) * HashrateBoostFactor;

                // update
                pool.PoolStats.ConnectedMiners = byMiner.Length;
                pool.PoolStats.PoolHashrate = (ulong) Math.Ceiling(poolHashrate);
                pool.PoolStats.SharesPerSecond = (int) (poolHashesCountAccumulated / windowActual);

                messageBus.NotifyHashrateUpdated(pool.Config.Id, poolHashrate);
            }
        }

        // persist pool stats (written even when no shares arrived, preserving the last computed values)
        await cf.RunTx(async (con, tx) =>
        {
            var mapped = new Persistence.Model.PoolStats
            {
                PoolId = poolId,
                Created = start
            };

            mapper.Map(pool.PoolStats, mapped);
            mapper.Map(pool.NetworkStats, mapped);

            await statsRepo.InsertPoolStatsAsync(con, tx, mapped);
        });

        if (result.Length == 0)
            continue;

        // calculate & update miner, worker hashrates
        foreach (var minerHashes in byMiner)
        {
            double minerTotalHashrate = 0;

            await cf.RunTx(async (con, tx) =>
            {
                stats.Miner = minerHashes.Key;

                // the miner's share window is identical for every worker item; compute it
                // once instead of re-scanning Max/Min per item (was accidentally O(n²))
                var windowActual = (minerHashes.Max(x => x.LastShare) - minerHashes.Min(x => x.FirstShare)).TotalSeconds;

                if (windowActual >= MinHashrateCalculationWindow)
                {
                    foreach (var item in minerHashes)
                    {
                        // calculate miner/worker stats
                        var hashrate = pool.HashrateFromShares(item.Sum, windowActual) * HashrateBoostFactor;
                        minerTotalHashrate += hashrate;

                        // update
                        stats.Hashrate = hashrate;
                        stats.Worker = item.Worker;
                        stats.SharesPerSecond = (double) item.Count / windowActual;

                        // persist
                        await statsRepo.InsertMinerWorkerPerformanceStatsAsync(con, tx, stats);

                        // broadcast per-worker update
                        messageBus.NotifyHashrateUpdated(pool.Config.Id, hashrate, stats.Miner, item.Worker);
                    }
                }
            });

            // broadcast the miner's aggregate hashrate (0 when the window was too short)
            messageBus.NotifyHashrateUpdated(pool.Config.Id, minerTotalHashrate, stats.Miner, null);
        }
    }
}
/// <summary>
/// Recalculates pool, miner and worker hashrates from the share accumulations of the last
/// <c>HashrateCalculationWindow</c> seconds, persists the samples and broadcasts updates.
/// Miner/worker entries that previously had a non-zero hashrate sample but produced no
/// shares in the current window are explicitly reset to zero ("orphaned" stats).
/// </summary>
private async Task UpdatePoolHashratesAsync()
{
    var start = clock.Now;
    var target = start.AddSeconds(-HashrateCalculationWindow);

    // reused across all pools/miners/workers; only the varying fields are overwritten below
    var stats = new MinerWorkerPerformanceStats
    {
        Created = start
    };

    // Certain pools need their computed hashrate scaled by a fixed factor.
    // NOTE(review): factor and pool-id list inherited from the original code
    // (it was duplicated inline in two places) — confirm against the affected algorithms.
    const double hashrateCompensationFactor = 11.2;
    bool needsHashrateCompensation(string id) =>
        id == "idx" || id == "vgc" || id == "shroud" || id == "ecc";

    foreach (var poolId in pools.Keys)
    {
        stats.PoolId = poolId;
        logger.Info(() => $"Updating hashrates for pool {poolId}");
        var pool = pools[poolId];

        // fetch per-miner/worker share accumulation for the calculation window
        var result = await readFaultPolicy.ExecuteAsync(() =>
            cf.Run(con => shareRepo.GetHashAccumulationBetweenCreatedAsync(con, poolId, target, start)));

        var byMiner = result.GroupBy(x => x.Miner).ToArray();

        if (result.Length > 0)
        {
            // each accumulation row represents one worker of some miner
            var workerCount = byMiner.Sum(workers => workers.Count());

            // calculate pool stats over the actually observed share window
            var windowActual = (result.Max(x => x.LastShare) - result.Min(x => x.FirstShare)).TotalSeconds;

            if (windowActual >= MinHashrateCalculationWindow)
            {
                var poolHashesAccumulated = result.Sum(x => x.Sum);
                var poolHashesCountAccumulated = result.Sum(x => x.Count);
                var poolHashrate = pool.HashrateFromShares(poolHashesAccumulated, windowActual) * HashrateBoostFactor;

                if (needsHashrateCompensation(poolId))
                    poolHashrate *= hashrateCompensationFactor;

                // update
                pool.PoolStats.ConnectedMiners = byMiner.Length;
                pool.PoolStats.ConnectedWorkers = workerCount;
                pool.PoolStats.PoolHashrate = (ulong) Math.Ceiling(poolHashrate);
                pool.PoolStats.SharesPerSecond = (int) (poolHashesCountAccumulated / windowActual);

                messageBus.NotifyHashrateUpdated(pool.Config.Id, poolHashrate);
            }
        }
        else
        {
            // reset so the persisted/broadcast stats reflect an idle pool
            pool.PoolStats.ConnectedMiners = 0;
            pool.PoolStats.ConnectedWorkers = 0;
            pool.PoolStats.PoolHashrate = 0;
            pool.PoolStats.SharesPerSecond = 0;

            messageBus.NotifyHashrateUpdated(pool.Config.Id, 0);
            logger.Info(() => $"Reset performance stats for pool {poolId}");
        }

        // persist pool stats
        await cf.RunTx(async (con, tx) =>
        {
            var mapped = new Persistence.Model.PoolStats
            {
                PoolId = poolId,
                Created = start
            };

            mapper.Map(pool.PoolStats, mapped);
            mapper.Map(pool.NetworkStats, mapped);

            await statsRepo.InsertPoolStatsAsync(con, tx, mapped);
        });

        if (result.Length == 0)
            continue;

        // retrieve most recent miner/worker hashrate sample, if non-zero
        var previousMinerWorkerHashrates = await cf.Run(async (con) =>
        {
            return (await statsRepo.GetPoolMinerWorkerHashratesAsync(con, poolId));
        });

        // composite "miner" / "miner:worker" key used for orphan detection
        string buildKey(string miner, string worker = null)
        {
            return (!string.IsNullOrEmpty(worker) ? $"{miner}:{worker}" : miner);
        }

        var previousNonZeroMinerWorkers = new HashSet<string>(
            previousMinerWorkerHashrates.Select(x => buildKey(x.Miner, x.Worker)));
        var currentNonZeroMinerWorkers = new HashSet<string>();

        // calculate & update miner, worker hashrates
        foreach (var minerHashes in byMiner)
        {
            double minerTotalHashrate = 0;

            await cf.RunTx(async (con, tx) =>
            {
                stats.Miner = minerHashes.Key;

                // book keeping
                currentNonZeroMinerWorkers.Add(buildKey(stats.Miner));

                // the miner's share window is identical for every worker item; compute it
                // once instead of re-scanning Max/Min per item (was accidentally O(n²))
                var windowActual = (minerHashes.Max(x => x.LastShare) - minerHashes.Min(x => x.FirstShare)).TotalSeconds;

                if (windowActual >= MinHashrateCalculationWindow)
                {
                    foreach (var item in minerHashes)
                    {
                        // calculate miner/worker stats
                        var hashrate = pool.HashrateFromShares(item.Sum, windowActual) * HashrateBoostFactor;

                        if (needsHashrateCompensation(poolId))
                            hashrate *= hashrateCompensationFactor;

                        minerTotalHashrate += hashrate;

                        // update
                        stats.Hashrate = hashrate;
                        stats.Worker = item.Worker;
                        stats.SharesPerSecond = (double) item.Count / windowActual;

                        // persist
                        await statsRepo.InsertMinerWorkerPerformanceStatsAsync(con, tx, stats);

                        // broadcast
                        messageBus.NotifyHashrateUpdated(pool.Config.Id, hashrate, stats.Miner, item.Worker);

                        // book keeping
                        currentNonZeroMinerWorkers.Add(buildKey(stats.Miner, stats.Worker));
                    }
                }
            });

            messageBus.NotifyHashrateUpdated(pool.Config.Id, minerTotalHashrate, stats.Miner, null);
        }

        // identify and reset "orphaned" hashrates: entries seen last time but absent now
        var orphanedHashrateForMinerWorker = previousNonZeroMinerWorkers.Except(currentNonZeroMinerWorkers).ToArray();

        await cf.RunTx(async (con, tx) =>
        {
            // reset
            stats.Hashrate = 0;
            stats.SharesPerSecond = 0;

            foreach (var item in orphanedHashrateForMinerWorker)
            {
                // key format is "miner" or "miner:worker" (see buildKey)
                var parts = item.Split(":");
                var worker = parts.Length > 1 ? parts[1] : null;

                stats.Miner = parts[0];
                stats.Worker = worker;

                // persist
                await statsRepo.InsertMinerWorkerPerformanceStatsAsync(con, tx, stats);

                // broadcast
                messageBus.NotifyHashrateUpdated(pool.Config.Id, 0, stats.Miner, stats.Worker);

                if (string.IsNullOrEmpty(stats.Worker))
                    logger.Info(() => $"Reset performance stats for miner {stats.Miner} on pool {poolId}");
                else
                    logger.Info(() => $"Reset performance stats for worker {stats.Worker} of miner {stats.Miner} on pool {poolId}");
            }
        });
    }
}
/// <summary>
/// Persists a single miner/worker performance sample inside the given transaction.
/// </summary>
public async Task InsertMinerWorkerPerformanceStatsAsync(IDbConnection con, IDbTransaction tx, MinerWorkerPerformanceStats stats)
{
    logger.LogInvoke();

    var entity = mapper.Map<Entities.MinerWorkerPerformanceStats>(stats);

    // avoid inserting NULL for the worker column — presumably it rejects NULLs; verify schema
    entity.Worker = entity.Worker ?? string.Empty;

    const string query =
        "INSERT INTO minerstats(poolid, miner, worker, hashrate, sharespersecond, created) " +
        "VALUES(@poolid, @miner, @worker, @hashrate, @sharespersecond, @created)";

    await con.ExecuteAsync(query, entity, tx);
}
/// <summary>
/// Persists a single miner/worker performance sample inside the given transaction
/// (synchronous counterpart of <see cref="InsertMinerWorkerPerformanceStatsAsync"/>).
/// </summary>
public void InsertMinerWorkerPerformanceStats(IDbConnection con, IDbTransaction tx, MinerWorkerPerformanceStats stats)
{
    logger.LogInvoke();

    var mapped = mapper.Map<Entities.MinerWorkerPerformanceStats>(stats);

    // consistency with the async variant: never insert a NULL worker name
    if (string.IsNullOrEmpty(mapped.Worker))
        mapped.Worker = string.Empty;

    const string query =
        "INSERT INTO minerstats(poolid, miner, worker, hashrate, sharespersecond, created) " +
        "VALUES(@poolid, @miner, @worker, @hashrate, @sharespersecond, @created)";

    con.Execute(query, mapped, tx);
}
/// <summary>
/// Recalculates pool and miner/worker hashrates from the share accumulations of the
/// last <c>HashrateCalculationWindow</c> seconds and persists them (synchronous variant).
/// </summary>
private void UpdateHashrates()
{
    var start = clock.Now;
    var target = start.AddSeconds(-HashrateCalculationWindow);

    // reused across all pools/miners/workers; only the varying fields are overwritten below
    var stats = new MinerWorkerPerformanceStats
    {
        Created = start
    };

    foreach (var poolId in pools.Keys)
    {
        stats.PoolId = poolId;
        logger.Info(() => $"Updating hashrates for pool {poolId}");
        var pool = pools[poolId];

        // fetch stats
        var result = readFaultPolicy.Execute(() =>
            cf.Run(con => shareRepo.GetHashAccumulationBetweenCreated(con, poolId, target, start)));

        if (result.Length == 0)
            continue;

        var byMiner = result.GroupBy(x => x.Miner).ToArray();

        // calculate pool stats — window is clamped to >= 1s to avoid division by zero
        var windowActual = Math.Max(1, (result.Max(x => x.LastShare) - result.Min(x => x.FirstShare)).TotalSeconds);
        var poolHashesAccumulated = result.Sum(x => x.Sum);
        var poolHashesCountAccumulated = result.Sum(x => x.Count);
        var poolHashrate = pool.HashrateFromShares(poolHashesAccumulated, windowActual);

        // update
        pool.PoolStats.ConnectedMiners = byMiner.Length;
        pool.PoolStats.PoolHashRate = poolHashrate;
        pool.PoolStats.ValidSharesPerSecond = (int)(poolHashesCountAccumulated / windowActual);

        // persist
        cf.RunTx((con, tx) =>
        {
            var mapped = mapper.Map<Persistence.Model.PoolStats>(pool.PoolStats);
            mapped.PoolId = poolId;
            mapped.Created = start;

            statsRepo.InsertPoolStats(con, tx, mapped);
        });

        // calculate & update miner, worker hashrates
        foreach (var minerHashes in byMiner)
        {
            cf.RunTx((con, tx) =>
            {
                stats.Miner = minerHashes.Key;

                // the miner's share window is identical for every worker item; compute it
                // once instead of re-scanning Max/Min per item (was accidentally O(n²))
                windowActual = Math.Max(1, (minerHashes.Max(x => x.LastShare) - minerHashes.Min(x => x.FirstShare)).TotalSeconds);

                foreach (var item in minerHashes)
                {
                    var hashrate = pool.HashrateFromShares(item.Sum, windowActual);

                    // update
                    stats.Hashrate = hashrate;
                    stats.Worker = item.Worker;

                    // fix: divide by the actual observed window, consistent with the
                    // hashrate computation above (previously divided by the configured
                    // maximum window, understating shares/sec for short windows)
                    stats.SharesPerSecond = (double) item.Count / windowActual;

                    // persist
                    statsRepo.InsertMinerWorkerPerformanceStats(con, tx, stats);
                }
            });
        }
    }
}
/// <summary>
/// Persists a single miner/worker performance sample inside the given transaction,
/// honoring the supplied cancellation token.
/// </summary>
public async Task InsertMinerWorkerPerformanceStatsAsync(IDbConnection con, IDbTransaction tx, MinerWorkerPerformanceStats stats, CancellationToken ct)
{
    var record = mapper.Map<Entities.MinerWorkerPerformanceStats>(stats);

    // store an empty string rather than NULL when no worker name was supplied
    record.Worker = record.Worker ?? string.Empty;

    const string query = @"INSERT INTO minerstats(poolid, miner, worker, hashrate, sharespersecond, created) VALUES(@poolid, @miner, @worker, @hashrate, @sharespersecond, @created)";

    var command = new CommandDefinition(query, record, tx, cancellationToken: ct);
    await con.ExecuteAsync(command);
}
// Recalculates pool, miner and worker hashrates from the share accumulations of the
// last _HashrateCalculationWindow minutes, persists the samples and broadcasts
// updates. Miner/worker entries that previously had a non-zero sample but produced
// no shares in the current window are reset to zero ("orphaned" stats).
// (MinerNL variant; note the window here is in minutes, other variants use seconds.)
private async Task UpdatePoolHashratesAsync()
{
    DateTime CurrentTimeUtc = clock.UtcNow;
    var TimeFrom = CurrentTimeUtc.AddMinutes(-_HashrateCalculationWindow);
    var StatsWindowsTimeFrame = TimeSpan.FromMinutes(_HashrateCalculationWindow);

    logger.Info(() => "--------------------------------------------------------------------------------------------");
    logger.Info(() => $"Stats Update Interval : {_StatsUpdateInterval} seconds");
    logger.Info(() => $"Hashrate Calc Windows : {_HashrateCalculationWindow} minutes");
    logger.Info(() => $"Current Time UTC : {CurrentTimeUtc}");
    logger.Info(() => $"Getting Stats from UTC : {TimeFrom}");
    logger.Info(() => "--------------------------------------------------------------------------------------------");
    // MinerNL

    // reused for every pool/miner/worker sample written below
    var stats = new MinerWorkerPerformanceStats
    {
        Created = CurrentTimeUtc // MinerNL Time to UTC
    };

    foreach (var poolId in pools.Keys)
    {
        stats.PoolId = poolId;
        logger.Info(() => $"[{poolId}] Updating Statistics for pool");
        var pool = pools[poolId];

        // fetch stats from DB for the last X minutes
        // MinerNL get stats
        var result = await readFaultPolicy.ExecuteAsync(() => cf.Run(con => shareRepo.GetHashAccumulationBetweenCreatedAsync(con, poolId, TimeFrom, CurrentTimeUtc)));

        var byMiner = result.GroupBy(x => x.Miner).ToArray();

        // calculate & update pool, connected workers & hashrates
        if (result.Length > 0)
        {
            // pool miners
            pool.PoolStats.ConnectedMiners = byMiner.Length; // update connected miners

            // Stats calc windows: portion of the configured window lying before the
            // first / after the last observed share
            var TimeFrameBeforeFirstShare = ((result.Min(x => x.FirstShare) - TimeFrom).TotalSeconds);
            var TimeFrameAfterLastShare = ((CurrentTimeUtc - result.Max(x => x.LastShare)).TotalSeconds);
            var TimeFrameFirstLastShare = (StatsWindowsTimeFrame.TotalSeconds - TimeFrameBeforeFirstShare - TimeFrameAfterLastShare);

            //var poolHashTimeFrame = Math.Floor(TimeFrameFirstLastShare + (TimeFrameBeforeFirstShare / 3) + (TimeFrameAfterLastShare * 3)) ;

            // pool-level hashrate is averaged over the full configured window;
            // TimeFrameFirstLastShare above is currently unused (see commented-out experiment)
            var poolHashTimeFrame = StatsWindowsTimeFrame.TotalSeconds;

            // pool hashrate
            var poolHashesAccumulated = result.Sum(x => x.Sum);
            var poolHashrate = pool.HashrateFromShares(poolHashesAccumulated, poolHashTimeFrame);
            poolHashrate = Math.Floor(poolHashrate);
            pool.PoolStats.PoolHashrate = poolHashrate;

            // pool shares
            var poolHashesCountAccumulated = result.Sum(x => x.Count);
            pool.PoolStats.SharesPerSecond = (int)(poolHashesCountAccumulated / poolHashTimeFrame);

            messageBus.NotifyHashrateUpdated(pool.Config.Id, poolHashrate);
            // MinerNL end
        }
        else
        {
            // reset pool-level stats when no shares arrived in the window
            pool.PoolStats.ConnectedMiners = 0;
            pool.PoolStats.PoolHashrate = 0;
            pool.PoolStats.SharesPerSecond = 0;

            messageBus.NotifyHashrateUpdated(pool.Config.Id, 0);
            logger.Info(() => $"[{poolId}] Reset performance stats for pool");
        }

        logger.Info(() => $"[{poolId}] Connected Miners : {pool.PoolStats.ConnectedMiners} miners");
        logger.Info(() => $"[{poolId}] Pool hashrate : {pool.PoolStats.PoolHashrate} hashes/sec");
        logger.Info(() => $"[{poolId}] Pool shares : {pool.PoolStats.SharesPerSecond} shares/sec");

        // persist. Save pool stats in DB.
        await cf.RunTx(async (con, tx) =>
        {
            var mapped = new Persistence.Model.PoolStats
            {
                PoolId = poolId,
                Created = CurrentTimeUtc // MinerNL time to UTC
            };

            mapper.Map(pool.PoolStats, mapped);
            mapper.Map(pool.NetworkStats, mapped);

            await statsRepo.InsertPoolStatsAsync(con, tx, mapped);
        });

        // retrieve most recent miner/worker hashrate sample, if non-zero
        var previousMinerWorkerHashrates = await cf.Run(async (con) =>
        {
            return (await statsRepo.GetPoolMinerWorkerHashratesAsync(con, poolId));
        });

        // composite "miner" / "miner:worker" key used for orphan detection below
        string buildKey(string miner, string worker = null)
        {
            return (!string.IsNullOrEmpty(worker) ? $"{miner}:{worker}" : miner);
        }

        var previousNonZeroMinerWorkers = new HashSet<string>(previousMinerWorkerHashrates.Select(x => buildKey(x.Miner, x.Worker)));
        var currentNonZeroMinerWorkers = new HashSet<string>();

        if (result.Length == 0)
        {
            // no shares this window: every previously non-zero miner/worker is orphaned
            // identify and reset "orphaned" miner stats
            var orphanedHashrateForMinerWorker = previousNonZeroMinerWorkers.Except(currentNonZeroMinerWorkers).ToArray();

            await cf.RunTx(async (con, tx) =>
            {
                // reset
                stats.Hashrate = 0;
                stats.SharesPerSecond = 0;

                foreach (var item in orphanedHashrateForMinerWorker)
                {
                    // key format is "miner" or "miner:worker" (see buildKey)
                    var parts = item.Split(":");
                    var miner = parts[0];
                    var worker = parts.Length > 1 ? parts[1] : null;

                    stats.Miner = parts[0];
                    stats.Worker = worker;

                    // persist
                    await statsRepo.InsertMinerWorkerPerformanceStatsAsync(con, tx, stats);

                    // broadcast
                    messageBus.NotifyHashrateUpdated(pool.Config.Id, 0, stats.Miner, stats.Worker);

                    if (string.IsNullOrEmpty(stats.Worker))
                    {
                        logger.Info(() => $"[{poolId}] Reset performance stats for miner {stats.Miner}");
                    }
                    else
                    {
                        logger.Info(() => $"[{poolId}] Reset performance stats for miner {stats.Miner}.{stats.Worker}");
                    }
                }
            });

            logger.Info(() => "--------------------------------------------");
            continue;
        }
        ;

        // MinerNL calculate & update miner, worker hashrates
        foreach (var minerHashes in byMiner)
        {
            double minerTotalHashrate = 0;

            await cf.RunTx(async (con, tx) =>
            {
                stats.Miner = minerHashes.Key;

                // book keeping
                currentNonZeroMinerWorkers.Add(buildKey(stats.Miner));

                foreach (var item in minerHashes)
                {
                    // set default values
                    double minerHashrate = 0;
                    stats.Worker = "Default_Miner";
                    stats.Hashrate = 0;
                    stats.SharesPerSecond = 0;

                    // miner stats calculation windows — derived from the miner-wide
                    // Min/Max, so the same value is recomputed for each worker item
                    var TimeFrameBeforeFirstShare = ((minerHashes.Min(x => x.FirstShare) - TimeFrom).TotalSeconds);
                    var TimeFrameAfterLastShare = ((CurrentTimeUtc - minerHashes.Max(x => x.LastShare)).TotalSeconds);
                    var TimeFrameFirstLastShare = (StatsWindowsTimeFrame.TotalSeconds - TimeFrameBeforeFirstShare - TimeFrameAfterLastShare);

                    var minerHashTimeFrame = StatsWindowsTimeFrame.TotalSeconds;

                    // shrink the averaging window when the miner's first share arrived
                    // well after the window began (>= 10% in)
                    if (TimeFrameBeforeFirstShare >= (StatsWindowsTimeFrame.TotalSeconds * 0.1))
                    {
                        minerHashTimeFrame = Math.Floor(StatsWindowsTimeFrame.TotalSeconds - TimeFrameBeforeFirstShare);
                    }

                    // NOTE(review): this branch *adds* the idle tail to the window,
                    // diluting the hashrate of a miner that stopped early — confirm
                    // this (rather than subtraction) is the intended behavior
                    if (TimeFrameAfterLastShare >= (StatsWindowsTimeFrame.TotalSeconds * 0.1))
                    {
                        minerHashTimeFrame = Math.Floor(StatsWindowsTimeFrame.TotalSeconds + TimeFrameAfterLastShare);
                    }

                    // both edges idle: combined adjustment (overrides the two cases above)
                    if ((TimeFrameBeforeFirstShare >= (StatsWindowsTimeFrame.TotalSeconds * 0.1)) && (TimeFrameAfterLastShare >= (StatsWindowsTimeFrame.TotalSeconds * 0.1)))
                    {
                        minerHashTimeFrame = (StatsWindowsTimeFrame.TotalSeconds - TimeFrameBeforeFirstShare + TimeFrameAfterLastShare);
                    }

                    // guard against a zero/negative window (division below)
                    if (minerHashTimeFrame < 1)
                    {
                        minerHashTimeFrame = 1;
                    }
                    ;

                    // logger.Info(() => $"[{poolId}] StatsWindowsTimeFrame : {StatsWindowsTimeFrame.TotalSeconds} | minerHashTimeFrame : {minerHashTimeFrame} | TimeFrameFirstLastShare : {TimeFrameFirstLastShare} | TimeFrameBeforeFirstShare: {TimeFrameBeforeFirstShare} | TimeFrameAfterLastShare: {TimeFrameAfterLastShare}");

                    // calculate miner/worker stats
                    minerHashrate = pool.HashrateFromShares(item.Sum, minerHashTimeFrame);
                    minerHashrate = Math.Floor(minerHashrate);
                    minerTotalHashrate += minerHashrate;

                    stats.Hashrate = minerHashrate;

                    if (item.Worker != null)
                    {
                        stats.Worker = item.Worker;
                    }

                    stats.SharesPerSecond = Math.Round(((double)item.Count / minerHashTimeFrame), 3);

                    // persist. Save miner stats in DB.
                    await statsRepo.InsertMinerWorkerPerformanceStatsAsync(con, tx, stats);

                    // broadcast
                    messageBus.NotifyHashrateUpdated(pool.Config.Id, minerHashrate, stats.Miner, stats.Worker);

                    logger.Info(() => $"[{poolId}] Miner: {stats.Miner}.{stats.Worker} | Hashrate: {minerHashrate} | HashTimeFrame : {minerHashTimeFrame} | Shares per sec: {stats.SharesPerSecond}");

                    // book keeping
                    currentNonZeroMinerWorkers.Add(buildKey(stats.Miner, stats.Worker));
                }
            });

            messageBus.NotifyHashrateUpdated(pool.Config.Id, minerTotalHashrate, stats.Miner, null);
            logger.Info(() => $"[{poolId}] Total miner hashrate: {stats.Miner} | {minerTotalHashrate}");
        }
        // MinerNL end calculate & update miner, worker hashrates

        logger.Info(() => "--------------------------------------------");
    }
}
/// <summary>
/// Recalculates pool and miner/worker hashrates from the share accumulations of the
/// last <c>HashrateCalculationWindow</c> seconds and persists them (synchronous variant,
/// no message-bus notifications).
/// </summary>
private void UpdatePoolHashrates()
{
    var start = clock.Now;
    var target = start.AddSeconds(-HashrateCalculationWindow);

    // reused across all pools/miners/workers; only the varying fields are overwritten below
    var stats = new MinerWorkerPerformanceStats
    {
        Created = start
    };

    var poolIds = pools.Keys;

    foreach (var poolId in poolIds)
    {
        stats.PoolId = poolId;
        logger.Info(() => $"Updating hashrates for pool {poolId}");
        var pool = pools[poolId];

        // fetch per-miner/worker share accumulation for the calculation window
        var result = readFaultPolicy.Execute(() =>
            cf.Run(con => shareRepo.GetHashAccumulationBetweenCreated(con, poolId, target, start)));

        var byMiner = result.GroupBy(x => x.Miner).ToArray();

        if (result.Length > 0)
        {
            // calculate pool stats over the actually observed share window
            var windowActual = (result.Max(x => x.LastShare) - result.Min(x => x.FirstShare)).TotalSeconds;

            if (windowActual >= MinHashrateCalculationWindow)
            {
                var poolHashesAccumulated = result.Sum(x => x.Sum);
                var poolHashesCountAccumulated = result.Sum(x => x.Count);
                var poolHashrate = pool.HashrateFromShares(poolHashesAccumulated, windowActual) * HashrateBoostFactor;

                pool.PoolStats.ConnectedMiners = byMiner.Length;
                pool.PoolStats.PoolHashrate = (ulong) Math.Ceiling(poolHashrate);
                pool.PoolStats.SharesPerSecond = (int) (poolHashesCountAccumulated / windowActual);
            }
        }

        // persist pool stats (written even when no shares arrived, preserving the last computed values)
        cf.RunTx((con, tx) =>
        {
            var mapped = new Persistence.Model.PoolStats
            {
                PoolId = poolId,
                Created = start
            };

            mapper.Map(pool.PoolStats, mapped);
            mapper.Map(pool.NetworkStats, mapped);

            statsRepo.InsertPoolStats(con, tx, mapped);
        });

        if (result.Length == 0)
            continue;

        // calculate & update miner, worker hashrates
        foreach (var minerHashes in byMiner)
        {
            cf.RunTx((con, tx) =>
            {
                stats.Miner = minerHashes.Key;

                // the miner's share window is identical for every worker item; compute it
                // once instead of re-scanning Max/Min per item (was accidentally O(n²))
                var windowActual = (minerHashes.Max(x => x.LastShare) - minerHashes.Min(x => x.FirstShare)).TotalSeconds;

                if (windowActual >= MinHashrateCalculationWindow)
                {
                    foreach (var item in minerHashes)
                    {
                        var hashrate = pool.HashrateFromShares(item.Sum, windowActual) * HashrateBoostFactor;

                        stats.Hashrate = hashrate;
                        stats.Worker = item.Worker;
                        stats.SharesPerSecond = (double) item.Count / windowActual;

                        statsRepo.InsertMinerWorkerPerformanceStats(con, tx, stats);
                    }
                }
            });
        }
    }
}