/// <summary>
/// Builds a leaky bucket algorithm with a single generated rule, backed by the
/// requested storage ("redis" connects to a local Redis, anything else is in-process).
/// </summary>
/// <param name="storageType">"redis" for Redis-backed storage, otherwise in-process.</param>
/// <param name="capacity">Bucket capacity of the rule.</param>
/// <param name="outflowQuantity">Outflow quantity per unit of the rule.</param>
/// <param name="outflowUnit">Length of one outflow unit.</param>
/// <param name="lockSeconds">Seconds the target stays locked after overflowing.</param>
/// <param name="startTimeType">How the rule's start time is aligned; defaults to FromCurrent.</param>
/// <returns>The configured algorithm instance.</returns>
private IAlgorithm GetAlgorithm(string storageType, int capacity, int outflowQuantity, TimeSpan outflowUnit, int lockSeconds, StartTimeType startTimeType = StartTimeType.FromCurrent)
{
    var rules = new LeakyBucketRule[]
    {
        new LeakyBucketRule(capacity, outflowQuantity, outflowUnit)
        {
            Id = Guid.NewGuid().ToString(),
            LockSeconds = lockSeconds,
            StartTimeType = startTimeType,
            // The simulated requests are always SimulationRequest instances.
            ExtractTarget = request => (request as SimulationRequest).RequestResource,
            CheckRuleMatching = request => true,
        }
    };

    if (storageType == "redis")
    {
        // NOTE(review): a new multiplexer per call is fine for a sample,
        // but production code should reuse a single connection.
        var redisClient = StackExchange.Redis.ConnectionMultiplexer.Connect("127.0.0.1");
        return new RedisLeakyBucketAlgorithm(rules, redisClient);
    }

    return new InProcessLeakyBucketAlgorithm(rules);
}
/// <summary>
/// Builds a leaky bucket algorithm with one rule whose capacity is
/// <paramref name="limitNumber"/> and whose outflow is fixed at 100 per second.
/// </summary>
/// <param name="storageType">"redis" for Redis-backed storage (uses the shared redisClient), otherwise in-process.</param>
/// <param name="limitNumber">Bucket capacity of the generated rule.</param>
/// <returns>The configured algorithm instance.</returns>
private IAlgorithm GetLeakyBucketProcessor(string storageType, int limitNumber)
{
    var rules = new LeakyBucketRule[]
    {
        new LeakyBucketRule(limitNumber, 100, TimeSpan.FromSeconds(1))
        {
            Id = Guid.NewGuid().ToString(),
            LockSeconds = 1,
            // The simulated requests are always SimulationRequest instances.
            ExtractTarget = request => (request as SimulationRequest).RequestResource,
            CheckRuleMatching = request => true,
        }
    };

    // redisClient is a member declared elsewhere in this class.
    return storageType == "redis"
        ? (IAlgorithm)new RedisLeakyBucketAlgorithm(rules, redisClient)
        : new InProcessLeakyBucketAlgorithm(rules);
}
/// <summary>
/// Increase the count value of the rate limit target for leaky bucket algorithm.
/// Creates the bucket on first sight of the target; on later calls it either
/// accumulates within the current outflow unit or rolls the window forward by
/// however many whole units have elapsed, draining the bucket accordingly.
/// </summary>
/// <param name="target">The target</param>
/// <param name="amount">amount of increase</param>
/// <param name="currentRule">The current rule</param>
/// <returns>
/// Tuple of (isLimited, amount of request in the bucket, wait time).
/// (true, -1, -1) when the target is locked; (true, Capacity, -1) when the
/// new total would overflow the bucket; otherwise (false, queued count
/// beyond the current unit's outflow, computed wait time).
/// </returns>
public Tuple<bool, long, long> InnerCheckSingleRule(string target, long amount, LeakyBucketRule currentRule)
{
    // A previously-overflowed target stays rejected until its lock expires.
    bool locked = CheckLocked(target);
    if (locked)
    {
        return Tuple.Create(true, -1L, -1L);
    }

    var outflowUnit = (int)currentRule.OutflowUnit.TotalMilliseconds;
    var currentTime = _timeProvider.GetCurrentLocalTime();

    // NOTE(review): locking on the target string only serializes callers that
    // share the same string instance (or an interned equal string) — confirm
    // all callers obtain the target from a common source, otherwise this lock
    // does not guarantee mutual exclusion.
    lock (target)
    {
        var countData = _cache.GetCacheItem(target);
        if (countData == null)
        {
            // First request for this target: create its bucket with the initial
            // amount and a start time aligned per the rule's StartTimeType.
            var startTime = AlgorithmStartTime.ToSpecifiedTypeTime(currentTime, TimeSpan.FromMilliseconds(outflowUnit), currentRule.StartTimeType);
            _cache.Add(target, new CountValue(amount) { LastFlowTime = startTime }, DateTimeOffset.MaxValue);
            return Tuple.Create(false, 0L, 0L);
        }

        var countValue = (CountValue)countData.Value;
        var lastTime = countValue.LastFlowTime;       // start of the unit we last drained in
        var pastTime = currentTime - lastTime;
        var lastTimeChanged = false;
        var pastTimeMilliseconds = pastTime.TotalMilliseconds;

        long newCount = 0;
        long wait = 0;
        if (pastTimeMilliseconds < outflowUnit)
        {
            // Still inside the current outflow unit: just accumulate.
            newCount = countValue.Value + amount;
            if (newCount <= currentRule.Capacity + currentRule.OutflowQuantityPerUnit)
            {
                // Fits in the bucket (capacity plus the quantity that flows out
                // during this unit); compute how long this request must wait.
                var currentUnitRestTime = outflowUnit - pastTimeMilliseconds;
                wait = CalculateWaitTime(currentRule.OutflowQuantityPerUnit, outflowUnit, newCount, currentUnitRestTime);
            }
            else
            {
                // Overflow: optionally lock the target, and reject without
                // recording the attempted amount.
                if (currentRule.LockSeconds > 0)
                {
                    TryLock(target, currentTime, TimeSpan.FromSeconds(currentRule.LockSeconds));
                }
                return Tuple.Create(true, currentRule.Capacity, -1L);
            }
        }
        else
        {
            // One or more whole outflow units have elapsed: advance the window
            // start by that many units and drain the corresponding quantity.
            var pastOutflowUnitQuantity = (int)(pastTimeMilliseconds / outflowUnit);
            lastTime = lastTime.AddMilliseconds(pastOutflowUnitQuantity * outflowUnit);
            lastTimeChanged = true;

            if (countValue.Value < currentRule.OutflowQuantityPerUnit)
            {
                // The bucket fully drained during the gap; start fresh with
                // only the new amount and no wait.
                newCount = amount;
                wait = 0;
            }
            else
            {
                // Subtract everything that flowed out while we were idle; if
                // that empties the bucket, keep just the new amount.
                var pastOutflowQuantity = currentRule.OutflowQuantityPerUnit * pastOutflowUnitQuantity;
                newCount = countValue.Value - pastOutflowQuantity + amount;
                newCount = newCount > 0 ? newCount : amount;
                var currentUnitRestTime = outflowUnit - (currentTime - lastTime).TotalMilliseconds;
                wait = CalculateWaitTime(currentRule.OutflowQuantityPerUnit, outflowUnit, newCount, currentUnitRestTime);
            }
        }

        countValue.Value = newCount;
        if (lastTimeChanged)
        {
            countValue.LastFlowTime = lastTime;
        }

        // Report only what is queued beyond the current unit's outflow.
        var viewCount = newCount - currentRule.OutflowQuantityPerUnit;
        viewCount = viewCount < 0 ? 0 : viewCount;
        return Tuple.Create(false, viewCount, wait);
    }
}
/// <summary>
/// Demo driver for the leaky bucket algorithm: fires 160 requests at the
/// "home" resource, changing the rule's capacity at iteration 50 and its
/// outflow rate at iteration 70, with pauses sprinkled in to show draining.
/// </summary>
private static void DoLeakyBucket()
{
    // Local factory so the initial rule and both mid-run updates share one shape.
    static LeakyBucketRule[] BuildRules(int capacity, int outflow) => new LeakyBucketRule[]
    {
        new LeakyBucketRule(capacity, outflow, TimeSpan.FromSeconds(1))
        {
            Id = "2",
            ExtractTarget = request => (request as SimulationRequest).RequestResource,
            CheckRuleMatching = request => true,
        }
    };

    var timeProvider = new LocalTimeProvider();
    var algorithm = new InProcessLeakyBucketAlgorithm(BuildRules(30, 10), timeProvider, true);
    // var redisClient = StackExchange.Redis.ConnectionMultiplexer.Connect("127.0.0.1");
    // var algorithm = new RedisLeakyBucketAlgorithm(BuildRules(30, 10), redisClient, timeProvider, true);

    for (int i = 0; i < 160; i++)
    {
        switch (i)
        {
            case 50:
                // Grow the bucket capacity mid-run.
                algorithm.UpdateRules(BuildRules(50, 10));
                break;
            case 70:
                // Attention:
                // If you use delayed processing, such as Task.Delay,
                // increasing the outflow rate will have a greater damage to the data processing sequence.
                algorithm.UpdateRules(BuildRules(50, 20));
                Thread.Sleep(1000);
                break;
            case 110:
            case 120:
            case 130:
            case 140:
            case 150:
                Thread.Sleep(1000);
                break;
        }

        var result = algorithm.Check(new SimulationRequest()
        {
            RequestId = Guid.NewGuid().ToString(),
            RequestResource = "home",
            Parameters = new Dictionary<string, string>()
            {
                { "from", "sample" },
            }
        }, null);

        // Wait in the return value is very useful, you can use it in the delay queue,
        // you can also make the current thread pause for a specified period of time.
        foreach (var r in result.RuleCheckResults)
        {
            Console.WriteLine($"[{i}] Target:{r.Target},IsLimit:{r.IsLimit},Count:{r.Count},Wait:{r.Wait}.");
        }
    }
}