Example no. 1
            public void ReturnsNonZeroTimeToNextSlotTooManyRequestsAreSentInAShortPeriodOfTime(int slotsPerWindow)
            {
                TimeService.CurrentDateTime.Returns(baseTime);
                var bucket = new LeakyBucket(TimeService, AnalyticsService, slotsPerWindow);

                bucket.TryClaimFreeSlots(slotsPerWindow, out _);
                var claimed = bucket.TryClaimFreeSlot(out var time);

                claimed.Should().BeFalse();
                time.Should().BeGreaterThan(TimeSpan.Zero);
            }
Example no. 2
            public void ReturnsNonZeroTimeToNextSlotWhenTooManySlotsAreUsedInAShortPeriodOfTime(int slotsPerWindowLimit)
            {
                timeService.CurrentDateTime.Returns(baseTime);
                var bucket = new LeakyBucket(timeService, slotsPerWindowLimit);

                bucket.TryClaimFreeSlot(out _);

                var claimed = bucket.TryClaimFreeSlots(slotsPerWindowLimit, out var time);

                claimed.Should().BeFalse();
                time.Should().BeGreaterThan(TimeSpan.Zero);
            }
Example no. 3
            public void SendsAllRequestsInAShortPeriodOfTimeUntilReachingTheLimit(int slotsPerWindow)
            {
                TimeService.CurrentDateTime.Returns(baseTime);
                var bucket = new LeakyBucket(TimeService, AnalyticsService, slotsPerWindow);

                for (var i = 0; i < slotsPerWindow; i++)
                {
                    var claimed = bucket.TryClaimFreeSlot(out var timeToNextFreeSlot);

                    claimed.Should().BeTrue();
                    timeToNextFreeSlot.Should().Be(TimeSpan.Zero);
                }
            }
Example no. 4
            public void ThrowsWhenTooManySlotsAreRequested(PositiveInt slotsPerWindow)
            {
                if (slotsPerWindow.Get == int.MaxValue)
                {
                    return;
                }

                TimeService.CurrentDateTime.Returns(baseTime);
                var bucket = new LeakyBucket(TimeService, AnalyticsService, slotsPerWindow.Get);

                Action claimMany = () => bucket.TryClaimFreeSlots(slotsPerWindow.Get + 1, out _);

                claimMany.Should().Throw<InvalidOperationException>();
            }
Example no. 5
            public void CalculatesTheDelayUntilTheNextFreeSlot()
            {
                TimeService.CurrentDateTime.Returns(
                    baseTime,
                    baseTime + TimeSpan.FromSeconds(6),
                    baseTime + TimeSpan.FromSeconds(8));
                var bucket = new LeakyBucket(TimeService, AnalyticsService, slotsPerWindow: 2, movingWindowSize: TimeSpan.FromSeconds(10));

                bucket.TryClaimFreeSlot(out _);
                bucket.TryClaimFreeSlot(out _);
                var claimed = bucket.TryClaimFreeSlot(out var timeToFreeSlot);

                claimed.Should().BeFalse();
                timeToFreeSlot.Should().Be(TimeSpan.FromSeconds(2));
            }
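The Toggl-style tests above (Examples no. 1-5, and no. 12 further down) drive the bucket through an injected time service and expect TryClaimFreeSlot/TryClaimFreeSlots to report how long to wait once the moving window is full. The following is a minimal sketch written only to make the expected semantics concrete: it is not the library's actual implementation, it drops the analytics dependency, and the ITimeService shape shown here is a simplified assumption.

using System;
using System.Collections.Generic;

// Simplified stand-in for the time service the tests substitute.
public interface ITimeService
{
    DateTimeOffset CurrentDateTime { get; }
}

public sealed class LeakyBucket
{
    private readonly ITimeService timeService;
    private readonly int slotsPerWindow;
    private readonly TimeSpan movingWindowSize;
    private readonly Queue<DateTimeOffset> claims = new Queue<DateTimeOffset>();

    public LeakyBucket(ITimeService timeService, int slotsPerWindow, TimeSpan? movingWindowSize = null)
    {
        this.timeService = timeService;
        this.slotsPerWindow = slotsPerWindow;
        this.movingWindowSize = movingWindowSize ?? TimeSpan.FromSeconds(10);
    }

    public bool TryClaimFreeSlot(out TimeSpan timeToFreeSlot)
        => TryClaimFreeSlots(1, out timeToFreeSlot);

    public bool TryClaimFreeSlots(int numberOfSlots, out TimeSpan timeToFreeSlot)
    {
        if (numberOfSlots > slotsPerWindow)
            throw new InvalidOperationException("Cannot claim more slots than the bucket holds.");

        var now = timeService.CurrentDateTime;

        // Forget claims that have already left the moving window.
        while (claims.Count > 0 && now - claims.Peek() >= movingWindowSize)
            claims.Dequeue();

        if (claims.Count + numberOfSlots > slotsPerWindow)
        {
            // The oldest remaining claim determines when the next slot frees up
            // (e.g. 2 seconds in Example no. 5: the claim from t+0s expires at t+10s, and we ask at t+8s).
            timeToFreeSlot = claims.Peek() + movingWindowSize - now;
            return false;
        }

        for (var i = 0; i < numberOfSlots; i++)
            claims.Enqueue(now);

        timeToFreeSlot = TimeSpan.Zero;
        return true;
    }
}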
Example no. 6
        public async Task<T> Run<T>(BizwebRequestMessage baseReqMsg,
                                    ExecuteRequestAsync<T> executeRequestAsync)
        {
            var         accessToken = GetAccessToken(baseReqMsg);
            LeakyBucket bucket      = null;

            if (accessToken != null)
            {
                bucket = LeakyBucket.GetBucketByToken(accessToken);
            }

            while (true)
            {
                using (var reqMsg = baseReqMsg.Clone())
                {
                    if (accessToken != null)
                    {
                        await bucket.GrantAsync();
                    }

                    try
                    {
                        var fullResult = await executeRequestAsync(reqMsg);

                        var bucketState       = GetBucketState(fullResult.Response);
                        var reportedFillLevel = bucketState.Item1;
                        var reportedCapacity  = bucketState.Item2;

                        if (reportedFillLevel != null && reportedCapacity != null)
                        {
                            bucket?.SetBucketState(reportedFillLevel.Value, reportedCapacity.Value);
                        }

                        return fullResult.Result;
                    }
                    catch (BizwebSharpException)
                    {
                        //An exception may still occur:
                        //-Shopify may have a slightly different algorithm
                        //-Shopify may change to a different algorithm in the future
                        //-There may be timing and latency delays
                        //-Multiple programs may use the same access token
                        //-Multiple instances of the same program may use the same access token
                        await Task.Delay(THROTTLE_DELAY);
                    }
                }
            }
        }
Example no. 7
        public async Task<RequestResult<T>> Run<T>(CloneableRequestMessage baseRequest, ExecuteRequestAsync<T> executeRequestAsync, CancellationToken cancellationToken)
        {
            var         accessToken = GetAccessToken(baseRequest);
            LeakyBucket bucket      = null;

            if (accessToken != null)
            {
                bucket = _shopAccessTokenToLeakyBucket.GetOrAdd(accessToken, _ => new LeakyBucket());
            }

            while (true)
            {
                var request = baseRequest.Clone();

                if (accessToken != null)
                {
                    await bucket.GrantAsync();
                }

                try
                {
                    var fullResult = await executeRequestAsync(request);

                    var bucketState = LeakyBucketState.Get(fullResult.Response);

                    if (bucketState != null)
                    {
                        bucket?.SetState(bucketState);
                    }

                    return fullResult;
                }
                catch (ShopifyRateLimitException ex) when (ex.Reason == ShopifyRateLimitReason.BucketFull || !_retryOnlyIfLeakyBucketFull)
                {
                    //Only retry if breach caused by full bucket
                    //Other limits will bubble the exception because it's not clear how long the program should wait
                    //Even if there is a Retry-After header, we probably don't want the thread to sleep for potentially many hours
                    //
                    //An exception may still occur:
                    //-Shopify may have a slightly different algorithm
                    //-Shopify may change to a different algorithm in the future
                    //-There may be timing and latency delays
                    //-Multiple programs may use the same access token
                    //-Multiple instances of the same program may use the same access token
                    await Task.Delay(THROTTLE_DELAY, cancellationToken);
                }
            }
        }
Example no. 8
        public async Task ReturnsDelayTransitionWhenTheLeakyBucketDoesNotHaveFreeSlots()
        {
            var delay = TimeSpan.FromSeconds(123.45);

            LeakyBucket.TryClaimFreeSlot(out _).Returns(x =>
            {
                x[0] = delay;
                return false;
            });
            var state  = CreateState();
            var entity = Substitute.For<IThreadSafeTestModel>();

            var transition = await state.Start(entity);

            transition.Result.Should().Be(state.PreventOverloadingServer);
            ((Transition<TimeSpan>)transition).Parameter.Should().Be(delay);
        }
Example no. 9
        public void WaitIfNotEnoughAvailable()
        {
            now = DateTime.UtcNow;

            var b = new LeakyBucket(10, 2, () => now);

            Assert.Equal(10, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(9).IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            Assert.False(b.WaitForAvailableAsync(4).IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            Assert.False(b.WaitForAvailableAsync(5).IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);
        }
Example no. 10
        // conclude the stop watch upon receiving a response
        public virtual void Finish(T stopWatch)
        {
            if (stopWatch == null) // in case the current request was ignored by this.Start
            {
                return;
            }
            var bLogged             = false; // so that the stopWatch's logging happens outside the locked critical section
            LinkedListNode <T> node = null;

            lock (this)
            {
                if (LeakyBucketHash.ContainsKey(stopWatch.StartTime)) // current request has been sampled
                {
                    node = LeakyBucketHash[stopWatch.StartTime];
                    LeakyBucketHash.Remove(stopWatch.StartTime);
                    if ((int)(DateTime.Now - node.Value.StartTime).TotalSeconds > Configue.TimeOutInSecond)
                    {
                        OnTimeOut();
                    }
                    else
                    {
                        OnFinish();
                    }

                    if (node.Value.StartTime < BottomOfBucket.Value.StartTime) // in case the current request has already been leaked
                    {
                        bLogged = true;
                        LeakyBucket.Remove(node);
                        LastLogTime = DateTime.Now;
                    }
                }
            }
            if (node == null)
            {
                return;
            }

            node.Value.Stop(null); // stop the located stopWatch

            if (bLogged)
            {
                Logger.LogInfo(">>> Log upon receiving leaked response, sent at " + node.Value.StartTime.Ticks);
                node.Value.Log();
            }
        }
Example no. 11
        public void CanUseRateLimitWithBurstNoDelay()
        {
            Assert.True(LimitRequestZone.TryParse("zone=mylimit rate=10r/s burst=20 nodelay", out var limitRequestZone));
            var delay = new MockDelay();
            var queue = new LeakyBucket(limitRequestZone, delay);

            Assert.Equal(20, queue.RemainingSlots);
            Assert.Equal(0, queue.UsedSlots);
            // 1st request should be processed immediately
            var wait       = queue.Throttle();
            var processing = queue.DrainNext();

            Assert.True(wait.Wait(10));
            Assert.Equal(19, queue.RemainingSlots);
            Assert.Equal(1, queue.UsedSlots);
            Assert.True(processing.Wait(10)); // Thanks to nodelay, processing does not have to wait

            // 2nd request should be processed immediately, but the slot is kept
            wait       = queue.Throttle();
            processing = queue.DrainNext();
            Assert.True(wait.Wait(10));
            Assert.True(processing.Wait(10));       // Thanks to nodelay, processing does not have to wait
            Assert.Equal(18, queue.RemainingSlots); // But the slot is not freed yet
            Assert.Equal(2, queue.UsedSlots);

            // Can process, thanks to nodelay
            delay.AdvanceMilliseconds(99);
            Assert.Equal(18, queue.RemainingSlots);
            Assert.Equal(2, queue.UsedSlots);

            // Though after 100ms, one slot should be freed, and the queued task executed
            delay.AdvanceMilliseconds(1);
            Thread.Sleep(1); // Sleep necessary as the WaitAfter is running concurrently
            Assert.Equal(19, queue.RemainingSlots);
            Assert.Equal(1, queue.UsedSlots);
            Thread.Sleep(1); // Sleep necessary as the WaitAfter is running concurrently

            // +100 ms passed, second slot is released
            delay.AdvanceMilliseconds(99);
            Assert.Equal(19, queue.RemainingSlots);
            delay.AdvanceMilliseconds(1);

            Thread.Sleep(1); // Sleep necessary as the WaitAfter is running concurrently
            Assert.Equal(20, queue.RemainingSlots);
        }
Example no. 12
            public void AllowsSlotsSpreadOutAcrossTheTimeLimitSoThatTheyAreNotSentTooCloseToEachOther(int slotsPerWindow)
            {
                var movingWindowSize            = TimeSpan.FromSeconds(10);
                var uniformDelayBetweenRequests = movingWindowSize / slotsPerWindow;
                var times = Enumerable.Range(1, 2 * slotsPerWindow)
                            .Select(n => baseTime + (n * uniformDelayBetweenRequests)).ToArray();

                TimeService.CurrentDateTime.Returns(baseTime, times);
                var bucket = new LeakyBucket(TimeService, AnalyticsService, slotsPerWindow, movingWindowSize);

                for (var i = 0; i < times.Length - 1; i++)
                {
                    var claimed = bucket.TryClaimFreeSlot(out var timeToNextSlot);

                    claimed.Should().BeTrue();
                    timeToNextSlot.Should().Be(TimeSpan.Zero);
                }
            }
Example no. 13
        public void CanUseRateLimitWithBurst()
        {
            Assert.True(LimitRequestZone.TryParse("zone=mylimit rate=10r/s burst=20", out var limitRequestZone));
            var delay = new MockDelay();
            var queue = new LeakyBucket(limitRequestZone, delay);

            Assert.Equal(20, queue.RemainingSlots);
            Assert.Equal(0, queue.UsedSlots);

            // 1st request should be processed immediately
            var wait       = queue.Throttle();
            var processing = queue.DrainNext();

            Assert.True(wait.Wait(10));
            Assert.Equal(19, queue.RemainingSlots);
            Assert.Equal(1, queue.UsedSlots);

            // 2nd request should be queued
            wait = queue.Throttle();
            Assert.False(wait.Wait(10));
            Assert.Equal(18, queue.RemainingSlots);
            Assert.Equal(2, queue.UsedSlots);

            // Can't process, as we need to throttle
            Assert.False(processing.Wait(10));
            delay.AdvanceMilliseconds(99);
            Assert.False(processing.Wait(10));
            Assert.False(wait.Wait(10));

            // Though after 100ms, one slot should be freed, and the queued task executed
            delay.AdvanceMilliseconds(1);
            Assert.True(processing.Wait(10));
            processing = queue.DrainNext();
            Assert.True(wait.Wait(10));
            Assert.Equal(19, queue.RemainingSlots);
            Assert.Equal(1, queue.UsedSlots);

            // But processing is still blocked...
            Assert.False(processing.Wait(10));
            // Until 100 ms have passed
            delay.AdvanceMilliseconds(100);
            Assert.True(processing.Wait(10));
            Assert.Equal(20, queue.RemainingSlots);
        }
Example no. 14
        public async Task<T> Run<T>(CloneableRequestMessage baseRequest, ExecuteRequestAsync<T> executeRequestAsync)
        {
            var         accessToken = GetAccessToken(baseRequest);
            LeakyBucket bucket      = null;

            if (accessToken != null)
            {
                bucket = _shopAccessTokenToLeakyBucket.GetOrAdd(accessToken, _ => new LeakyBucket());
            }

            while (true)
            {
                var request = baseRequest.Clone();

                if (accessToken != null)
                {
                    await bucket.GrantAsync();
                }

                try
                {
                    var fullResult = await executeRequestAsync(request);

                    var bucketState = GetBucketState(fullResult.Response);

                    if (bucketState != null)
                    {
                        bucket?.SetState(bucketState);
                    }

                    return fullResult.Result;
                }
                catch (SquareSpaceRateLimitException)
                {
                    //An exception may still occur:
                    //-SquareSpace may have a slightly different algorithm
                    //-SquareSpace may change to a different algorithm in the future
                    //-There may be timing and latency delays
                    //-Multiple programs may use the same access token
                    //-Multiple instances of the same program may use the same access token
                    await Task.Delay(THROTTLE_DELAY);
                }
            }
        }
Example no. 15
        public static void ConfigureTransitions(
            ITransitionConfigurator transitions,
            ITogglDatabase database,
            ITogglApi api,
            ITogglDataSource dataSource,
            IScheduler scheduler,
            ITimeService timeService,
            IAnalyticsService analyticsService,
            StateMachineEntryPoints entryPoints,
            ISyncStateQueue queue)
        {
            var minutesLeakyBucket = new LeakyBucket(timeService, slotsPerWindow: 60, movingWindowSize: TimeSpan.FromSeconds(60));
            var secondsLeakyBucket = new LeakyBucket(timeService, slotsPerWindow: 3, movingWindowSize: TimeSpan.FromSeconds(1));
            var rateLimiter        = new RateLimiter(secondsLeakyBucket, scheduler);

            configurePullTransitions(transitions, database, api, dataSource, timeService, analyticsService, scheduler, entryPoints.StartPullSync, minutesLeakyBucket, rateLimiter, queue);
            configurePushTransitions(transitions, api, dataSource, analyticsService, minutesLeakyBucket, rateLimiter, scheduler, entryPoints.StartPushSync);
            configureCleanUpTransitions(transitions, timeService, dataSource, analyticsService, entryPoints.StartCleanUp);
        }
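Example no. 15 above pairs the per-second bucket with a RateLimiter and an Rx scheduler, and Example no. 25 further down stubs RateLimiter.WaitForFreeSlot() to return an observable. A rough, hypothetical sketch of such a limiter (not the actual Toggl class) could defer each caller until the bucket reports a free slot:

using System;
using System.Reactive;
using System.Reactive.Concurrency;
using System.Reactive.Linq;

public sealed class RateLimiter
{
    private readonly LeakyBucket bucket;
    private readonly IScheduler scheduler;

    public RateLimiter(LeakyBucket bucket, IScheduler scheduler)
    {
        this.bucket = bucket;
        this.scheduler = scheduler;
    }

    // Completes immediately when a slot is free; otherwise waits out the
    // reported delay on the injected scheduler and tries to claim again.
    public IObservable<Unit> WaitForFreeSlot()
        => Observable.Defer(() =>
            bucket.TryClaimFreeSlot(out var timeToFreeSlot)
                ? Observable.Return(Unit.Default)
                : Observable.Timer(timeToFreeSlot, scheduler)
                    .SelectMany(_ => WaitForFreeSlot()));
}

Injecting the scheduler keeps the waiting testable, which matches how Example no. 15 passes the scheduler into the RateLimiter constructor.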
Example no. 16
        public void RunSynchronouslyIfEnoughAvailable()
        {
            now = DateTime.UtcNow;

            var b = new LeakyBucket(40, 2, () => now);

            Assert.Equal(40, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(1).IsCompleted);
            Assert.Equal(39, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(1).IsCompleted);
            Assert.Equal(38, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(1).IsCompleted);
            Assert.Equal(37, b.ComputedCurrentlyAvailable);

            now = now.AddSeconds(1);
            Assert.Equal(39, b.ComputedCurrentlyAvailable);
        }
Example no. 17
        public async void CanUseRateLimitWithBurstNoDelay2()
        {
            Assert.True(LimitRequestZone.TryParse("zone=mylimit rate=10r/s burst=20 nodelay", out var limitRequestZone));
            var delay = new MockDelay();
            var queue = new LeakyBucket(limitRequestZone, delay);

            List<Task> waits = new List<Task>();

            for (int i = 0; i < 20; i++)
            {
                waits.Add(queue.Throttle());
            }
            Assert.False(await queue.Throttle());
            for (int i = 0; i < 20; i++)
            {
                delay.AdvanceMilliseconds(100);
                var processing = queue.DrainNext();
                Task.WaitAny(waits.ToArray());
                waits.RemoveAll(t => t.IsCompletedSuccessfully);
                Assert.Equal(20 - i - 1, waits.Count);
                Assert.True(processing.IsCompletedSuccessfully);
            }
        }
Example no. 18
        public async Task BlockedSingleCallsCompleteAfterEnoughTime()
        {
            now = DateTime.UtcNow;

            var b = new LeakyBucket(10, 2, () => now);

            Assert.Equal(10, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(5).IsCompleted);
            Assert.Equal(5, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(4).IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            Task task = b.WaitForAvailableAsync(3);

            Assert.False(task.IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            await PassSeconds(2);

            Assert.True(task.IsCompleted);
            Assert.Equal(2, b.ComputedCurrentlyAvailable);
        }
Example no. 19
 static Func<IHttpRequest, Task<HttpResponse>> Wrap(Func<IHttpRequest, Task<HttpResponse>> handler, LeakyBucket rateLimitBucket)
 {
     return request => rateLimitBucket.Fill(1) ? handler(request) : Task.FromResult(TooManyRequests);
 }
Example no. 20
 public AsyncHttpServer(Dictionary<string, IRequestHandler> routes)
 {
     listener    = new HttpListener();
     leakyBucket = new LeakyBucket(100, 200, 2000, Environment.ProcessorCount * 10);
     this.routes = routes;
 }
Example no. 21
 RateLimitedEndpoint(Method method, Route route, Func<IHttpRequest, Task<HttpResponse>> handler, LeakyBucket rateLimitBucket) : base(method, route, Wrap(handler, rateLimitBucket))
 {
     this.rateLimitBucket = rateLimitBucket;
 }
Example no. 22
        public void BucketIsInitializedWithAvailableEqualToCapacity()
        {
            var bucket = new LeakyBucket(10, 1, Time.OneSecond);

            Assert.AreEqual(10, bucket.AvailableTokens);
        }
Example no. 23
        public void ConsumeBlocksUntilTokensAreAvailable()
        {
            var time         = new DateTime(2000, 01, 01);
            var timeProvider = new ManualTimeProvider(time);

            const int refillAmount   = 1;
            var       refillInterval = TimeSpan.FromMinutes(1);
            var       refillStrategy = new FixedIntervalRefillStrategy(timeProvider, refillAmount, refillInterval);

            // using spin wait strategy to ensure we update AvailableTokens as quickly as possible
            var sleepStrategy = new BusyWaitSleepStrategy();

            const int capacity = 10;
            var       bucket   = new LeakyBucket(capacity, sleepStrategy, refillStrategy, timeProvider);

            // first remove half the capacity
            bucket.Consume(capacity / 2);

            // we've consumed half of the available tokens
            Assert.AreEqual(capacity / 2, bucket.AvailableTokens);

            var taskStarted            = new ManualResetEvent(false);
            var bucketConsumeCompleted = new ManualResetEvent(false);

            Task.Run(() =>
            {
                taskStarted.Set();

                // this will block until time advances
                bucket.Consume(capacity);
                bucketConsumeCompleted.Set();
            });

            taskStarted.WaitOne();

            // each loop we'll advance one refill increment and when the loop finishes
            // the bucket's consume operation will succeed
            var initialAmount = bucket.AvailableTokens;

            for (int i = 0; i < 5; i++)
            {
                timeProvider.Advance(refillInterval);

                // on the last loop, the bucket will consume all ten
                if (i != 4)
                {
                    var count = 0;
                    while (++count < 100 && (initialAmount + (1 + i) * refillAmount) != bucket.AvailableTokens)
                    {
                        Thread.Sleep(1);
                    }

                    // each time we advance the number of available tokens will increment by the refill amount
                    Assert.AreEqual(initialAmount + (1 + i) * refillAmount, bucket.AvailableTokens,
                                    $"CurrentTime: {timeProvider.GetUtcNow():O}: Iteration: {i}"
                                    );
                }
            }

            // now that we've advanced, bucket consumption should have completed
            // we provide for a small timeout to support non-multi-threaded machines
            Assert.IsTrue(bucketConsumeCompleted.WaitOne(1000), "Timeout waiting for consumer");
            Assert.AreEqual(0, bucket.AvailableTokens, $"There are still available tokens {bucket.AvailableTokens}");
        }
Example no. 24
        public async Task BlockedMultipleCallsCompleteAfterEnoughTime()
        {
            now = DateTime.UtcNow;

            var b = new LeakyBucket(10, 2, () => now);

            Assert.Equal(10, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(9).IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            var task1 = b.WaitForAvailableAsync(3);

            Assert.False(task1.IsCompleted);

            var task2 = b.WaitForAvailableAsync(3);

            Assert.False(task2.IsCompleted);

            var task3 = b.WaitForAvailableAsync(3);

            Assert.False(task3.IsCompleted);

            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            await PassSeconds(1);

            Assert.True(task1.IsCompleted);
            Assert.False(task2.IsCompleted);
            Assert.False(task3.IsCompleted);
            Assert.Equal(0, b.ComputedCurrentlyAvailable);

            await PassSeconds(2);

            Assert.True(task2.IsCompleted);
            Assert.False(task3.IsCompleted);
            Assert.Equal(1, b.ComputedCurrentlyAvailable);

            await PassSeconds(1);

            Assert.True(task3.IsCompleted);
            Assert.Equal(0, b.ComputedCurrentlyAvailable);

            now = now.AddSeconds(5);
            Assert.Equal(10, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(4).IsCompleted);
            Assert.Equal(6, b.ComputedCurrentlyAvailable);

            Assert.True(b.WaitForAvailableAsync(4).IsCompleted);
            Assert.Equal(2, b.ComputedCurrentlyAvailable);

            var task4 = b.WaitForAvailableAsync(4);

            Assert.False(task4.IsCompleted);
            Assert.Equal(2, b.ComputedCurrentlyAvailable);

            await PassSeconds(1);

            Assert.True(task4.IsCompleted);
            Assert.Equal(0, b.ComputedCurrentlyAvailable);
        }
Example no. 25
 protected BasePushEntityStateTests()
 {
     RateLimiter.WaitForFreeSlot().Returns(Observable.Return(Unit.Default));
     LeakyBucket.TryClaimFreeSlot(out _).Returns(true);
 }
Example no. 26
        // leaky bucket implementation
        public virtual T Start()
        {
            T         stopWatch  = null;
            IList <T> toBeLogged = null;

            lock (this)
            {
                while (LeakyBucket.First != null && LeakyBucket.First.Value.StartTime < BottomOfBucket.Value.StartTime && (int)(DateTime.Now - LeakyBucket.First.Value.StartTime).TotalSeconds > Configue.TimeOutInSecond)
                {
                    LeakyBucket.First.Value.Timeout();
                    OnTimeOut();
                    LeakyBucketHash.Remove(LeakyBucket.First.Value.StartTime);
                    LeakyBucket.RemoveFirst();
                    LastLogTime = DateTime.Now;
                }

                // before tracking the current request, process the requests already in the bucket, leaking no more than the QPS allows
                int leakingSize = 0;
                if (BottomOfBucket != null)
                {
                    //Logger.LogInfo("bottom at " + BottomOfBucket.Value.RequestDetail["index"]);
                    leakingSize = GetLeakingSince(LastLogTime);
                }
                for (var i = 0; i < leakingSize; i++) // leaking limited number of items out of bucket according to QPS
                {
                    if (BottomOfBucket == null)       // bucket is empty;
                    {
                        break;
                    }
                    if (BottomOfBucket.Value.IsFinished()) // if it's already stopped/received when being leaked
                    {
                        var prev = BottomOfBucket;
                        BottomOfBucket = BottomOfBucket.Next; // leak

                        //directly remove and log
                        LeakyBucket.Remove(prev);
                        Logger.LogInfo(">> Logging the already finishsed when leaking, " + "sent at " + prev.Value.StartTime.Ticks);
                        if (toBeLogged == null)
                        {
                            toBeLogged = new List <T>();
                        }
                        toBeLogged.Add(prev.Value);
                        LastLogTime = DateTime.Now;
                    }
                    else
                    {
                        BottomOfBucket = BottomOfBucket.Next; // leak only
                    }
                    nItemsInBucket -= 1;
                }

                // if the bucket is full, ignore the current request to achieve down-sampling
                if (nItemsInBucket >= Configue.BucketSize)
                {
                    Logger.LogInfo("Ignored, too frequent, nItemsInBucket as " + nItemsInBucket);
                }
                else
                {
                    // enqueue and track this request
                    stopWatch = CreateStopWatch(Configue);
                    var node = new LinkedListNode <T>(stopWatch);

                    LeakyBucket.AddLast(node);
                    LeakyBucketHash[stopWatch.StartTime] = node; // so the stopWatch can be located in O(1) when its response is received and Finish is invoked
                    nItemsInBucket += 1;
                    Logger.LogInfo("Enqueued" + stopWatch.StartTime.Ticks + ", nItemsInBucket become " + nItemsInBucket);
                    if (BottomOfBucket == null)
                    {
                        BottomOfBucket = node;
                    }
                }
            }

            // move the watcher's Log logic out of the locked critical section in case it performs a lengthy operation
            if (toBeLogged != null)
            {
                foreach (var watcher in toBeLogged)
                {
                    watcher.Log();
                }
            }


            return stopWatch;
        }