GetPercentile() public method

Compute a percentile from the underlying rolling buckets of values.

For performance reasons, it maintains a single snapshot of the sorted values from all buckets, which is regenerated each time a bucket rotates.

This means that if a bucket is 5000ms, then this method will re-compute a percentile at most once every 5000ms.

public GetPercentile ( double percentile ) : int
percentile double Percentile such as 99 (99th percentile), 99.5 (99.5th percentile), or 50 (median, 50th percentile) to compute and retrieve from the rolling buckets.
return int The percentile value.
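
A minimal usage sketch. The 60000ms window, 12 buckets, and 1000-value bucket size here are illustrative assumptions (chosen so each bucket spans 5000ms, matching the description above); MockedClock and DynamicProperties.Factory.AsProperty are reused from the test examples below:

        MockedClock time = new MockedClock();
        // assumed constants: 60000ms window / 12 buckets = 5000ms per bucket, up to 1000 values per bucket
        var p = new RollingPercentileNumber(time, 60000, 12, 1000, DynamicProperties.Factory.AsProperty(true));

        p.AddValue(150);
        p.AddValue(300);

        // no bucket has rotated yet, so no snapshot exists and 0 is returned
        int before = p.GetPercentile(50);

        // advance past one 5000ms bucket; the next read rotates the bucket and
        // regenerates the sorted snapshot from all buckets in the window
        time.Increment(6000);
        int median = p.GetPercentile(50);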
        public void testRolling()
        {
            MockedClock time = new MockedClock();
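            // rolling window of timeInMilliseconds, split into numberOfBuckets buckets,
            // each storing up to bucketDataLength values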
            RollingPercentileNumber p = new RollingPercentileNumber(time, timeInMilliseconds, numberOfBuckets, bucketDataLength, DynamicProperties.Factory.AsProperty(true));
            p.AddValue(1000);
            p.AddValue(1000);
            p.AddValue(1000);
            p.AddValue(2000);

            Assert.Equal(1, p.GetBuckets().Count());

            // no bucket turnover yet so percentile not yet generated
            Assert.Equal(0, p.GetPercentile(50));

            time.Increment(6000);

            // still only 1 bucket until we touch it again
            Assert.Equal(1, p.GetBuckets().Count());

            // a bucket has been created so we have a new percentile
            Assert.Equal(1000, p.GetPercentile(50));

            // now 2 buckets since getting a percentile causes bucket retrieval
            Assert.Equal(2, p.GetBuckets().Count());

            p.AddValue(1000);
            p.AddValue(500);

            // should still be 2 buckets
            Assert.Equal(2, p.GetBuckets().Count());

            p.AddValue(200);
            p.AddValue(200);
            p.AddValue(1600);
            p.AddValue(200);
            p.AddValue(1600);
            p.AddValue(1600);

            // we haven't progressed to a new bucket so the percentile should be the same and ignore the most recent bucket
            Assert.Equal(1000, p.GetPercentile(50));

            // Increment to another bucket so we include all of the above in the PercentileSnapshot
            time.Increment(6000);

            // the rolling version should have the same data as creating a snapshot like this
            PercentileSnapshot ps = new PercentileSnapshot(1000, 1000, 1000, 2000, 1000, 500, 200, 200, 1600, 200, 1600, 1600);

            Assert.Equal(ps.GetPercentile(0.15), p.GetPercentile(0.15));
            Assert.Equal(ps.GetPercentile(0.50), p.GetPercentile(0.50));
            Assert.Equal(ps.GetPercentile(0.90), p.GetPercentile(0.90));
            Assert.Equal(ps.GetPercentile(0.995), p.GetPercentile(0.995));

            //Console.WriteLine("100th: " + ps.GetPercentile(100) + "  " + p.GetPercentile(100));
            //Console.WriteLine("99.5th: " + ps.GetPercentile(99.5) + "  " + p.GetPercentile(99.5));
            //Console.WriteLine("99th: " + ps.GetPercentile(99) + "  " + p.GetPercentile(99));
            //Console.WriteLine("90th: " + ps.GetPercentile(90) + "  " + p.GetPercentile(90));
            //Console.WriteLine("50th: " + ps.GetPercentile(50) + "  " + p.GetPercentile(50));
            //Console.WriteLine("10th: " + ps.GetPercentile(10) + "  " + p.GetPercentile(10));

            // mean = (1000+1000+1000+2000+1000+500+200+200+1600+200+1600+1600) / 12 = 991 (integer truncation)
            Assert.Equal(991, ps.Mean);
        }
        public void testThreadSafety()
        {
            MockedClock time = new MockedClock();
            RollingPercentileNumber p = new RollingPercentileNumber(time, 100, 25, 1000, DynamicProperties.Factory.AsProperty(true));

            int NUM_THREADS = 1000;
            int NUM_ITERATIONS = 1000000;

            var tasks = new Task [NUM_THREADS];

            long aggregateMetrics = 0; // acts as a black hole so the percentile reads are not optimized away

            var token = new CancellationTokenSource();
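            // reader task: concurrently polls the mean and several percentiles while the writer tasks below add values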
            var metricsPoller = Task.Run(() =>
            {
                while (!token.IsCancellationRequested)
                {
                    Interlocked.Add(ref aggregateMetrics, p.Mean + p.GetPercentile(10) + p.GetPercentile(50) + p.GetPercentile(90));
                    //Console.WriteLine("AGGREGATE : " + p.GetPercentile(10) + " : " + p.GetPercentile(50) + " : " + p.GetPercentile(90));
                }
            });

            for (int i = 0; i < NUM_THREADS; i++)
            {
                int threadId = i;
                tasks[i] = Task.Run(() =>
                {
                    // System.Random is not thread-safe, so each writer task gets its own instance
                    var r = new Random(threadId);
                    for (int j = 1; j < NUM_ITERATIONS / NUM_THREADS + 1; j++)
                    {
                        int nextInt = r.Next(100);
                        p.AddValue(nextInt);
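                        // only the first task advances the mocked clock, so buckets rotate while all tasks write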
                        if (threadId == 0)
                        {
                            time.Increment(1);
                        }
                    }
                });
            }

            if (!Task.WaitAll(tasks, 1000))
                throw new Exception("Timed out waiting for all writer tasks to finish");
            token.Cancel();

            Interlocked.Add(ref aggregateMetrics, p.Mean + p.GetPercentile(10) + p.GetPercentile(50) + p.GetPercentile(90));
            //Console.WriteLine(p.Mean + " : " + p.GetPercentile(50) + " : " + p.GetPercentile(75) + " : " + p.GetPercentile(90) + " : " + p.GetPercentile(95) + " : " + p.GetPercentile(99));
        }
        public void testSampleDataOverTime1()
        {
            //Console.WriteLine("\n\n***************************** testSampleDataOverTime1 \n");

            MockedClock time = new MockedClock();
            RollingPercentileNumber p = new RollingPercentileNumber(time, timeInMilliseconds, numberOfBuckets, bucketDataLength, DynamicProperties.Factory.AsProperty(true));
            int previousTime = 0;
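            // replay recorded samples: each row of the two-column array is (ms since start, latency),
            // so data.Length / 2 is the number of rows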
            for (int i = 0; i < SampleDataHolder1.data.Length/2; i++)
            {
                int timeInMillisecondsSinceStart = SampleDataHolder1.data[i,0];
                int latency = SampleDataHolder1.data[i,1];
                time.Increment(timeInMillisecondsSinceStart - previousTime);
                previousTime = timeInMillisecondsSinceStart;
                p.AddValue(latency);
            }

            //Console.WriteLine("0.01: " + p.GetPercentile(0.01));
            //Console.WriteLine("Median: " + p.GetPercentile(50));
            //Console.WriteLine("90th: " + p.GetPercentile(90));
            //Console.WriteLine("99th: " + p.GetPercentile(99));
            //Console.WriteLine("99.5th: " + p.GetPercentile(99.5));
            //Console.WriteLine("99.99: " + p.GetPercentile(99.99));

            //Console.WriteLine("Median: " + p.GetPercentile(50));
            //Console.WriteLine("Median: " + p.GetPercentile(50));
            //Console.WriteLine("Median: " + p.GetPercentile(50));

            /*
             * Run in a loop because a case was found where subsequent requests computed very different values.
             */
            for (int i = 0; i < 10; i++)
            {
                if (p.GetPercentile(50) > 5)
                {
                    throw new Exception("We expect around 2 but got: " + p.GetPercentile(50));
                }

                if (p.GetPercentile(99.5) < 20)
                {
                    throw new Exception("We expect to see some high values over 20 but got: " + p.GetPercentile(99.5));
                }
            }
        }
        public void testSampleDataOverTime2()
        {
            //Console.WriteLine("\n\n***************************** testSampleDataOverTime2 \n");
            MockedClock time = new MockedClock();
            int previousTime = 0;
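            // replay recorded samples: each row of the two-column array is (ms since start, latency),
            // so data.Length / 2 is the number of rows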
            RollingPercentileNumber p = new RollingPercentileNumber(time, timeInMilliseconds, numberOfBuckets, bucketDataLength, DynamicProperties.Factory.AsProperty(true));
            for (int i = 0; i < SampleDataHolder2.data.Length/2; i++)
            {
                int timeInMillisecondsSinceStart = SampleDataHolder2.data[i,0];
                int latency = SampleDataHolder2.data[i,1];
                time.Increment(timeInMillisecondsSinceStart - previousTime);
                previousTime = timeInMillisecondsSinceStart;
                p.AddValue(latency);
            }

            //Console.WriteLine("0.01: " + p.GetPercentile(0.01));
            //Console.WriteLine("Median: " + p.GetPercentile(50));
            //Console.WriteLine("90th: " + p.GetPercentile(90));
            //Console.WriteLine("99th: " + p.GetPercentile(99));
            //Console.WriteLine("99.5th: " + p.GetPercentile(99.5));
            //Console.WriteLine("99.99: " + p.GetPercentile(99.99));

            if (p.GetPercentile(50) > 90 || p.GetPercentile(50) < 50)
            {
                throw new Exception("We expect around 60-70 but got: " + p.GetPercentile(50));
            }

            if (p.GetPercentile(99) < 400)
            {
                throw new Exception("We expect to see some high values over 400 but got: " + p.GetPercentile(99));
            }
        }
        public void testValueIsZeroAfterRollingWindowPassesAndNoTraffic()
        {
            MockedClock time = new MockedClock();
            var p = new RollingPercentileNumber(time, timeInMilliseconds, numberOfBuckets, bucketDataLength, DynamicProperties.Factory.AsProperty(true));
            p.AddValue(1000);
            p.AddValue(1000);
            p.AddValue(1000);
            p.AddValue(2000);
            p.AddValue(4000);

            Assert.Equal(1, p.GetBuckets().Count());

            // no bucket turnover yet so percentile not yet generated
            Assert.Equal(0, p.GetPercentile(50));

            time.Increment(6000);

            // still only 1 bucket until we touch it again
            Assert.Equal(1, p.GetBuckets().Count());

            // a bucket has been created so we have a new percentile
            Assert.Equal(1500, p.GetPercentile(50));
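            // 1500 rather than 1000: the snapshot interpolates between the two middle ranks of
            // [1000, 1000, 1000, 2000, 4000], i.e. 1000 + 0.5 * (2000 - 1000)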

            // let 1 minute pass
            time.Increment(60000);

            // no data in a minute should mean all buckets are empty (or reset) so we should not have any percentiles
            Assert.Equal(0, p.GetPercentile(50));
        }