public void TestValueIsZeroAfterRollingWindowPassesAndNoTraffic()
{
    // Verifies that once the full rolling window elapses with no traffic,
    // the percentile reading decays back to zero.
    var clock = new MockedTime();
    var percentile = new HystrixRollingPercentile(clock, TimeInMilliseconds, NumberOfBuckets, BucketDataLength, Enabled);

    foreach (var value in new[] { 1000, 1000, 1000, 2000, 4000 })
    {
        percentile.AddValue(value);
    }

    Assert.Equal(1, percentile._buckets.Size);

    // No bucket turnover yet, so no percentile snapshot has been generated.
    Assert.Equal(0, percentile.GetPercentile(50));

    clock.Increment(6000);

    // Still only one bucket until the structure is touched again.
    Assert.Equal(1, percentile._buckets.Size);

    // A bucket has rolled, so a snapshot now exists (median interpolates to 1500).
    Assert.Equal(1500, percentile.GetPercentile(50));

    // Let a full minute pass with no traffic: every bucket is empty (or reset),
    // so no percentile data should remain.
    clock.Increment(60000);
    Assert.Equal(0, percentile.GetPercentile(50));
}
public void TestSampleDataOverTime2()
{
    output.WriteLine("\n\n***************************** testSampleDataOverTime2 \n");
    var time = new MockedTime();
    int previousTime = 0;
    var p = new HystrixRollingPercentile(time, TimeInMilliseconds, NumberOfBuckets, BucketDataLength, Enabled);

    // Replay the recorded latencies at their original relative timestamps so
    // bucket rollover happens exactly as it did when the data was captured.
    for (int i = 0; i < SampleDataHolder2.Data.Length; i++)
    {
        int timeInMillisecondsSinceStart = SampleDataHolder2.Data[i][0];
        int latency = SampleDataHolder2.Data[i][1];
        time.Increment(timeInMillisecondsSinceStart - previousTime);
        previousTime = timeInMillisecondsSinceStart;
        p.AddValue(latency);
    }

    output.WriteLine("0.01: " + p.GetPercentile(0.01));
    output.WriteLine("Median: " + p.GetPercentile(50));
    output.WriteLine("90th: " + p.GetPercentile(90));
    output.WriteLine("99th: " + p.GetPercentile(99));
    output.WriteLine("99.5th: " + p.GetPercentile(99.5));
    output.WriteLine("99.99: " + p.GetPercentile(99.99));

    // Read each percentile once so the assertion and its failure message use
    // the same value (the original `if (...) Assert.True(false, ...)` pattern
    // re-read the percentile for the message, which could report a value that
    // differs from the one that actually failed the check).
    var median = p.GetPercentile(50);
    Assert.True(median >= 50 && median <= 90, "We expect around 60-70 but got: " + median);

    var p99 = p.GetPercentile(99);
    Assert.True(p99 >= 400, "We expect to see some high values over 400 but got: " + p99);
}
public void Returns_The_Mean_Of_All_Values_In_The_Snapshot_When_GetMetricsRollingPercentileEnabled_Is_True()
{
    // arrange: percentile tracking switched on via the configuration service.
    const int windowMs = 10000;
    const int bucketCount = 10;
    const int bucketSize = 100;
    var configurationServiceMock = new Mock<IHystrixConfigurationService>();
    configurationServiceMock.Setup(x => x.GetMetricsRollingPercentileEnabled()).Returns(true);
    var rollingPercentile = new HystrixRollingPercentile(windowMs, bucketCount, bucketSize, configurationServiceMock.Object);
    rollingPercentile.AddValue(243);
    rollingPercentile.AddValue(157);

    // NOTE(review): real-time wait, presumably so the current bucket rolls and
    // the recorded values land in the snapshot — confirm against the bucket
    // rollover rules before replacing with a mocked clock.
    Thread.Sleep(1500);

    // act
    var mean = rollingPercentile.GetMean();

    // assert: (243 + 157) / 2 == 200
    Assert.Equal(200, mean);
}
public void TestThreadSafety()
{
    var time = new MockedTime();
    var p = new HystrixRollingPercentile(time, 100, 25, 1000, true);
    int num_threads = 1000; // .NET Core StackOverflow
    int num_iterations = 1000000;
    var latch = new CountdownEvent(num_threads);
    var aggregateMetrics = new AtomicInteger(); // same as a blackhole
    var cts = new CancellationTokenSource();

    // Concurrent reader hammering the percentile surface while writers add values.
    var metricsPoller = Task.Run(() =>
    {
        while (!cts.Token.IsCancellationRequested)
        {
            aggregateMetrics.AddAndGet(p.Mean + p.GetPercentile(10) + p.GetPercentile(50) + p.GetPercentile(90));
        }
    });

    for (int i = 0; i < num_threads; i++)
    {
        int threadId = i;
        Task.Run(() =>
        {
            // System.Random is not thread-safe: sharing one instance across
            // writer threads (as the original did) can corrupt its internal
            // state and degrade Next() to returning 0. Give each writer its own.
            var r = new Random(threadId);
            for (int j = 1; j < (num_iterations / num_threads) + 1; j++)
            {
                int nextInt = r.Next(100);
                p.AddValue(nextInt);
                if (threadId == 0)
                {
                    time.Increment(1);
                }
            }

            latch.SignalEx();
        });
    }

    // CountdownEvent.Wait(TimeSpan) returns false on timeout rather than
    // throwing, so the original try/catch could never report a timeout;
    // the return value must be asserted explicitly.
    Assert.True(latch.Wait(TimeSpan.FromSeconds(100)), "Timeout on all threads writing percentiles");
    cts.Cancel();

    aggregateMetrics.AddAndGet(p.Mean + p.GetPercentile(10) + p.GetPercentile(50) + p.GetPercentile(90));
    output.WriteLine(p.Mean + " : " + p.GetPercentile(50) + " : " + p.GetPercentile(75) + " : " + p.GetPercentile(90) + " : " + p.GetPercentile(95) + " : " + p.GetPercentile(99));
}
public void Adds_Value_To_Current_Bucket_When_GetMetricsRollingPercentileEnabled_Is_True()
{
    // arrange: enable percentile tracking through the configuration service.
    const int windowMs = 10000;
    const int bucketCount = 10;
    const int bucketSize = 100;
    var configurationServiceMock = new Mock<IHystrixConfigurationService>();
    configurationServiceMock.Setup(x => x.GetMetricsRollingPercentileEnabled()).Returns(true);
    var rollingPercentile = new HystrixRollingPercentile(windowMs, bucketCount, bucketSize, configurationServiceMock.Object);

    // act: recording a value into the live bucket must succeed without throwing.
    rollingPercentile.AddValue(243);
}
public void TestDoesNothingWhenDisabled()
{
    // With the feature disabled, feeding in a full sample data set must leave
    // all statistics at their -1 sentinel values.
    var clock = new MockedTime();
    var percentile = new HystrixRollingPercentile(clock, TimeInMilliseconds, NumberOfBuckets, BucketDataLength, false);

    var elapsed = 0;
    foreach (var sample in SampleDataHolder2.Data)
    {
        // sample[0] = ms since start, sample[1] = latency.
        clock.Increment(sample[0] - elapsed);
        elapsed = sample[0];
        percentile.AddValue(sample[1]);
    }

    Assert.Equal(-1, percentile.GetPercentile(50));
    Assert.Equal(-1, percentile.GetPercentile(75));
    Assert.Equal(-1, percentile.Mean);
}
public void TestSampleDataOverTime1()
{
    output.WriteLine("\n\n***************************** testSampleDataOverTime1 \n");
    var time = new MockedTime();
    var p = new HystrixRollingPercentile(time, TimeInMilliseconds, NumberOfBuckets, BucketDataLength, Enabled);
    int previousTime = 0;

    // Replay the recorded latencies at their original relative timestamps so
    // bucket rollover happens exactly as it did when the data was captured.
    for (int i = 0; i < SampleDataHolder1.Data.Length; i++)
    {
        int timeInMillisecondsSinceStart = SampleDataHolder1.Data[i][0];
        int latency = SampleDataHolder1.Data[i][1];
        time.Increment(timeInMillisecondsSinceStart - previousTime);
        previousTime = timeInMillisecondsSinceStart;
        p.AddValue(latency);
    }

    output.WriteLine("0.01: " + p.GetPercentile(0.01));
    output.WriteLine("Median: " + p.GetPercentile(50));
    output.WriteLine("90th: " + p.GetPercentile(90));
    output.WriteLine("99th: " + p.GetPercentile(99));
    output.WriteLine("99.5th: " + p.GetPercentile(99.5));
    output.WriteLine("99.99: " + p.GetPercentile(99.99));
    output.WriteLine("Median: " + p.GetPercentile(50));
    output.WriteLine("Median: " + p.GetPercentile(50));
    output.WriteLine("Median: " + p.GetPercentile(50));

    /*
     * In a loop as a use case was found where very different values were calculated in subsequent requests.
     */
    for (int i = 0; i < 10; i++)
    {
        // Read each percentile once per iteration so the assertion and its
        // failure message use the same value (the original
        // `if (...) Assert.True(false, ...)` pattern re-read the percentile
        // for the message, which could report a value different from the one
        // that actually failed the check).
        var median = p.GetPercentile(50);
        Assert.True(median <= 5, "We expect around 2 but got: " + median);

        var p995 = p.GetPercentile(99.5);
        Assert.True(p995 >= 20, "We expect to see some high values over 20 but got: " + p995);
    }
}
public void TestWriteThreadSafety()
{
    var time = new MockedTime();
    var p = new HystrixRollingPercentile(time, 100, 25, 1000, true);
    int num_threads = 10;
    int num_iterations = 1000;
    var latch = new CountdownEvent(num_threads);
    var added = new AtomicInteger(0);

    for (int i = 0; i < num_threads; i++)
    {
        int seed = i;
        var t = new Task(
            () =>
            {
                // System.Random is not thread-safe: sharing one instance
                // across the writer tasks (as the original did) can corrupt
                // its state and degrade Next() to returning 0. Each task
                // gets its own instance instead.
                var r = new Random(seed);
                for (int j = 1; j < (num_iterations / num_threads) + 1; j++)
                {
                    int nextInt = r.Next(100);
                    p.AddValue(nextInt);
                    added.GetAndIncrement();
                }

                latch.SignalEx();
            }, CancellationToken.None, TaskCreationOptions.LongRunning);
        t.Start();
    }

    // CountdownEvent.Wait(TimeSpan) returns false on timeout rather than
    // throwing, so the original try/catch could never report a timeout;
    // the return value must be asserted explicitly.
    Assert.True(latch.Wait(TimeSpan.FromSeconds(100)), "Timeout on all threads writing percentiles");

    // Every value written by every thread must have landed in the live bucket.
    Assert.Equal(added.Value, p._buckets.PeekLast._data.Length);
}
public void TestRolling()
{
    // Verifies bucket rollover and snapshot generation using a mocked clock:
    // the IDateTimeProvider mock returns a controlled "now" so rollover is
    // driven deterministically by advancing currentTime.
    var dateTimeProviderMock = new Mock<IDateTimeProvider>();
    var currentTime = new DateTime(2017, 6, 26, 14, 0, 0).Ticks / TimeSpan.TicksPerMillisecond;
    dateTimeProviderMock.Setup(time => time.CurrentTimeInMilliseconds).Returns(currentTime);
    var configurationServiceMock = new Mock<IHystrixConfigurationService>();
    configurationServiceMock.Setup(x => x.GetMetricsRollingPercentileEnabled()).Returns(true);
    // 60s window, 12 buckets (5s each), 1000 values per bucket.
    HystrixRollingPercentile p = new HystrixRollingPercentile(dateTimeProviderMock.Object, 60000, 12, 1000, configurationServiceMock.Object);
    p.AddValue(1000);
    p.AddValue(1000);
    p.AddValue(1000);
    p.AddValue(2000);
    Assert.Equal(1, p.Buckets.Length);
    // no bucket turnover yet so percentile not yet generated
    Assert.Equal(0, p.GetPercentile(50));
    // Advance the mocked clock past one bucket's lifetime.
    currentTime += 6000;
    dateTimeProviderMock.Setup(time => time.CurrentTimeInMilliseconds).Returns(currentTime);
    // still only 1 bucket until we touch it again
    Assert.Equal(1, p.Buckets.Length);
    // a bucket has been created so we have a new percentile
    Assert.Equal(1000, p.GetPercentile(50));
    // now 2 buckets since getting a percentile causes bucket retrieval
    Assert.Equal(2, p.Buckets.Length);
    p.AddValue(1000);
    p.AddValue(500);
    // should still be 2 buckets
    Assert.Equal(2, p.Buckets.Length);
    p.AddValue(200);
    p.AddValue(200);
    p.AddValue(1600);
    p.AddValue(200);
    p.AddValue(1600);
    p.AddValue(1600);
    // we haven't progressed to a new bucket so the percentile should be the same and ignore the most recent bucket
    Assert.Equal(1000, p.GetPercentile(50));
    // increment to another bucket so we include all of the above in the PercentileSnapshot
    currentTime += 6000;
    dateTimeProviderMock.Setup(time => time.CurrentTimeInMilliseconds).Returns(currentTime);
    // the rolling version should have the same data as creating a snapshot like this
    PercentileSnapshot ps = new PercentileSnapshot(1000, 1000, 1000, 2000, 1000, 500, 200, 200, 1600, 200, 1600, 1600);
    // NOTE(review): fractional arguments (0.15, 0.50, ...) where other tests in
    // this file pass whole percentile values (15, 50, 90) — confirm whether
    // these were meant to be 15/50/90/99.5 rather than sub-1% percentiles.
    Assert.Equal(ps.GetPercentile(0.15), p.GetPercentile(0.15));
    Assert.Equal(ps.GetPercentile(0.50), p.GetPercentile(0.50));
    Assert.Equal(ps.GetPercentile(0.90), p.GetPercentile(0.90));
    Assert.Equal(ps.GetPercentile(0.995), p.GetPercentile(0.995));
    // mean = (1000+1000+1000+2000+1000+500+200+200+1600+200+1600+1600)/12 = 11900/12 -> 991 (integer truncation)
    Assert.Equal(991, ps.GetMean());
}
public void TestRolling()
{
    // Verifies bucket rollover and snapshot generation using MockedTime so
    // rollover is driven deterministically by Increment() rather than wall time.
    var time = new MockedTime();
    var p = new HystrixRollingPercentile(time, TimeInMilliseconds, NumberOfBuckets, BucketDataLength, Enabled);
    p.AddValue(1000);
    p.AddValue(1000);
    p.AddValue(1000);
    p.AddValue(2000);
    Assert.Equal(1, p._buckets.Size);
    // no bucket turnover yet so percentile not yet generated
    Assert.Equal(0, p.GetPercentile(50));
    time.Increment(6000);
    // still only 1 bucket until we touch it again
    Assert.Equal(1, p._buckets.Size);
    // a bucket has been created so we have a new percentile
    Assert.Equal(1000, p.GetPercentile(50));
    // now 2 buckets since getting a percentile causes bucket retrieval
    Assert.Equal(2, p._buckets.Size);
    p.AddValue(1000);
    p.AddValue(500);
    // should still be 2 buckets
    Assert.Equal(2, p._buckets.Size);
    p.AddValue(200);
    p.AddValue(200);
    p.AddValue(1600);
    p.AddValue(200);
    p.AddValue(1600);
    p.AddValue(1600);
    // we haven't progressed to a new bucket so the percentile should be the same and ignore the most recent bucket
    Assert.Equal(1000, p.GetPercentile(50));
    // Increment to another bucket so we include all of the above in the PercentileSnapshot
    time.Increment(6000);
    // the rolling version should have the same data as creating a snapshot like this
    var ps = new PercentileSnapshot(1000, 1000, 1000, 2000, 1000, 500, 200, 200, 1600, 200, 1600, 1600);
    // NOTE(review): fractional arguments (0.15, 0.50, ...) where the WriteLine
    // calls below use whole percentile values (100, 99.5, 99, ...) — confirm
    // whether these were meant to be 15/50/90/99.5 rather than sub-1% percentiles.
    Assert.Equal(ps.GetPercentile(0.15), p.GetPercentile(0.15));
    Assert.Equal(ps.GetPercentile(0.50), p.GetPercentile(0.50));
    Assert.Equal(ps.GetPercentile(0.90), p.GetPercentile(0.90));
    Assert.Equal(ps.GetPercentile(0.995), p.GetPercentile(0.995));
    output.WriteLine("100th: " + ps.GetPercentile(100) + " " + p.GetPercentile(100));
    output.WriteLine("99.5th: " + ps.GetPercentile(99.5) + " " + p.GetPercentile(99.5));
    output.WriteLine("99th: " + ps.GetPercentile(99) + " " + p.GetPercentile(99));
    output.WriteLine("90th: " + ps.GetPercentile(90) + " " + p.GetPercentile(90));
    output.WriteLine("50th: " + ps.GetPercentile(50) + " " + p.GetPercentile(50));
    output.WriteLine("10th: " + ps.GetPercentile(10) + " " + p.GetPercentile(10));
    // mean = (1000+1000+1000+2000+1000+500+200+200+1600+200+1600+1600)/12 = 11900/12 -> 991 (integer truncation)
    Assert.Equal(991, ps.Mean);
}