/// <summary>
/// Emits one CSV row of latency statistics on shutdown: entry size, p50/p90/p99
/// latency (µs), and total elapsed run time (ms). The row is written both to the
/// console and appended to "Latencies.txt".
/// </summary>
public void OnShutdown()
{
    // Total wall-clock run time, from the recorded start timestamp to now.
    var stopTimestamp = Stopwatch.GetTimestamp();
    var elapsed = (stopTimestamp - _start) / OutputScalingFactor.TimeStampToMilliseconds;

    // Convert raw histogram values (stopwatch ticks) to microseconds.
    var p50 = _latencyHistogram.GetValueAtPercentile(50) / OutputScalingFactor.TimeStampToMicroseconds;
    var p90 = _latencyHistogram.GetValueAtPercentile(90) / OutputScalingFactor.TimeStampToMicroseconds;
    var p99 = _latencyHistogram.GetValueAtPercentile(99) / OutputScalingFactor.TimeStampToMicroseconds;

    // Invariant culture keeps the CSV machine-readable regardless of host locale.
    var row = FormattableString.Invariant(
        $"{_entrySize},{p50:0.000},{p90:0.000},{p99:0.000},{elapsed:0}{Environment.NewLine}");

    Console.Write(row);
    File.AppendAllText("Latencies.txt", row);
}
/// <summary>
/// Verifies GetValueAtPercentile against known percentile values for both the
/// raw and the corrected histogram, each within a +/- 0.1% tolerance.
/// </summary>
public void TestGetValueAtPercentile()
{
    // Expected values in histogram units; tolerance is 0.1% of the expectation.
    const double tolerance = 0.001;
    const double oneMsec = 1000.0;
    const double fiftySec = 50000000.0;
    const double eightySec = 80000000.0;
    const double ninetyEightSec = 98000000.0;
    const double hundredSec = 100000000.0;

    // Raw histogram: bulk of the samples at 1 msec, outlier at 100 sec.
    Assert.AreEqual(oneMsec, (double)RawHistogram.GetValueAtPercentile(30.0), oneMsec * tolerance, "raw 30%'ile is 1 msec +/- 0.1%");
    Assert.AreEqual(oneMsec, (double)RawHistogram.GetValueAtPercentile(99.0), oneMsec * tolerance, "raw 99%'ile is 1 msec +/- 0.1%");
    Assert.AreEqual(oneMsec, (double)RawHistogram.GetValueAtPercentile(99.99), oneMsec * tolerance, "raw 99.99%'ile is 1 msec +/- 0.1%");
    Assert.AreEqual(hundredSec, (double)RawHistogram.GetValueAtPercentile(99.999), hundredSec * tolerance, "raw 99.999%'ile is 100 sec +/- 0.1%");
    Assert.AreEqual(hundredSec, (double)RawHistogram.GetValueAtPercentile(100.0), hundredSec * tolerance, "raw 100%'ile is 100 sec +/- 0.1%");

    // Corrected histogram: coordinated-omission backfill spreads the tail.
    Assert.AreEqual(oneMsec, (double)LongHistogram.GetValueAtPercentile(30.0), oneMsec * tolerance, "30%'ile is 1 msec +/- 0.1%");
    Assert.AreEqual(oneMsec, (double)LongHistogram.GetValueAtPercentile(50.0), oneMsec * tolerance, "50%'ile is 1 msec +/- 0.1%");
    Assert.AreEqual(fiftySec, (double)LongHistogram.GetValueAtPercentile(75.0), fiftySec * tolerance, "75%'ile is 50 sec +/- 0.1%");
    Assert.AreEqual(eightySec, (double)LongHistogram.GetValueAtPercentile(90.0), eightySec * tolerance, "90%'ile is 80 sec +/- 0.1%");
    Assert.AreEqual(ninetyEightSec, (double)LongHistogram.GetValueAtPercentile(99.0), ninetyEightSec * tolerance, "99%'ile is 98 sec +/- 0.1%");
    Assert.AreEqual(hundredSec, (double)LongHistogram.GetValueAtPercentile(99.999), hundredSec * tolerance, "99.999%'ile is 100 sec +/- 0.1%");
    Assert.AreEqual(hundredSec, (double)LongHistogram.GetValueAtPercentile(100.0), hundredSec * tolerance, "100%'ile is 100 sec +/- 0.1%");
}
/// <summary>
/// Prints a fixed set of percentiles (plus the max) from the histogram, scaled to
/// milliseconds, then writes the full percentile distribution to "HistogramResults.hgrm".
/// </summary>
/// <remarks>
/// NOTE(review): "Histogrma" typo is kept intentionally — renaming would break callers.
/// </remarks>
private static void ProcessHistogrmaResults(LongHistogram histogram)
{
    Console.WriteLine();

    var percentiles = new[] { 50.0, 90.0, 95.0, 99.9, 99.99, 99.999, 99.9999, 99.99999, 99.999999, 100.0 };
    foreach (var percentile in percentiles)
    {
        // Raw histogram values are stopwatch ticks; scale to milliseconds for display.
        var milliseconds = histogram.GetValueAtPercentile(percentile) / OutputScalingFactor.TimeStampToMilliseconds;
        Console.WriteLine($"{percentile,10:##.######}th Percentile : {milliseconds,9:N4} ms");
    }

    var maxMilliseconds = histogram.GetMaxValue() / OutputScalingFactor.TimeStampToMilliseconds;
    Console.WriteLine($" Max : {maxMilliseconds,9:N4} ms");

    // Start the .hgrm output from a clean slate.
    const string fileName = "HistogramResults.hgrm";
    if (File.Exists(fileName))
    {
        File.Delete(fileName);
    }

    using (var writer = new StreamWriter(fileName))
    {
        histogram.OutputPercentileDistribution(writer, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds);
    }
}
/// <summary>
/// Reads a v1 histogram log, drops intervals recorded before the log's start time,
/// applies a skip/take window, and asserts the aggregate statistics of the remainder.
/// </summary>
public void CanReadv1Logs_Skip_PreStart(string logPath, int skip, int take, int expectedHistogramCount, int expectedCombinedValueCount, int expectedCombined999, long expectedCombinedMaxLength, double expectedStartTime)
{
    var stream = GetEmbeddedFileStream(logPath);
    var logReader = new HistogramLogReader(stream);

    // Accumulate every selected interval into one combined histogram.
    HistogramBase combined = new LongHistogram(3600L * 1000 * 1000 * 1000, 3);
    var intervalCount = 0;
    long valueCount = 0;

    // Only intervals stamped at/after the log's own start time are considered.
    var startMillis = logReader.GetStartTime().MillisecondsSinceUnixEpoch();
    var selected = logReader.ReadHistograms()
        .Where(h => h.StartTimeStamp >= startMillis)
        .Skip(skip)
        .Take(take);

    foreach (var interval in selected)
    {
        intervalCount++;
        valueCount += interval.TotalCount;
        combined.Add(interval);
    }

    Assert.AreEqual(expectedHistogramCount, intervalCount);
    Assert.AreEqual(expectedCombinedValueCount, valueCount);
    Assert.AreEqual(expectedCombined999, combined.GetValueAtPercentile(99.9));
    Assert.AreEqual(expectedCombinedMaxLength, combined.GetMaxValue());
    Assert.AreEqual(expectedStartTime, logReader.GetStartTime().SecondsSinceUnixEpoch());
}
/// <summary>
/// A single sample recorded at the histogram's upper bound must still be
/// observable at the 100th percentile (regression guard for large-value histograms).
/// </summary>
public void TestGetValueAtPercentileForLargeHistogram()
{
    const long largestValue = 1000000000000L;

    var histogram = new LongHistogram(largestValue, 5);
    histogram.RecordValue(largestValue);

    Assert.That(histogram.GetValueAtPercentile(100.0) > 0);
}
/// <summary>
/// Writes the named histogram's percentile distribution to stdout and, when a
/// limit is supplied, asserts the measured value does not exceed it.
/// </summary>
static void Report(string name, LongHistogram histogram, TimeSpan? maximumMean)
{
    Console.Out.WriteLine($"Histogram for {name}:");
    histogram.OutputPercentileDistribution(
        Console.Out,
        outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMilliseconds,
        percentileTicksPerHalfDistance: 1);
    Console.Out.WriteLine();

    if (maximumMean == null)
    {
        return;
    }

    var max = maximumMean.Value;
    // NOTE(review): this reads the 50th percentile (median), not the arithmetic mean,
    // despite the naming and the assertion message — confirm which was intended.
    var actualMean = TimeSpan.FromMilliseconds(
        histogram.GetValueAtPercentile(50) / OutputScalingFactor.TimeStampToMilliseconds);
    Assert.LessOrEqual(actualMean, max, $"The actual mean for {name} was '{actualMean}' and was bigger than maximum allowed mean '{max}'.");
}
/// <summary>
/// Prints a markdown table of round-trip/encode/decode latency percentiles
/// (values are stored in tenths of a microsecond and displayed in µs),
/// followed by GC collection counts.
/// </summary>
private static void PrintStatistics()
{
    // One fixed-width (10-char) table cell: raw value in tenths of µs → µs.
    // NOTE(review): assumes GetValueAtPercentile returns a long — confirm against the histogram API.
    string Cell(long rawValue) => $"{$"{rawValue / 10m:N}",10}";

    // One full table row for the given percentile across all three histograms.
    string Row(string label, double percentile)
        => $"| {label} | {Cell(Rtt.GetValueAtPercentile(percentile))} µs | {Cell(Encode.GetValueAtPercentile(percentile))} µs | {Cell(Decode.GetValueAtPercentile(percentile))} µs |";

    Console.WriteLine();
    Console.WriteLine($"| | Round trip | Encode | Decode |");
    Console.WriteLine($"|---------|---------------|---------------|---------------|");
    Console.WriteLine(Row("min", 00.00));
    Console.WriteLine(Row("50.00%", 50.00));
    Console.WriteLine(Row("90.00%", 90.00));
    Console.WriteLine(Row("99.00%", 99.00));
    Console.WriteLine(Row("99.90%", 99.90));
    Console.WriteLine(Row("99.99%", 99.99));
    Console.WriteLine(Row("max", 100.00));
    Console.WriteLine();
    Console.WriteLine($"GC 0: {GC0}");
    Console.WriteLine($"GC 1: {GC1}");
    Console.WriteLine($"GC 2: {GC2}");
}
/// <summary>
/// Reads every interval histogram from a v2 log and asserts the interval count,
/// total value count, combined 99.9th percentile, max value, and log start time.
/// </summary>
public void CanReadv2Logs(string logPath)
{
    var stream = GetEmbeddedFileStream(logPath);
    var logReader = new HistogramLogReader(stream);

    var intervalCount = 0;
    long combinedValueCount = 0;
    var accumulated = new LongHistogram(85899345920838, 3);

    foreach (var interval in logReader.ReadHistograms())
    {
        intervalCount++;
        Assert.IsInstanceOf<HistogramBase>(interval, "Expected integer value histograms in log file");
        combinedValueCount += interval.TotalCount;
        accumulated.Add(interval);
    }

    Assert.AreEqual(62, intervalCount);
    Assert.AreEqual(48761, combinedValueCount);
    Assert.AreEqual(1745879039, accumulated.GetValueAtPercentile(99.9));
    Assert.AreEqual(1796210687, accumulated.GetMaxValue());
    Assert.AreEqual(1441812279.474, logReader.GetStartTime().SecondsSinceUnixEpoch());
}
/// <summary>
/// Verifies that a histogram recorded with a 512x value-unit scale is statistically
/// equivalent to the unscaled histogram (mean, total count, 99%'ile, max), for both
/// the plain and the post-corrected (coordinated-omission-compensated) variants.
/// </summary>
public void TestScalingEquivalence()
{
    Assert.AreEqual(
        LongHistogram.GetMean() * 512,
        ScaledHistogram.GetMean(),
        ScaledHistogram.GetMean() * 0.000001,
        "averages should be equivalent");
    Assert.AreEqual(
        LongHistogram.TotalCount,
        ScaledHistogram.TotalCount,
        "total count should be the same");
    // Percentile values are compared at their lowest-equivalent bucket boundary to
    // cancel out differing bucket resolutions between the two histograms.
    Assert.AreEqual(
        LongHistogram.LowestEquivalentValue(LongHistogram.GetValueAtPercentile(99.0)) * 512,
        ScaledHistogram.LowestEquivalentValue(ScaledHistogram.GetValueAtPercentile(99.0)),
        "99%'iles should be equivalent");
    Assert.AreEqual(
        ScaledHistogram.HighestEquivalentValue(LongHistogram.GetMaxValue() * 512),
        ScaledHistogram.GetMaxValue(),
        "Max should be equivalent for scaled data");

    // Same for post-corrected:
    // FIX: this assertion previously re-compared LongHistogram/ScaledHistogram
    // (a copy-paste of the block above) instead of the post-corrected histograms.
    Assert.AreEqual(
        PostCorrectedHistogram.GetMean() * 512,
        PostCorrectedScaledHistogram.GetMean(),
        PostCorrectedScaledHistogram.GetMean() * 0.000001,
        "averages should be equivalent");
    Assert.AreEqual(
        PostCorrectedHistogram.TotalCount,
        PostCorrectedScaledHistogram.TotalCount,
        "total count should be the same");
    Assert.AreEqual(
        PostCorrectedHistogram.LowestEquivalentValue(PostCorrectedHistogram.GetValueAtPercentile(99.0)) * 512,
        PostCorrectedScaledHistogram.LowestEquivalentValue(PostCorrectedScaledHistogram.GetValueAtPercentile(99.0)),
        "99%'iles should be equivalent");
    Assert.AreEqual(
        PostCorrectedScaledHistogram.HighestEquivalentValue(PostCorrectedHistogram.GetMaxValue() * 512),
        PostCorrectedScaledHistogram.GetMaxValue(),
        "Max should be equivalent for post-corrected data");
}
private CachedValuesHistogram(LongHistogram underlying)
{
    /*
     * Single thread calculates a variety of commonly-accessed quantities.
     * This way, all threads can access the cached values without synchronization
     * Synchronization is only required for values that are not cached
     */
    if (underlying.TotalCount <= 0)
    {
        // Empty histogram: leave every cached field at its default.
        return;
    }

    // Shorthand for the repeated percentile lookup + int truncation below.
    int At(double percentile) => (int)underlying.GetValueAtPercentile(percentile);

    mean = (int)underlying.GetMean();
    p0 = At(0);
    p5 = At(5);
    p10 = At(10);
    p15 = At(15);
    p20 = At(20);
    p25 = At(25);
    p30 = At(30);
    p35 = At(35);
    p40 = At(40);
    p45 = At(45);
    p50 = At(50);
    p55 = At(55);
    p60 = At(60);
    p65 = At(65);
    p70 = At(70);
    p75 = At(75);
    p80 = At(80);
    p85 = At(85);
    p90 = At(90);
    p95 = At(95);
    p99 = At(99);
    p99_5 = At(99.5);
    p99_9 = At(99.9);
    p99_95 = At(99.95);
    p99_99 = At(99.99);
    p100 = At(100);
    totalCount = underlying.TotalCount;
}
/// <summary>
/// Reads all interval histograms from a v2 log file, accumulating them into a
/// single histogram, and checks counts, the combined 99.9th percentile, the
/// combined max, and the recorded log start time against expected constants.
/// </summary>
public void CanReadv2Logs(string logPath)
{
    var readerStream = GetEmbeddedFileStream(logPath);
    var reader = new HistogramLogReader(readerStream);

    var accumulatedHistogram = new LongHistogram(85899345920838, 3);
    var histogramCount = 0;
    long totalCount = 0;

    foreach (var histogram in reader.ReadHistograms())
    {
        Assert.IsInstanceOf<HistogramBase>(histogram, "Expected integer value histograms in log file");

        histogramCount++;
        totalCount += histogram.TotalCount;
        accumulatedHistogram.Add(histogram);
    }

    Assert.AreEqual(62, histogramCount);
    Assert.AreEqual(48761, totalCount);
    Assert.AreEqual(1745879039, accumulatedHistogram.GetValueAtPercentile(99.9));
    Assert.AreEqual(1796210687, accumulatedHistogram.GetMaxValue());
    Assert.AreEqual(1441812279.474, reader.GetStartTime().SecondsSinceUnixEpoch());
}