public void Given_1000_Events_ShouldSampleEvent_Returns_True_Every_Nth_Event(SampleEvery samplingRate, int expectedEvents)
{
    // Arrange
    var rate = new SamplingRate(samplingRate);

    // Act: push 1,000 events through the sampler, counting how many it keeps.
    var sampledCount = 0;
    var iteration = 0;
    while (iteration < 1_000)
    {
        if (rate.ShouldSampleEvent())
        {
            sampledCount++;
        }

        iteration++;
    }

    // Assert
    Assert.That(sampledCount, Is.EqualTo(expectedEvents));
}
/// <summary>
/// Starts collecting .NET runtime statistics (contention, JIT, thread pool
/// scheduling, thread pool and GC metrics) with the given sampling rates.
/// Does nothing if a collector has already been started.
/// </summary>
public PrometheusMetricsProvider AddDotNetRuntimeStats(
    SampleEvery contentionSampleRate = SampleEvery.TenEvents,
    SampleEvery jitSampleRate = SampleEvery.HundredEvents,
    SampleEvery threadScheduleSampleRate = SampleEvery.OneEvent)
{
    if (_collector != null)
    {
        // A collector is already running — keep it.
        return this;
    }

    // Each With* call returns the builder, so reassignment is equivalent to chaining.
    var builder = DotNetRuntimeStatsBuilder.Customize();
    builder = builder.WithContentionStats(contentionSampleRate);
    builder = builder.WithJitStats(jitSampleRate);
    builder = builder.WithThreadPoolSchedulingStats(null, threadScheduleSampleRate);
    builder = builder.WithThreadPoolStats();
    builder = builder.WithGcStats();
    _collector = builder.StartCollecting();

    return this;
}
/// <summary>
/// Starts collecting .NET runtime statistics (contention, JIT, thread pool and GC
/// metrics) with the given capture levels and sampling rates.
/// Does nothing if a collector has already been started.
/// </summary>
public PrometheusMetricsProvider AddDotNetRuntimeStats(
    CaptureLevel contentionCaptureLevel = CaptureLevel.Counters,
    SampleEvery contentionSampleRate = SampleEvery.TenEvents,
    CaptureLevel jitCaptureLevel = CaptureLevel.Counters,
    SampleEvery jitSampleRate = SampleEvery.HundredEvents,
    CaptureLevel threadPoolCaptureLevel = CaptureLevel.Counters,
    ThreadPoolMetricsProducer.Options threadPoolOptions = null)
{
    if (_collector != null)
    {
        // A collector is already running — keep it.
        return this;
    }

    // Each With* call returns the builder, so reassignment is equivalent to chaining.
    var builder = DotNetRuntimeStatsBuilder.Customize();
    builder = builder.WithContentionStats(contentionCaptureLevel, contentionSampleRate);
    builder = builder.WithJitStats(jitCaptureLevel, jitSampleRate);
    builder = builder.WithThreadPoolStats(threadPoolCaptureLevel, threadPoolOptions);
    builder = builder.WithGcStats();
    _collector = builder.StartCollecting();

    return this;
}
/// <summary>
/// Creates a sampling rate from the given interval; the enum's numeric value
/// becomes the sampling interval exposed via <c>SampleEvery</c>.
/// </summary>
public SamplingRate(SampleEvery every)
{
    // Counter starts at zero; the two assignments are independent.
    _next = 0L;
    SampleEvery = (int)every;
}
/// <summary>
/// Include metrics around the volume of work scheduled on the worker thread pool
/// and the scheduling delays.
/// </summary>
/// <param name="histogramBuckets">
/// Buckets for the scheduling delay histogram; when null, <c>Constants.DefaultHistogramBuckets</c> is used.
/// </param>
/// <param name="sampleRate">
/// The sampling rate for thread pool scheduling events. A lower sampling rate reduces memory use
/// but reduces the accuracy of metrics produced (as a percentage of events are discarded).
/// If your application achieves a high level of throughput (thousands of work items scheduled per second on
/// the thread pool), it's recommended to reduce the sampling rate even further.
/// </param>
public Builder WithThreadPoolSchedulingStats(double[] histogramBuckets = null, SampleEvery sampleRate = SampleEvery.TenEvents)
{
    // AddOrReplace ensures repeated calls reconfigure rather than duplicate the collector.
    StatsCollectors.AddOrReplace(new ThreadPoolSchedulingStatsCollector(histogramBuckets ?? Constants.DefaultHistogramBuckets, sampleRate));
    return this;
}
/// <summary>
/// Include metrics summarizing the volume of methods being compiled
/// by the Just-In-Time compiler.
/// </summary>
/// <param name="sampleRate">
/// The sampling rate for JIT events. A lower sampling rate reduces memory use
/// but reduces the accuracy of metrics produced (as a percentage of events are discarded).
/// If your application JIT-compiles a very large volume of methods (e.g. heavy use of
/// dynamic code generation), it's recommended to reduce the sampling rate even further.
/// </param>
public Builder WithJitStats(SampleEvery sampleRate = SampleEvery.TenEvents)
{
    // AddOrReplace ensures repeated calls reconfigure rather than duplicate the collector.
    StatsCollectors.AddOrReplace(new JitStatsCollector(sampleRate));
    return this;
}
/// <summary>
/// Include metrics around volume of locks contended.
/// </summary>
/// <param name="sampleRate">
/// The sampling rate for contention events (defaults to <see cref="SampleEvery.TwoEvents"/>,
/// i.e. every 2nd event is sampled). A lower sampling rate reduces memory use
/// but reduces the accuracy of metrics produced (as a percentage of events are discarded).
/// </param>
public Builder WithContentionStats(SampleEvery sampleRate = SampleEvery.TwoEvents)
{
    // AddOrReplace ensures repeated calls reconfigure rather than duplicate the collector.
    StatsCollectors.AddOrReplace(new ContentionStatsCollector(sampleRate));
    return this;
}
public void SampleEvery_Reflects_The_Ratio_Of_Every_100_Events_That_Will_Be_Sampled(SampleEvery samplingRate, int expected)
{
    // Arrange
    var rate = new SamplingRate(samplingRate);

    // Act
    var actualInterval = rate.SampleEvery;

    // Assert
    Assert.That(actualInterval, Is.EqualTo(expected));
}