/// <summary>
/// Get a copy of this histogram, corrected for coordinated omission.
/// </summary>
/// <param name="expectedIntervalBetweenValueSamples">If <paramref name="expectedIntervalBetweenValueSamples"/> is larger than 0, add auto-generated value records as appropriate if value is larger than <c>expectedIntervalBetweenValueSamples</c></param>
/// <returns>a copy of this histogram, corrected for coordinated omission.</returns>
/// <remarks>
/// To compensate for the loss of sampled values when a recorded value is larger than the expected interval between value samples,
/// the new histogram will include an auto-generated additional series of decreasingly-smaller (down to the <paramref name="expectedIntervalBetweenValueSamples"/>)
/// value records for each count found in the current histogram that is larger than the expectedIntervalBetweenValueSamples.
/// <para>
/// Note: This is a post-correction method, as opposed to the at-recording correction method provided by <seealso cref="HistogramBase.RecordValueWithExpectedInterval"/>.
/// The two methods are mutually exclusive, and only one of the two should be used on a given data set to correct for the same coordinated omission issue.
/// </para>
/// See notes in the description of the Histogram calls for an illustration of why this corrective behavior is important.
/// </remarks>
public override HistogramBase CopyCorrectedForCoordinatedOmission(long expectedIntervalBetweenValueSamples)
{
    // Build an empty histogram with identical range/precision, then replay this
    // histogram's counts into it with coordinated-omission compensation applied.
    var toHistogram = new LongHistogram(LowestTrackableValue, HighestTrackableValue, NumberOfSignificantValueDigits);
    toHistogram.AddWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    // Idiomatic return (was `return(toHistogram);`), consistent with the other overloads in this file.
    return toHistogram;
}
/// <summary>
/// Create a copy of this histogram, complete with data and everything.
/// </summary>
/// <returns>A distinct copy of this histogram.</returns>
public override HistogramBase Copy()
{
    // Create an empty histogram with identical range/precision, then merge this
    // histogram's recorded data into it.
    var copy = new LongHistogram(LowestTrackableValue, HighestTrackableValue, NumberOfSignificantValueDigits);
    copy.Add(this);
    // Idiomatic return (was `return(copy);`), consistent with the other Copy overload in this file.
    return copy;
}
/// <summary>
/// Initializes a result describing a completed (non-faulted) latency test run.
/// </summary>
/// <param name="histogram">The latency histogram recorded during the run.</param>
/// <param name="duration">Wall-clock time the run took.</param>
/// <param name="gen0">Number of gen-0 garbage collections observed during the run.</param>
/// <param name="gen1">Number of gen-1 garbage collections observed during the run.</param>
/// <param name="gen2">Number of gen-2 garbage collections observed during the run.</param>
public LatencyTestSessionResult(LongHistogram histogram, TimeSpan duration, int gen0, int gen1, int gen2)
{
    Duration = duration;
    Histogram = histogram;
    Gen0 = gen0;
    Gen1 = gen1;
    Gen2 = gen2;
}
/// <summary>
/// Instantiates the configured latency test and executes it <c>Runs</c> times,
/// recording per-run latency, elapsed time, and GC collection counts.
/// </summary>
public void Run()
{
    _test = (ILatencyTest)Activator.CreateInstance(_perfTestType);
    CheckProcessorsRequirements(_test);

    Console.WriteLine("Starting latency tests");
    var stopwatch = new Stopwatch();
    var histogram = new LongHistogram(10000000000L, 4);

    for (var run = 0; run < Runs; run++)
    {
        stopwatch.Reset();
        histogram.Reset();

        // Force a clean GC state so the collection deltas below reflect this run only.
        GC.Collect();
        GC.WaitForPendingFinalizers();

        var gen0Before = GC.CollectionCount(0);
        var gen1Before = GC.CollectionCount(1);
        var gen2Before = GC.CollectionCount(2);

        LatencyTestSessionResult result;
        try
        {
            _test.Run(stopwatch, histogram);

            // Success: capture elapsed time plus how many collections the run triggered.
            result = new LatencyTestSessionResult(
                histogram,
                stopwatch.Elapsed,
                GC.CollectionCount(0) - gen0Before,
                GC.CollectionCount(1) - gen1Before,
                GC.CollectionCount(2) - gen2Before);
        }
        catch (Exception ex)
        {
            // Failure: record the exception as the run's result instead of aborting the session.
            result = new LatencyTestSessionResult(ex);
        }

        Console.WriteLine(result);
        _results.Add(result);
    }
}
/// <summary>
/// Create a copy of this histogram, complete with data and everything.
/// </summary>
/// <returns>A distinct copy of this histogram.</returns>
public override HistogramBase Copy()
{
    // Allocate a fresh histogram with the same range and precision settings,
    // then merge all recorded data from this instance into it.
    var clone = new LongHistogram(LowestTrackableValue, HighestTrackableValue, NumberOfSignificantValueDigits);
    clone.Add(this);
    return clone;
}
/// <summary>
/// Get a copy of this histogram, corrected for coordinated omission.
/// </summary>
/// <param name="expectedIntervalBetweenValueSamples">If <paramref name="expectedIntervalBetweenValueSamples"/> is larger than 0, add auto-generated value records as appropriate if value is larger than <c>expectedIntervalBetweenValueSamples</c></param>
/// <returns>a copy of this histogram, corrected for coordinated omission.</returns>
/// <remarks>
/// To compensate for the loss of sampled values when a recorded value is larger than the expected interval between value samples,
/// the new histogram will include an auto-generated additional series of decreasingly-smaller (down to the <paramref name="expectedIntervalBetweenValueSamples"/>)
/// value records for each count found in the current histogram that is larger than the expectedIntervalBetweenValueSamples.
/// <para>
/// Note: This is a post-correction method, as opposed to the at-recording correction method provided by <seealso cref="HistogramBase.RecordValueWithExpectedInterval"/>.
/// The two methods are mutually exclusive, and only one of the two should be used on a given data set to correct for the same coordinated omission issue.
/// </para>
/// See notes in the description of the Histogram calls for an illustration of why this corrective behavior is important.
/// </remarks>
public override HistogramBase CopyCorrectedForCoordinatedOmission(long expectedIntervalBetweenValueSamples)
{
    // Start from an empty histogram with identical configuration, then replay this
    // histogram's data into it while compensating for coordinated omission.
    var corrected = new LongHistogram(LowestTrackableValue, HighestTrackableValue, NumberOfSignificantValueDigits);
    corrected.AddWhileCorrectingForCoordinatedOmission(this, expectedIntervalBetweenValueSamples);
    return corrected;
}
/// <summary>
/// Benchmarks the latency of individual mimalloc alloc/free calls by running a random
/// walk over a pool of live allocations and recording per-call timings into an
/// HdrHistogram, printed as a percentile distribution after each of 4 rounds.
/// </summary>
public void MimallocAllocFreeCallLatency()
{
    // Configure mimalloc before any allocation happens; these options tune
    // commit/reset behavior of the allocator (see mimalloc options docs).
    Mem.OptionSetEnabled(Mem.Option.EagerCommit, true);
    Mem.OptionSetEnabled(Mem.Option.LargeOsPages, true);
    Mem.OptionSetEnabled(Mem.Option.ResetDecommits, true);
    Mem.OptionSetEnabled(Mem.Option.PageReset, true);
    Mem.OptionSetEnabled(Mem.Option.SegmentReset, true);
    Mem.OptionSetEnabled(Mem.Option.AbandonedPageReset, true);
    // Mem.OptionSet(Mem.Option.ResetDelay, 0);
    Mem.OptionSetEnabled(Mem.Option.EagerRegionCommit, true);
    // Route mimalloc's diagnostic output (e.g. StatsPrint below) to the console.
    Mem.RegisterOutput((str, arg) => { Console.Write(str); }, null);
    var rng = new Random();
    // Number of currently-live allocations; also the next free slot index in `ptrs`.
    var allocated = 0L;
    var h = Mem.HeapNew();
    var count = 100_000L;        // operations per round
    var size = 32 * 4096;        // 128 KiB per allocation (32 OS pages of 4 KiB)
    IntPtr[] ptrs = new IntPtr[count];
    for (int r = 0; r < 4; r++)
    {
        // Histogram range 1..1,000,000 ticks, 1 significant digit; values outside
        // this range would not be trackable — assumed sufficient for single calls.
        var histogram = new HdrHistogram.LongHistogram(1, 1000000, 1);
        using (Benchmark.Run("AllocFree" + r, count))
        {
            for (int i = 0; i < count; i++)
            {
                var x = rng.NextDouble();
                var start = Stopwatch.GetTimestamp();
                // Random walk keeping the live-allocation count between 1000 and 2000:
                // below 1000 always allocate, above 2000 always free; in between the
                // allocate probability scales with distance from the 2000 ceiling.
                if (allocated < 1000)
                {
                    Allocate();
                }
                else if (allocated > 2000)
                {
                    Free();
                }
                else if (((2000 - allocated) / 1000.0 * x) > 0.5)
                {
                    Allocate();
                }
                else
                {
                    Free();
                }
                // Timed region covers only the branch decision plus the alloc/free call
                // (including the page-touching loop inside Allocate).
                var time = Stopwatch.GetTimestamp() - start;
                histogram.RecordValue(time);
                // Allocates one block from heap `h`, touches one byte per 4 KiB page
                // (plus the last byte) to force the pages to be committed, and pushes
                // the pointer onto the `ptrs` stack.
                void Allocate()
                {
                    ptrs[allocated] = (IntPtr)Mem.HeapMalloc(h, (UIntPtr)size);
                    for (int j = 0; j < size; j += 4096)
                    {
                        ((byte *)ptrs[allocated])[j] = 0;
                    }
                    ((byte *)ptrs[allocated])[size - 1] = 0;
                    allocated++;
                }
                // Frees the most recently allocated block (LIFO). NOTE(review): uses
                // Mem.Free rather than a heap-specific free for the heap-allocated
                // block — mimalloc's mi_free accepts heap allocations, but confirm
                // the binding behaves the same.
                void Free()
                {
                    Mem.Free((byte *)ptrs[allocated - 1]);
                    allocated--;
                }
            }
        }
        // Report per-call latency percentiles, scaled from timestamp ticks to microseconds.
        histogram.OutputPercentileDistribution(Console.Out, outputValueUnitScalingRatio: OutputScalingFactor.TimeStampToMicroseconds);
    }
    Mem.StatsPrint();
}