static HistogramDataAccessTest()
{
    histogram = new Histogram(highestTrackableValue, numberOfSignificantValueDigits);
    scaledHistogram = new Histogram(1000, highestTrackableValue * 512, numberOfSignificantValueDigits);
    rawHistogram = new Histogram(highestTrackableValue, numberOfSignificantValueDigits);
    scaledRawHistogram = new Histogram(1000, highestTrackableValue * 512, numberOfSignificantValueDigits);

    // Log hypothetical scenario: 100 seconds of "perfect" 1 msec results, sampled
    // 100 times per second (10,000 results), followed by a 100 second pause with
    // a single (100 second) recorded result. Recording is done indicating an expected
    // interval between samples of 10 msec:
    const long oneMsec = 1000;
    const long tenMsecExpectedInterval = 10000;
    const long hundredSecPause = 100000000L;

    for (int sample = 0; sample < 10000; sample++)
    {
        histogram.recordValueWithExpectedInterval(oneMsec, tenMsecExpectedInterval);
        scaledHistogram.recordValueWithExpectedInterval(oneMsec * 512, tenMsecExpectedInterval * 512);
        rawHistogram.recordValue(oneMsec);
        scaledRawHistogram.recordValue(oneMsec * 512);
    }
    histogram.recordValueWithExpectedInterval(hundredSecPause, tenMsecExpectedInterval);
    scaledHistogram.recordValueWithExpectedInterval(hundredSecPause * 512, tenMsecExpectedInterval * 512);
    rawHistogram.recordValue(hundredSecPause);
    scaledRawHistogram.recordValue(hundredSecPause * 512);

    // The "post corrected" histograms re-create the coordinated-omission correction
    // from the raw data after the fact.
    postCorrectedHistogram = rawHistogram.copyCorrectedForCoordinatedOmission(tenMsecExpectedInterval);
    postCorrectedScaledHistogram = scaledRawHistogram.copyCorrectedForCoordinatedOmission(tenMsecExpectedInterval * 512);
}
// ReSharper disable InconsistentNaming
public new void add(AbstractHistogram otherHistogram)
// ReSharper restore InconsistentNaming
{
    // Both histograms' monitors must be held during the add. Acquire them in
    // construction-identity order so two concurrent cross-adds cannot deadlock.
    object firstLock = Identity < otherHistogram.Identity ? (object)this : otherHistogram;
    object secondLock = Identity < otherHistogram.Identity ? (object)otherHistogram : this;
    lock (firstLock)
    {
        lock (secondLock)
        {
            base.add(otherHistogram);
        }
    }
}
/// <summary>
/// Builds a logarithmic-bucket view over the given histogram, stepping by
/// <paramref name="logBase"/> multiples starting from <paramref name="valueUnitsInFirstBucket"/>.
/// </summary>
public /*private*/ LogarithmicBucketValues(/*final*/ AbstractHistogram histogram,
                                           /*final*/ int valueUnitsInFirstBucket,
                                           /*final*/ double logBase)
{
    this.logBase = logBase;
    this.valueUnitsInFirstBucket = valueUnitsInFirstBucket;
    this.histogram = histogram;
}
/// <summary>
/// Records loopCount values (with coordinated-omission correction at the given
/// expected interval). The recorded value toggles by bit 15 of the loop index,
/// alternating between testValueLevel and testValueLevel + 0x8000.
/// </summary>
void recordLoopWithExpectedInterval(AbstractHistogram histogram, long loopCount, long expectedInterval)
{
    for (long iteration = 0; iteration < loopCount; iteration++)
    {
        long value = testValueLevel + (iteration & 0x8000);
        histogram.recordValueWithExpectedInterval(value, expectedInterval);
    }
}
static HistogramDataTest()
{
    histogram = new Histogram(highestTrackableValue, numberOfSignificantValueDigits);
    scaledHistogram = new Histogram(1000, highestTrackableValue * 512, numberOfSignificantValueDigits);
    rawHistogram = new Histogram(highestTrackableValue, numberOfSignificantValueDigits);
    scaledRawHistogram = new Histogram(1000, highestTrackableValue * 512, numberOfSignificantValueDigits);

    // Log hypothetical scenario: 100 seconds of "perfect" 1 msec results, sampled
    // 100 times per second (10,000 results), followed by a 100 second pause with
    // a single (100 second) recorded result. Recording is done indicating an expected
    // interval between samples of 10 msec:
    for (int sample = 0; sample < 10000; sample++)
    {
        histogram.recordValueWithExpectedInterval(1000 /* 1 msec */, 10000 /* 10 msec expected interval */);
        scaledHistogram.recordValueWithExpectedInterval(1000 * 512 /* 1 msec */, 10000 * 512 /* 10 msec expected interval */);
        rawHistogram.recordValue(1000 /* 1 msec */);
        scaledRawHistogram.recordValue(1000 * 512 /* 1 msec */);
    }

    histogram.recordValueWithExpectedInterval(100000000L /* 100 sec */, 10000 /* 10 msec expected interval */);
    scaledHistogram.recordValueWithExpectedInterval(100000000L * 512 /* 100 sec */, 10000 * 512 /* 10 msec expected interval */);
    rawHistogram.recordValue(100000000L /* 100 sec */);
    scaledRawHistogram.recordValue(100000000L * 512 /* 100 sec */);

    // Reconstruct the coordinated-omission correction from the raw recordings:
    postCorrectedHistogram = rawHistogram.copyCorrectedForCoordinatedOmission(10000 /* 10 msec expected interval */);
    postCorrectedScaledHistogram = scaledRawHistogram.copyCorrectedForCoordinatedOmission(10000 * 512 /* 10 msec expected interval */);
}
/// <summary>Re-arms this linear iterator to start a fresh pass at the given bucket width.</summary>
private void reset(/*final*/ AbstractHistogram histogram, /*final*/ long valueUnitsPerBucket)
{
    base.resetIterator(histogram);
    this.valueUnitsPerBucket = valueUnitsPerBucket;
    // First reporting level is one bucket width in; cache its lowest-equivalent value.
    this.nextValueReportingLevel = valueUnitsPerBucket;
    this.nextValueReportingLevelLowestEquivalent = histogram.lowestEquivalentValue(valueUnitsPerBucket);
}
/// <summary>
/// Records loopCount raw values; the value toggles by bit 15 of the loop index
/// between TestValueLevel and TestValueLevel + 0x8000.
/// </summary>
private static void RecordLoop(AbstractHistogram histogram, long loopCount)
{
    for (long index = 0; index < loopCount; index++)
    {
        long value = TestValueLevel + (index & 0x8000);
        histogram.recordValue(value);
    }
}
/// <summary>
/// Wraps a histogram with reusable iterators and cached bucket geometry for data access.
/// </summary>
internal HistogramData(/*final*/ AbstractHistogram histogram)
{
    this.histogram = histogram;
    this.bucketCount = histogram.bucketCount;
    this.subBucketCount = histogram.subBucketCount;
    // Reusable iterators for percentile and recorded-value traversals.
    this.percentileIterator = new PercentileIterator(histogram, 1);
    this.recordedValuesIterator = new RecordedValuesIterator(histogram);
}
/// <summary>
/// Copy-settings constructor. When <paramref name="allocateCountsArray"/> is false the
/// backing counts array is left unallocated (used by subclasses that manage their own storage).
/// </summary>
protected Histogram(AbstractHistogram source, bool allocateCountsArray)
    : base(source)
{
    if (!allocateCountsArray)
    {
        return;
    }
    counts = new long[countsArrayLength];
}
/// <summary>Re-arms this logarithmic iterator for a fresh pass over the histogram.</summary>
private void reset(/*final*/ AbstractHistogram histogram, /*final*/ int valueUnitsInFirstBucket, /*final*/ double logBase)
{
    base.resetIterator(histogram);
    this.valueUnitsInFirstBucket = valueUnitsInFirstBucket;
    this.logBase = logBase;
    // First reporting level equals the first bucket size; cache its lowest-equivalent value.
    this.nextValueReportingLevel = valueUnitsInFirstBucket;
    this.nextValueReportingLevelLowestEquivalent = histogram.lowestEquivalentValue(valueUnitsInFirstBucket);
}
/// <summary>Re-arms this percentile iterator to start again from the 0th percentile.</summary>
private void reset(/*final*/ AbstractHistogram histogram, /*final*/ int percentileTicksPerHalfDistance)
{
    base.resetIterator(histogram);
    this.percentileTicksPerHalfDistance = percentileTicksPerHalfDistance;
    this.reachedLastRecordedValue = false;
    this.percentileLevelToIterateFrom = 0.0;
    this.percentileLevelToIterateTo = 0.0;
}
/// <summary>
/// Snapshots the given histogram together with caller-supplied min/max values
/// and their user-facing string forms.
/// </summary>
public HdrSnapshot(AbstractHistogram histogram, long minValue, string minUserValue, long maxValue, string maxUserValue)
{
    this.histogram = histogram;
    Min = minValue;
    MinUserValue = minUserValue;
    Max = maxValue;
    MaxUserValue = maxUserValue;
}
/// <summary>
/// Initializes a new instance of the <see cref="HdrSnapshot"/> class. When a user-supplied
/// min/max string is absent, the corresponding bound falls back to the histogram's own
/// recorded minimum/maximum.
/// </summary>
/// <param name="histogram">The histogram.</param>
/// <param name="minValue">The minimum value (used only when <paramref name="minUserValue"/> is present).</param>
/// <param name="minUserValue">The minimum user value.</param>
/// <param name="maxValue">The maximum value (used only when <paramref name="maxUserValue"/> is present).</param>
/// <param name="maxUserValue">The maximum user value.</param>
public HdrSnapshot(AbstractHistogram histogram, long minValue, string minUserValue, long maxValue, string maxUserValue)
{
    _histogram = histogram;
    MinUserValue = minUserValue;
    MaxUserValue = maxUserValue;
    Min = minUserValue.IsPresent() ? minValue : _histogram.getMinValue();
    Max = maxUserValue.IsPresent() ? maxValue : _histogram.getMaxValue();
}
/// <summary>
/// Times recording into <paramref name="histogram"/> with coordinated-omission correction,
/// printing warm-up and hot-path rates, and optionally asserting the timed run caused
/// (almost) no garbage collections.
/// </summary>
/// <param name="label">Prefix printed on each result line.</param>
/// <param name="histogram">Histogram to record into; reset between warm-up and the timed run.</param>
/// <param name="expectedInterval">Expected interval passed to recordValueWithExpectedInterval.</param>
/// <param name="timingLoopCount">Number of recordings in the timed (hot) loop.</param>
/// <param name="assertNoGC">When true, asserts at most one collection per generation.</param>
/// <param name="multiThreaded">When true, skips the per-recorded-entry rate line.</param>
private void testRawRecordingSpeedAtExpectedInterval(String label, AbstractHistogram histogram, long expectedInterval, long timingLoopCount, bool assertNoGC = true, bool multiThreaded = false)
{
    Console.WriteLine("\nTiming recording speed with expectedInterval = " + expectedInterval + " :");

    // Warm up:
    var timer = Stopwatch.StartNew();
    recordLoopWithExpectedInterval(histogram, warmupLoopLength, expectedInterval);
    timer.Stop();

    // 1 millisecond (ms) = 1000 microsecond (µs or usec)
    // 1 microsecond (µs or usec) = 1000 nanosecond (ns or nsec)
    // 1 second = 1,000,000 usec or 1,000 ms
    // Clamp to at least 1 usec: ElapsedMilliseconds can be 0 for a sub-millisecond run,
    // which previously caused a divide-by-zero in the rate calculation.
    long deltaUsec = Math.Max(1L, timer.ElapsedMilliseconds * 1000L);
    long rate = 1000000 * warmupLoopLength / deltaUsec;
    Console.WriteLine("{0}Warmup:\n{1:N0} value recordings completed in {2:N0} usec, rate = {3:N0} value recording calls per sec.",
        label, warmupLoopLength, deltaUsec, rate);

    histogram.reset();

    // Wait a bit to make sure compiler had a chance to do it's stuff:
    try { Thread.Sleep(1000); } catch (Exception) { }

    var gcBefore = PrintGCAndMemoryStats("GC Before");
    timer = Stopwatch.StartNew();
    recordLoopWithExpectedInterval(histogram, timingLoopCount, expectedInterval);
    timer.Stop();
    var gcAfter = PrintGCAndMemoryStats("GC After ");

    deltaUsec = Math.Max(1L, timer.ElapsedMilliseconds * 1000L);
    rate = 1000000 * timingLoopCount / deltaUsec;
    Console.WriteLine(label + "Hot code timing:");
    Console.WriteLine("{0}{1:N0} value recordings completed in {2:N0} usec, rate = {3:N0} value recording calls per sec.",
        label, timingLoopCount, deltaUsec, rate);

    if (multiThreaded == false)
    {
        // In a single-threaded run the histogram's total count (raw + corrected entries)
        // belongs entirely to this loop, so a per-recorded-entry rate is meaningful.
        rate = 1000000 * histogram.getTotalCount() / deltaUsec;
        Console.WriteLine("{0}{1:N0} raw recorded entries completed in {2:N0} usec, rate = {3:N0} recorded values per sec.",
            label, histogram.getTotalCount(), deltaUsec, rate);
    }

    if (assertNoGC)
    {
        // TODO work out why we always seems to get at least 1 GC here, maybe it's due to the length of the test run??
        Assert.LessOrEqual(gcAfter.Gen1 - gcBefore.Gen1, 1, "There should be at MOST 1 Gen1 GC Collections");
        // BUGFIX: this assertion previously compared Gen1 deltas, so Gen2 growth was never checked.
        Assert.LessOrEqual(gcAfter.Gen2 - gcBefore.Gen2, 1, "There should be at MOST 1 Gen2 GC Collections");
        Assert.LessOrEqual(gcAfter.Gen3 - gcBefore.Gen3, 1, "There should be at MOST 1 Gen3 GC Collections");
    }
}
/// <summary>
/// Construct a histogram with the same range settings as a given source histogram,
/// duplicating the source's start/end timestamps (but NOT its contents).
/// </summary>
/// <param name="source">The source histogram to duplicate.</param>
public ConcurrentHistogram(AbstractHistogram source)
    : base(source, false) // base must not allocate counts: this class keeps its own atomic arrays
{
    // NOTE(review): the class maintains paired active/inactive atomic count arrays,
    // each with its own normalizing index offset — presumably swapped during
    // concurrent maintenance; confirm against the rest of the class.
    activeCounts = new AtomicLongArray(countsArrayLength);
    inactiveCounts = new AtomicLongArray(countsArrayLength);
    activeCountsNormalizingIndexOffset = 0;
    inactiveCountsNormalizingIndexOffset = 0;
}
// NOTE(review): the block below is the Java serialization test carried over from the
// upstream HdrHistogram port, kept commented out (Java object serialization has no
// direct .NET equivalent here). Left as-is pending a decision to port or delete it.
//void testAbstractSerialization(AbstractHistogram histogram) throws Exception { // histogram.recordValue(testValueLevel); // histogram.recordValue(testValueLevel * 10); // histogram.recordValueWithExpectedInterval(histogram.getHighestTrackableValue() - 1, 31); // ByteArrayOutputStream bos = new ByteArrayOutputStream(); // ObjectOutput out = null; // ByteArrayInputStream bis = null; // ObjectInput in = null; // AbstractHistogram newHistogram = null; // try { // out = new ObjectOutputStream(bos); // out.writeObject(histogram); // Deflater compresser = new Deflater(); // compresser.setInput(bos.toByteArray()); // compresser.finish(); // byte [] compressedOutput = new byte[1024*1024]; // int compressedDataLength = compresser.deflate(compressedOutput); // Console.WriteLine("Serialized form of " + histogram.getClass() + " with highestTrackableValue = " + // histogram.getHighestTrackableValue() + "\n and a numberOfSignificantValueDigits = " + // histogram.getNumberOfSignificantValueDigits() + " is " + bos.toByteArray().length + // " bytes long. 
Compressed form is " + compressedDataLength + " bytes long."); // Console.WriteLine(" (estimated footprint was " + histogram.getEstimatedFootprintInBytes() + " bytes)"); // bis = new ByteArrayInputStream(bos.toByteArray()); // in = new ObjectInputStream(bis); // newHistogram = (AbstractHistogram) in.readObject(); // } finally { // if (out != null) out.close(); // bos.close(); // if (in !=null) in.close(); // if (bis != null) bis.close(); // } // Assert.assertNotNull(newHistogram); // assertEqual(histogram, newHistogram); //}
// Asserts that two histograms are equal overall and agree on the counts at the two
// test value levels and on total count.
private void assertEqual(AbstractHistogram expectedHistogram, AbstractHistogram actualHistogram) { Assert.assertEquals(expectedHistogram, actualHistogram); Assert.assertEquals( expectedHistogram.getCountAtValue(testValueLevel), actualHistogram.getCountAtValue(testValueLevel)); Assert.assertEquals( expectedHistogram.getCountAtValue(testValueLevel * 10), actualHistogram.getCountAtValue(testValueLevel * 10)); Assert.assertEquals( expectedHistogram.getTotalCount(), actualHistogram.getTotalCount()); }
/// <summary>Resets the shared iterator state so iteration restarts from value zero.</summary>
protected void ResetIterator(AbstractHistogram histogram)
{
    Histogram = histogram;
    SavedHistogramTotalRawCount = histogram.getTotalCount();
    ArrayTotalCount = histogram.getTotalCount();
    _integerToDoubleValueConversionRatio = histogram.integerToDoubleValueConversionRatio;

    // Position: start at index 0 / value 0; the first step spans one unit magnitude.
    CurrentIndex = 0;
    CurrentValueAtIndex = 0;
    NextValueAtIndex = 1 << histogram.unitMagnitude;

    // Accumulators:
    _prevValueIteratedTo = 0;
    _totalCountToPrevIndex = 0;
    TotalCountToCurrentIndex = 0;
    _totalValueToCurrentIndex = 0;
    CountAtThisValue = 0;
    _freshSubBucket = true;

    CurrentIterationValue.Reset();
}
/// <summary>Resets the shared iterator state so iteration restarts from value zero.</summary>
protected void resetIterator(AbstractHistogram histogram)
{
    this.histogram = histogram;
    savedHistogramTotalRawCount = histogram.getTotalCount();
    arrayTotalCount = histogram.getTotalCount();
    integerToDoubleValueConversionRatio = histogram.integerToDoubleValueConversionRatio;

    // Position: start at index 0 / value 0; the first step spans one unit magnitude.
    currentIndex = 0;
    currentValueAtIndex = 0;
    nextValueAtIndex = 1 << histogram.unitMagnitude;

    // Accumulators:
    prevValueIteratedTo = 0;
    totalCountToPrevIndex = 0;
    totalCountToCurrentIndex = 0;
    totalValueToCurrentIndex = 0;
    countAtThisValue = 0;
    freshSubBucket = true;

    currentIterationValue.reset();
}
/// <summary>
/// Resets the shared iterator state (bucket/sub-bucket indexed variant) so iteration
/// restarts from the first sub-bucket.
/// </summary>
protected void resetIterator(/*final*/ AbstractHistogram histogram)
{
    this.histogram = histogram;
    savedHistogramTotalRawCount = histogram.getTotalCount();
    arrayTotalCount = histogram.getTotalCount();

    // Position: bucket 0 / sub-bucket 0, with the next step at sub-bucket 1 (value 1).
    currentBucketIndex = 0;
    currentSubBucketIndex = 0;
    currentValueAtIndex = 0;
    nextBucketIndex = 0;
    nextSubBucketIndex = 1;
    nextValueAtIndex = 1;

    // Accumulators:
    prevValueIteratedTo = 0;
    totalCountToPrevIndex = 0;
    totalCountToCurrentIndex = 0;
    totalValueToCurrentIndex = 0;
    countAtThisValue = 0;
    freshSubBucket = true;

    // Lazily create the reusable iteration-value holder on first reset.
    currentIterationValue = currentIterationValue ?? new HistogramIterationValue();
    currentIterationValue.reset();
}
/// <summary>
/// Thread-safe add: acquires this histogram's update lock and the other histogram's
/// monitor in construction-identity order so concurrent cross-adds cannot deadlock.
/// </summary>
public new void add(/*final*/ AbstractHistogram other)
{
    object firstLock = identity < other.identity ? (object)updateLock : other;
    object secondLock = identity < other.identity ? (object)other : updateLock;
    lock (firstLock)
    {
        lock (secondLock)
        {
            base.add(other);
        }
    }
}
/// <summary>
/// Thread-safe add. Lock acquisition is ordered by construction identity so that two
/// histograms concurrently adding to each other cannot deadlock.
/// </summary>
public new void add(/*final*/ AbstractHistogram other)
{
    bool lockThisFirst = identity < other.identity;
    if (lockThisFirst)
    {
        lock (updateLock)
        {
            lock (other)
            {
                base.add(other);
            }
        }
        return;
    }
    lock (other)
    {
        lock (updateLock)
        {
            base.add(other);
        }
    }
}
/// <summary>
/// Provide a means of iterating through all recorded histogram values using the finest granularity steps
/// supported by the underlying representation. The iteration steps through all non-zero recorded value counts,
/// and terminates when all recorded histogram values are exhausted.
/// <seealso cref="RecordedValuesIterator" />
/// </summary>
/// <param name="histogram">The histogram on which to iterate.</param>
/// <returns>A lazy sequence of the histogram's non-zero-count iteration values.</returns>
public static IEnumerable<HistogramIterationValue> RecordedValues(this AbstractHistogram histogram)
{
    var iterator = new RecordedValuesIterator(histogram);
    return IterateOver(iterator);
}
/// <summary>Creates a percentile iterator over the given histogram.</summary>
/// <param name="histogram">The histogram this iterator will operate on.</param>
/// <param name="percentileTicksPerHalfDistance">The number of iteration steps per half-distance to 100%.</param>
public PercentileIterator(/*final*/ AbstractHistogram histogram, /*final*/ int percentileTicksPerHalfDistance)
{
    reset(histogram, percentileTicksPerHalfDistance);
}
/// <summary>Creates an iterator over every value step of the given histogram.</summary>
/// <param name="histogram">The histogram this iterator will operate on.</param>
public AllValuesIterator(/*final*/ AbstractHistogram histogram)
{
    reset(histogram);
}
/// <summary>Creates a logarithmic-step iterator over the given histogram.</summary>
/// <param name="histogram">The histogram this iterator will operate on.</param>
/// <param name="valueUnitsInFirstBucket">The size (in value units) of the first value bucket step.</param>
/// <param name="logBase">The multiplier by which the bucket size is expanded in each iteration step.</param>
public LogarithmicIterator(/*final*/ AbstractHistogram histogram, /*final*/ int valueUnitsInFirstBucket, /*final*/ double logBase)
{
    reset(histogram, valueUnitsInFirstBucket, logBase);
}
/// <summary>
/// Construct a histogram with the same range settings as a given source histogram,
/// duplicating the source's start/end timestamps (but NOT its contents).
/// </summary>
/// <param name="source">The source histogram to duplicate.</param>
public Histogram(AbstractHistogram source)
    : this(source, true) // a plain copy always allocates its own counts array
{
}
/// <summary>Re-arms the iterator; -1 sentinels mean no bucket/sub-bucket visited yet.</summary>
private void reset(/*final*/ AbstractHistogram histogram)
{
    base.resetIterator(histogram);
    this.visitedBucketIndex = -1;
    this.visitedSubBucketIndex = -1;
}
/// <summary>Creates a linear-step iterator over the given histogram.</summary>
/// <param name="histogram">The histogram this iterator will operate on.</param>
/// <param name="valueUnitsPerBucket">The size (in value units) of each bucket iteration.</param>
public LinearIterator(/*final*/ AbstractHistogram histogram, /*final*/ int valueUnitsPerBucket)
{
    reset(histogram, valueUnitsPerBucket);
}
/// <summary>Creates an iterator over the histogram's non-zero recorded values.</summary>
/// <param name="histogram">The histogram this iterator will operate on.</param>
public RecordedValuesIterator(AbstractHistogram histogram)
{
    this.reset(histogram);
}
/// <summary>Construct a new histogram by decoding it from a ByteBuffer.</summary>
/// <param name="buffer">The buffer to decode from.</param>
/// <param name="minBarForHighestTrackableValue">Force highestTrackableValue to be set at least this high.</param>
/// <returns>The newly constructed histogram.</returns>
public static Histogram decodeFromByteBuffer(/*final*/ ByteBuffer buffer, /*final*/ long minBarForHighestTrackableValue)
{
    var decoded = AbstractHistogram.decodeFromByteBuffer(buffer, typeof(Histogram), minBarForHighestTrackableValue);
    return (Histogram)decoded;
}
/// <summary>Re-arms the iterator; -1 marks that no index has been visited yet.</summary>
private void reset(AbstractHistogram histogram)
{
    ResetIterator(histogram);
    this.visitedIndex = -1;
}
/// <summary>
/// Construct a histogram with the same range settings as a given source histogram,
/// duplicating the source's start/end timestamps (but NOT its contents).
/// </summary>
/// <param name="source">The source histogram to duplicate.</param>
public SynchronizedHistogram(AbstractHistogram source)
    : base(source)
{
}
/// <summary>
/// Records loopCount values with coordinated-omission correction; the value toggles
/// by bit 15 of the loop counter between testValueLevel and testValueLevel + 0x8000.
/// </summary>
void recordLoopWithExpectedInterval(AbstractHistogram histogram, long loopCount, long expectedInterval)
{
    long counter = 0;
    while (counter < loopCount)
    {
        histogram.recordValueWithExpectedInterval(testValueLevel + (counter & 0x8000), expectedInterval);
        counter++;
    }
}