// Returns the bucket covering the current time window; when the window has
// elapsed, rotates to the next slot (resetting it) and rebuilds the
// percentile snapshot. Lock-free fast path; rotation is guarded by a
// bounded Monitor.TryEnter plus a CAS on the bucket index.
internal Bucket GetCurrentBucket() {
    int newCurrentIndex;
    int initialCurrentIndex;
    Bucket bucket;
    Bucket newBucket;
    long currentTime;
    do {
        currentTime = clock.EllapsedTimeInMs;
        // Snapshot the index so the CAS below can detect a concurrent rotation.
        initialCurrentIndex = currentBucketIndex;
        bucket = buckets[initialCurrentIndex];
        // Fast path: the current bucket's time window is still open.
        if (bucket.bucketStartInMs + bucketSizeInMs > currentTime) {
            return(bucket);
        }
        // Window elapsed: advance (the array holds numberOfBuckets + 1 slots,
        // the extra one acting as a spare during rotation).
        newCurrentIndex = (currentBucketIndex + 1) % (numberOfBuckets + 1);
        newBucket = buckets[newCurrentIndex];
        bool lockTacken = false;
        try {
            // Bounded wait (10 ms): if another thread is already rotating,
            // return its bucket rather than blocking.
            Monitor.TryEnter(gate, 10, ref lockTacken);
            if (!lockTacken) {
                return(newBucket);
            }
            newBucket.Reset(currentTime);
        }
        finally {
            if (lockTacken) {
                Monitor.Exit(gate);
            }
        }
    // Retry from the top if another thread moved the index first.
    } while (Interlocked.CompareExchange(ref currentBucketIndex, newCurrentIndex, initialCurrentIndex) != initialCurrentIndex);
    // Rotation succeeded: rebuild the published snapshot from the live buckets.
    var items = from b in GetBuckets()
                select new SnapshotItem { Length = b.Length, Data = b.data };
    _percentileSnapshot = new PercentileSnapshot(items.ToArray());
    return(newBucket);
}
// Creates a rolling percentile window spanning timeInMs, split into
// numberOfBuckets segments of dataLength samples each (plus one spare
// slot used while rotating buckets).
internal RollingPercentileNumber(IClock clock, int timeInMs, int numberOfBuckets, int dataLength, IDynamicProperty<bool> enabled) {
    this.enabled = enabled;
    this.TimeInMs = timeInMs;
    this.clock = clock;
    this.bucketSizeInMs = timeInMs / numberOfBuckets;
    this.numberOfBuckets = numberOfBuckets;

    // Allocate one extra bucket to serve as the rotation spare.
    var bucketCount = numberOfBuckets + 1;
    buckets = new Bucket[bucketCount];
    for (var index = 0; index < bucketCount; index++) {
        buckets[index] = new Bucket(dataLength);
    }
    buckets[0].bucketStartInMs = clock.EllapsedTimeInMs;

    // Seed the snapshot from the (currently empty) buckets.
    var seed = from b in GetBuckets()
               select new SnapshotItem { Length = b.Length, Data = b.data };
    _percentileSnapshot = new PercentileSnapshot(seed.ToArray());
}
// Builds the rolling window state: numberOfBuckets live buckets plus a
// spare, each holding up to dataLength samples, covering timeInMs total.
internal RollingPercentileNumber(IClock clock, int timeInMs, int numberOfBuckets, int dataLength, IDynamicProperty<bool> enabled) {
    this.enabled = enabled;
    this.TimeInMs = timeInMs;
    this.clock = clock;
    this.bucketSizeInMs = timeInMs / numberOfBuckets;

    // The "+ 1" reserves a spare slot used during bucket rotation.
    var total = numberOfBuckets + 1;
    buckets = new Bucket[total];
    this.numberOfBuckets = numberOfBuckets;

    var slot = 0;
    while (slot < total) {
        buckets[slot] = new Bucket(dataLength);
        slot++;
    }
    // Stamp the first bucket with the current clock time so the first
    // window starts now.
    buckets[0].bucketStartInMs = clock.EllapsedTimeInMs;

    // Publish an initial (empty) percentile snapshot.
    var initialItems = GetBuckets()
        .Select(b => new SnapshotItem { Length = b.Length, Data = b.data })
        .ToArray();
    _percentileSnapshot = new PercentileSnapshot(initialItems);
}
// Verifies bucket turnover over time: percentiles come from completed
// buckets only, and after two rotations the rolling data matches a
// snapshot built directly from the same values.
public void testRolling() {
    MockedClock clock = new MockedClock();
    RollingPercentileNumber rolling = new RollingPercentileNumber(clock, timeInMilliseconds, numberOfBuckets, bucketDataLength, DynamicProperties.Factory.AsProperty(true));

    rolling.AddValue(1000);
    rolling.AddValue(1000);
    rolling.AddValue(1000);
    rolling.AddValue(2000);

    // Only one bucket so far, and no turnover yet, so no percentile is available.
    Assert.Equal(1, rolling.GetBuckets().Count());
    Assert.Equal(0, rolling.GetPercentile(50));

    clock.Increment(6000);

    // Still a single bucket until the structure is touched again.
    Assert.Equal(1, rolling.GetBuckets().Count());

    // Reading a percentile triggers bucket retrieval, so the first bucket is
    // now complete and contributes to the percentile...
    Assert.Equal(1000, rolling.GetPercentile(50));
    // ...and a second bucket has been created by that retrieval.
    Assert.Equal(2, rolling.GetBuckets().Count());

    rolling.AddValue(1000);
    rolling.AddValue(500);

    // Adding values does not create new buckets within the same window.
    Assert.Equal(2, rolling.GetBuckets().Count());

    rolling.AddValue(200);
    rolling.AddValue(200);
    rolling.AddValue(1600);
    rolling.AddValue(200);
    rolling.AddValue(1600);
    rolling.AddValue(1600);

    // No new bucket yet, so the in-progress bucket is ignored and the
    // percentile is unchanged.
    Assert.Equal(1000, rolling.GetPercentile(50));

    // Advance to the next bucket so everything above joins the snapshot.
    clock.Increment(6000);

    // The rolling data should now match a snapshot built from the same values.
    PercentileSnapshot reference = new PercentileSnapshot(1000, 1000, 1000, 2000, 1000, 500, 200, 200, 1600, 200, 1600, 1600);
    Assert.Equal(reference.GetPercentile(0.15), rolling.GetPercentile(0.15));
    Assert.Equal(reference.GetPercentile(0.50), rolling.GetPercentile(0.50));
    Assert.Equal(reference.GetPercentile(0.90), rolling.GetPercentile(0.90));
    Assert.Equal(reference.GetPercentile(0.995), rolling.GetPercentile(0.995));

    // mean of (1000+1000+1000+2000+1000+500+200+200+1600+200+1600+1600)/12
    // truncated to an integer.
    Assert.Equal(991, reference.Mean);
}
// Verifies that a distribution dominated by a single low value (2) with a
// handful of high outliers keeps low/median percentiles at 2 while the
// upper percentiles surface the outliers.
//
// Fix: the first guard checks GetPercentile(95), but the original failure
// message claimed "the 90th" and reported GetPercentile(90) — a failure
// would have printed a misleading diagnostic. The message and reported
// value now match the condition actually tested.
public void testPercentileAlgorithm_Extremes() {
    PercentileSnapshot p = new PercentileSnapshot(2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 2, 800, 768, 657, 700, 867);

    // The bulk of the distribution is 2, so low and mid percentiles are 2.
    Assert.Equal(2, p.GetPercentile(50));
    Assert.Equal(2, p.GetPercentile(10));
    Assert.Equal(2, p.GetPercentile(75));

    // The upper tail must reflect the high outliers (657-867).
    if (p.GetPercentile(95) < 600) {
        throw new Exception("We expect the 95th to be over 600 to show the extremes but got: " + p.GetPercentile(95));
    }
    if (p.GetPercentile(99) < 600) {
        throw new Exception("We expect the 99th to be over 600 to show the extremes but got: " + p.GetPercentile(99));
    }
}
// Median of 11 values supplied unsorted — the snapshot is expected to
// sort them itself. Sorted: 50, 75, 100, 125, 160, 170, 180, 200, 210,
// 300, 500; the interpolated 50th percentile is 175.
public void testPercentileAlgorithm_Median4() {
    PercentileSnapshot snapshot = new PercentileSnapshot(300, 75, 125, 500, 100, 160, 180, 200, 210, 50, 170);

    Assert.Equal(175, snapshot.GetPercentile(50));
}
// Median of the same 11 values as Median4, but supplied already sorted;
// the interpolated 50th percentile is still 175.
public void testPercentileAlgorithm_Median3() {
    PercentileSnapshot snapshot = new PercentileSnapshot(50, 75, 100, 125, 160, 170, 180, 200, 210, 300, 500);

    Assert.Equal(175, snapshot.GetPercentile(50));
}
// Median of ten 100s and one 500 outlier: the outlier must not move the
// 50th percentile away from 100.
public void testPercentileAlgorithm_Median2() {
    PercentileSnapshot snapshot = new PercentileSnapshot(100, 100, 100, 100, 100, 100, 100, 100, 100, 100, 500);

    Assert.Equal(100, snapshot.GetPercentile(50));
}
// Returns the bucket covering the current time window; when the window has
// elapsed, rotates to the next slot (resetting it) and rebuilds the
// percentile snapshot. Lock-free fast path; rotation is guarded by a
// bounded Monitor.TryEnter plus a CAS on the bucket index.
internal Bucket GetCurrentBucket() {
    int newCurrentIndex;
    int initialCurrentIndex;
    Bucket bucket;
    Bucket newBucket;
    long currentTime;
    do {
        currentTime = clock.EllapsedTimeInMs;
        // Snapshot the index so the CAS below can detect a concurrent rotation.
        initialCurrentIndex = currentBucketIndex;
        bucket = buckets[initialCurrentIndex];
        // Fast path: the current bucket's time window is still open.
        if (bucket.bucketStartInMs + bucketSizeInMs > currentTime) {
            return bucket;
        }
        // Window elapsed: advance (the array holds numberOfBuckets + 1 slots,
        // the extra one acting as a spare during rotation).
        newCurrentIndex = (currentBucketIndex + 1) % (numberOfBuckets + 1);
        newBucket = buckets[newCurrentIndex];
        bool lockTacken = false;
        try {
            // Bounded wait (10 ms): if another thread is already rotating,
            // return its bucket rather than blocking.
            Monitor.TryEnter(gate, 10, ref lockTacken);
            if (!lockTacken) {
                return newBucket;
            }
            newBucket.Reset(currentTime);
        }
        finally {
            if (lockTacken) Monitor.Exit(gate);
        }
    // Retry from the top if another thread moved the index first.
    } while (Interlocked.CompareExchange(ref currentBucketIndex, newCurrentIndex, initialCurrentIndex) != initialCurrentIndex);
    // Rotation succeeded: rebuild the published snapshot from the live buckets.
    var items = from b in GetBuckets()
                select new SnapshotItem { Length = b.Length, Data = b.data };
    _percentileSnapshot = new PercentileSnapshot(items.ToArray());
    return newBucket;
}