public unsafe void ThreadedWritesAreSafe()
{
    const int writeCount = 1000000;
    var dimSet = DimensionSetTests.CreateDimensionSet(1);
    using (var data = GetTestData(writeCount, dimSet))
    {
        // Hammer the store from many threads at once; every write must succeed.
        Parallel.For(0, writeCount,
                     i =>
                     {
                         var key = (uint)i;
                         Assert.IsTrue(data.TryWrite(&key, i));
                     });
        data.Seal();

        // Every key should map to its own value, with no duplicates or drops.
        var seen = new HashSet<long>();
        foreach (var kvp in data)
        {
            Assert.AreEqual(kvp.Key[0], kvp.Value);
            Assert.IsFalse(seen.Contains(kvp.Value));
            seen.Add(kvp.Value);
        }

        Assert.AreEqual(writeCount, seen.Count);
    }
}
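// The tests in this section call a GetTestData helper that is not shown here.
// The sketch below is an assumption about its shape, mirroring the buffer
// construction used in CanMergeManySources and the TestBufferedKeyedData
// constructor used in CanReadSerializedData; the exact constructor arguments
// are not confirmed by this section.
private static TestBufferedKeyedData GetTestData(int keyCount, DimensionSet dimensionSet)
{
    // Allocate a buffer sized for the expected key count and wrap it in a
    // writable (final argument) keyed-data view.
    var buffer = new byte[TestBufferedKeyedData.GetBufferSizeForKeyCount(keyCount, dimensionSet)];
    return new TestBufferedKeyedData(buffer, 0, 0, dimensionSet, true);
}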
public unsafe void CanConvertFromZeroToMultipleDimensions()
{
    for (var i = 1; i < 10; ++i)
    {
        var dimSet = DimensionSetTests.CreateDimensionSet(i);
        const uint valueCount = 100;
        using (var data = GetTestData((int)valueCount, DimensionSet.Empty))
        {
            for (uint c = 0; c < valueCount; ++c)
            {
                Assert.IsTrue(data.TryWrite(null, c));
            }

            data.Seal();
            data.Convert(dimSet);

            // After conversion every entry should carry the wildcard key for
            // the new dimension set, with values preserved in write order.
            uint currentCount = 0;
            var wildcardKey = Key.GetWildcardKey(dimSet);
            foreach (var kvp in data)
            {
                Assert.AreEqual(wildcardKey, kvp.Key);
                Assert.AreEqual(currentCount, kvp.Value);
                ++currentCount;
            }
        }
    }
}
public unsafe void CanMergeManySources()
{
    const int writeCount = 100000;
    const int maxKeyValue = 20; // we want a lot of collisions.
    var data = new BufferedKeyedData<int>[20];
    var dimensionSet = DimensionSetTests.CreateDimensionSet(1);
    for (var i = 0; i < data.Length; ++i)
    {
        data[i] = new BufferedKeyedData<int>(
            new byte[BufferedKeyedData<int>.GetBufferSizeForKeyCount(writeCount / data.Length, dimensionSet)],
            0, 0, new DimensionSet(dimensionSet), true);
    }

    // Scatter writes round-robin across the sources, tracking every value we
    // expect to see for each key.
    var rng = new Random();
    var expected = new Dictionary<uint, List<int>>();
    for (int i = 0; i < writeCount; ++i)
    {
        var key = (uint)rng.Next(maxKeyValue);
        Assert.IsTrue(data[i % data.Length].TryWrite(&key, i));

        List<int> expectedValuesForKey;
        if (!expected.TryGetValue(key, out expectedValuesForKey))
        {
            expectedValuesForKey = new List<int>();
            expected.Add(key, expectedValuesForKey);
        }
        expectedValuesForKey.Add(i);
    }

    var dataToMerge = new List<IEnumerable<KeyValuePair<Key, IMergeSource>>>(data.Length);
    foreach (var d in data)
    {
        d.Seal();
        d.Sort();
        var chunk = new List<KeyValuePair<Key, IMergeSource>>(d.Count);
        chunk.AddRange(
            d.Select(kvp => new KeyValuePair<Key, IMergeSource>(kvp.Key.Clone() as Key, new LongData(kvp.Value))));
        dataToMerge.Add(chunk);
    }

    // Merged output must be strictly ascending by key, with each key emitted
    // exactly once and its values summed together.
    Key currentKey = null;
    foreach (var kvp in KeyedDataMerge<LongData>.MergeSorted(dataToMerge))
    {
        Assert.IsTrue(currentKey < kvp.Key);
        currentKey = kvp.Key.Clone() as Key;
        var expectedData = expected[kvp.Key[0]];
        Assert.AreEqual(expectedData.Sum(), kvp.Value.Value);
        expected.Remove(kvp.Key[0]);
    }
}
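// LongData, the IMergeSource wrapper handed to KeyedDataMerge<LongData>
// above, is not defined in this section. This is a speculative sketch of its
// shape, inferred from the expectedData.Sum() assertion: a single value that
// accumulates across all merged entries for a key. The member names here,
// including the MergeFrom hook, are assumptions rather than the library's
// confirmed IMergeSource interface.
private sealed class LongData : IMergeSource
{
    public LongData(long value)
    {
        this.Value = value;
    }

    // Running total of all values merged into this entry.
    public long Value { get; private set; }

    // Assumed merge hook: combining two sources for the same key sums their
    // values, which is what the Sum() assertion above depends on.
    public void MergeFrom(LongData other)
    {
        this.Value += other.Value;
    }
}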
public void SetUp()
{
    if (this.memoryStreamManager == null)
    {
        // 128 KB block size, 1 MB large buffer multiple, 16 MB maximum buffer size.
        this.memoryStreamManager = new RecyclableMemoryStreamManager(1 << 17, 1 << 20, 1 << 24);
    }

    this.dimensionSet = DimensionSetTests.CreateDimensionSet(2);
    this.dataStore = new TestKeyedDataStore(this.dimensionSet, this.memoryStreamManager);
}
public void GeneratedWildcardKeysAreValid()
{
    for (var i = 0; i < 128; ++i)
    {
        var key = Key.GetWildcardKey(DimensionSetTests.CreateDimensionSet(i));
        for (var ki = 0; ki < key.Values.Length; ++ki)
        {
            Assert.AreEqual(Key.WildcardDimensionValue, key.Values[ki]);
        }
    }
}
public unsafe void CanWriteMultipleKeysWithSameValue()
{
    const int sameKeyCount = 4;
    var dimSet = DimensionSetTests.CreateDimensionSet(1);
    using (var data = GetTestData(sameKeyCount * 2, dimSet))
    {
        uint otherKey = 0;
        uint staticKey = 867; // cannot be const (can't take the address of a const)

        // Alternate a fresh key with the same repeated key on every pass.
        for (var i = 0; i < sameKeyCount; ++i)
        {
            dimSet.dimensions[0].StringToIndex(otherKey.ToString());
            Assert.IsTrue(data.TryWrite(&otherKey, 42));
            ++otherKey;

            dimSet.dimensions[0].StringToIndex(staticKey.ToString());
            Assert.IsTrue(data.TryWrite(&staticKey, 5309));
        }

        data.Seal();
        Assert.AreEqual(sameKeyCount * 2, data.Count);

        otherKey = 0;
        var count = 0;
        var dupeCount = 0;
        foreach (var kvp in data)
        {
            if (count % 2 == 0)
            {
                Assert.AreEqual(otherKey, kvp.Key.Values[0]);
                Assert.AreEqual(42, kvp.Value);
                ++otherKey;
            }
            else
            {
                Assert.AreEqual(staticKey, kvp.Key.Values[0]);
                Assert.AreEqual(5309, kvp.Value);
                ++dupeCount;
            }
            ++count;
        }

        Assert.AreEqual(sameKeyCount, dupeCount);
    }
}
private TestBufferedKeyedData Generate(int[] levels, bool random)
{
    var dims = DimensionSetTests.CreateDimensionSet(levels.Length);
    int totalKeys = 1;
    for (var i = 0; i < levels.Length; ++i)
    {
        totalKeys *= levels[i];
    }

    var data = GetTestData(totalKeys, dims);
    uint count = 0;
    var key = new Key(new uint[levels.Length]);
    this.WriteLevel(data, random, levels, -1, key, ref count);
    Assert.AreEqual(totalKeys, data.Count);
    data.Seal();
    return data;
}
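// WriteLevel, the recursive helper Generate depends on, is not shown in this
// section. The sketch below is a presumed implementation: it walks each
// dimension ("level") in turn, enumerates every key value for that level, and
// writes one sequentially numbered entry per fully assembled key, so the
// total written matches the product of the level sizes asserted above. The
// handling of the 'random' flag (presumably shuffling enumeration order) is
// an assumption and is not implemented here.
private unsafe void WriteLevel(TestBufferedKeyedData data, bool random, int[] levels, int level, Key key, ref uint count)
{
    if (level == levels.Length - 1)
    {
        // Full key assembled; write the next sequential value.
        fixed (uint* keyData = key.Values)
        {
            Assert.IsTrue(data.TryWrite(keyData, count));
        }
        ++count;
        return;
    }

    ++level;
    for (uint v = 0; v < levels[level]; ++v)
    {
        key.Values[level] = v; // 'random' would shuffle this ordering.
        this.WriteLevel(data, random, levels, level, key, ref count);
    }
}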
public unsafe void CanReadSerializedData()
{
    const int writeCount = 1000000;
    var dimSet = DimensionSetTests.CreateDimensionSet(1);
    var newStream = new MemoryStream();
    using (var data = GetTestData(writeCount, dimSet))
    {
        // Register every key string with the dimension up front, presumably
        // so the parallel writers below never mutate it concurrently.
        for (var i = 0; i < writeCount; ++i)
        {
            dimSet.dimensions[0].StringToIndex(i.ToString());
        }

        Parallel.For(0, writeCount,
                     i =>
                     {
                         var key = (uint)i;
                         Assert.IsTrue(data.TryWrite(&key, i));
                     });
        data.Seal();
        data.Serialize(newStream);
    }

    // Round-trip: rehydrate from the serialized buffer and verify contents.
    using (var data = new TestBufferedKeyedData(newStream.GetBuffer(), 0, (int)newStream.Length, dimSet))
    {
        var seen = new HashSet<long>();
        foreach (var kvp in data)
        {
            Assert.AreEqual(kvp.Key[0], kvp.Value);
            Assert.IsFalse(seen.Contains(kvp.Value));
            seen.Add(kvp.Value);
        }

        Assert.AreEqual(writeCount, seen.Count);
    }
}
public async Task OneMinuteHeavyLoad()
{
    var start = DateTime.Now;
    var end = start + this.dataManager.CompactionConfiguration.Default.Interval;
    var hitCounters = new List<SingleCounterConfiguration<HitCounter>>();
    var histogramCounters = new List<SingleCounterConfiguration<HistogramCounter>>();
    var twoDimSet = DimensionSetTests.CreateDimensionSet(2);
    var threeDimSet = DimensionSetTests.CreateDimensionSet(3);

    // Two-dimensional counters: ten hit counters and five histograms with a
    // modest write rate.
    for (var i = 0; i < 10; ++i)
    {
        var counter = new SingleCounterConfiguration<HitCounter>
                      {
                          Counter = await this.dataManager.CreateHitCounter("/HitCounter/TwoDim/" + i, twoDimSet),
                          DimensionMaximums = new[] { 5, 500 },
                          WritesPerInterval = 60 * 10,
                          Writer = this.WriteHitCount,
                      };
        hitCounters.Add(counter);
    }

    for (var i = 0; i < 5; ++i)
    {
        var counter = new SingleCounterConfiguration<HistogramCounter>
                      {
                          Counter = await this.dataManager.CreateHistogramCounter("/SmallHistogram/TwoDim/" + i, twoDimSet),
                          DimensionMaximums = new[] { 5, 500 },
                          WritesPerInterval = 60 * 10,
                          Writer = this.WriteSmallHistogram,
                      };
        histogramCounters.Add(counter);
    }

    // Three-dimensional counters with a much higher write rate.
    for (var i = 0; i < 10; ++i)
    {
        var counter = new SingleCounterConfiguration<HitCounter>
                      {
                          Counter = await this.dataManager.CreateHitCounter("/HitCounter/ThreeDim/" + i, threeDimSet),
                          DimensionMaximums = new[] { 5, 250, 1000 },
                          WritesPerInterval = 60 * 10 * 1000,
                          Writer = this.WriteHitCount,
                      };
        hitCounters.Add(counter);
    }

    for (var i = 0; i < 5; ++i)
    {
        var counter = new SingleCounterConfiguration<HistogramCounter>
                      {
                          Counter = await this.dataManager.CreateHistogramCounter("/SmallHistogram/ThreeDim/" + i, threeDimSet),
                          DimensionMaximums = new[] { 5, 250, 1000 },
                          WritesPerInterval = 60 * 10 * 1000,
                          Writer = this.WriteSmallHistogram,
                      };
        histogramCounters.Add(counter);
    }

    // Run one writer task per counter until the compaction interval elapses.
    var tasks = new List<Task>();
    foreach (var config in hitCounters)
    {
        var t = new Task(() => this.WriteRandomlyKeyedData(config, start, end));
        t.Start();
        tasks.Add(t);
    }

    foreach (var config in histogramCounters)
    {
        var t = new Task(() => this.WriteRandomlyKeyedData(config, start, end));
        t.Start();
        tasks.Add(t);
    }

    await Task.WhenAll(tasks);

    // Flush everything and dump the query results for inspection.
    foreach (var c in hitCounters)
    {
        c.Counter.DataSet.Flush();
        foreach (var d in c.Counter.Query(new DimensionSpecification()))
        {
            Trace.WriteLine(d);
        }
    }

    foreach (var c in histogramCounters)
    {
        c.Counter.DataSet.Flush();
        foreach (var d in c.Counter.Query(new DimensionSpecification()))
        {
            Trace.WriteLine(d);
        }
    }
}
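// SingleCounterConfiguration<TCounter> and the WriteRandomlyKeyedData /
// WriteHitCount / WriteSmallHistogram helpers used above are not shown in
// this section. Below is a minimal sketch of the configuration container,
// assuming only the property names used in the object initializers above;
// the Writer delegate signature is a guess and not confirmed by this section.
private sealed class SingleCounterConfiguration<TCounter>
{
    // The counter instance under load.
    public TCounter Counter { get; set; }

    // Exclusive upper bound for the randomly generated value of each dimension.
    public int[] DimensionMaximums { get; set; }

    // Number of writes to issue over one compaction interval.
    public int WritesPerInterval { get; set; }

    // Performs a single write against the counter with the given dimensions.
    public Action<TCounter, DimensionSpecification> Writer { get; set; }
}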