public void SortIsStable()
{
    // Serialize three (key, value) pairs. The two pairs sharing key 867 are
    // written in a known order so a stable sort must preserve it.
    var stream = new MemoryStream();
    using (var writer = new BinaryWriter(stream, Encoding.Default, true))
    {
        writer.Write((uint)867);
        writer.Write((uint)5310);
        writer.Write((uint)42);
        writer.Write((uint)0);
        writer.Write((uint)867);
        writer.Write((uint)5309);
    }

    var dimensions = new DimensionSet(new HashSet<Dimension> { new Dimension("dummy") });
    using (var keyedData = new BufferedKeyedData<uint>(stream.GetBuffer(), 0, (int)stream.Length, dimensions))
    {
        keyedData.Sort();
        Assert.AreEqual(3, keyedData.Count);

        // Snapshot the pairs (keys must be cloned: enumeration reuses the Key instance).
        var sorted = keyedData
            .Select(pair => new KeyValuePair<Key, long>(pair.Key.Clone() as Key, pair.Value))
            .ToList();

        // Key 42 sorts first; the two 867 entries keep their original write order.
        Assert.AreEqual((uint)42, sorted[0].Key.Values[0]);
        Assert.AreEqual((uint)0, sorted[0].Value);
        Assert.AreEqual((uint)867, sorted[1].Key.Values[0]);
        Assert.AreEqual((uint)5310, sorted[1].Value);
        Assert.AreEqual((uint)867, sorted[2].Key.Values[0]);
        Assert.AreEqual((uint)5309, sorted[2].Value);
    }
}
/// <summary>
/// Writes 100k values across 20 buffers using only 20 distinct keys so every
/// key collides across (nearly) all sources, then verifies that the merge
/// emits each key exactly once, in strictly ascending order, with its values
/// summed — and that no key is dropped.
/// </summary>
public unsafe void CanMergeManySources()
{
    const int writeCount = 100000;
    const int maxKeyValue = 20; // we want a lot of collisions.
    var data = new BufferedKeyedData<int>[20];
    var dimensionSet = DimensionSetTests.CreateDimensionSet(1);
    for (var i = 0; i < data.Length; ++i)
    {
        data[i] = new BufferedKeyedData<int>(
            new byte[BufferedKeyedData<int>.GetBufferSizeForKeyCount(writeCount / data.Length, dimensionSet)],
            0, 0, new DimensionSet(dimensionSet), true);
    }

    // Fixed seed keeps this test deterministic and failures reproducible;
    // an unseeded Random made each run exercise different data.
    var rng = new Random(8675309);
    var expected = new Dictionary<uint, List<int>>();
    for (int i = 0; i < writeCount; ++i)
    {
        var key = (uint)rng.Next(maxKeyValue);
        Assert.IsTrue(data[i % data.Length].TryWrite(&key, i));

        List<int> expectedValuesForKey;
        if (!expected.TryGetValue(key, out expectedValuesForKey))
        {
            expectedValuesForKey = new List<int>();
            expected.Add(key, expectedValuesForKey);
        }
        expectedValuesForKey.Add(i);
    }

    var dataToMerge = new List<IEnumerable<KeyValuePair<Key, IMergeSource>>>(data.Length);
    foreach (var d in data)
    {
        d.Seal();
        d.Sort();
        // Keys must be cloned: enumeration reuses the same Key instance.
        var chunk = new List<KeyValuePair<Key, IMergeSource>>(d.Count);
        chunk.AddRange(d.Select(kvp =>
            new KeyValuePair<Key, IMergeSource>(kvp.Key.Clone() as Key, new LongData(kvp.Value))));
        dataToMerge.Add(chunk);
    }

    Key currentKey = null;
    foreach (var kvp in KeyedDataMerge<LongData>.MergeSorted(dataToMerge))
    {
        // Strictly ascending key order (assumes Key's '<' treats null as
        // smallest, as the original relied on — NOTE(review): confirm).
        Assert.IsTrue(currentKey < kvp.Key);
        currentKey = kvp.Key.Clone() as Key;

        var expectedData = expected[kvp.Key[0]];
        Assert.AreEqual(expectedData.Sum(), kvp.Value.Value);
        expected.Remove(kvp.Key[0]);
    }

    // Every key that was written must have been emitted by the merge;
    // without this check a merge that drops keys would still pass.
    Assert.AreEqual(0, expected.Count);
}
/// <summary>
/// Wraps separate key and value streams as queryable multi-value data.
/// Empty key streams are released immediately; otherwise both streams are
/// retained because the key/value views read directly from their buffers.
/// </summary>
public QueryableMultiValueData(MemoryStream keyStream, MemoryStream valueStream, DimensionSet dimensionSet)
    : base(dimensionSet)
{
    // No keys means nothing to index: keep an empty key collection and
    // dispose both streams right away.
    if (keyStream.Length == 0)
    {
        this.keys = new BufferedKeyedData<uint>(null, 0, 0, dimensionSet);
        keyStream.Dispose();
        valueStream.Dispose();
        return;
    }

    this.keyStream = keyStream;
    this.valueStream = valueStream;
    this.keys = new BufferedKeyedData<uint>(keyStream.GetBuffer(), 0, (int)keyStream.Length, dimensionSet);
    this.values = BufferedValueArray.Create(valueStream.GetBuffer(), 0, (int)valueStream.Length);
}
/// <summary>
/// Wraps a single persisted stream that holds the key block followed by the
/// value block. The key block length is derived from <paramref name="keyCount"/>;
/// the remainder of the stream is treated as value data.
/// </summary>
public QueryableMultiValueData(PersistedDataType type, MemoryStream source, DimensionSet dimensionSet, int keyCount)
    : base(dimensionSet)
{
    // An empty stream carries no keys or values; release it immediately.
    if (source.Length == 0)
    {
        this.keys = new BufferedKeyedData<uint>(null, 0, 0, dimensionSet);
        source.Dispose();
        return;
    }

    this.keyStream = source;
    this.valueStream = null;

    var keyBytes = (int)BufferedKeyedData<uint>.GetBufferSizeForKeyCount(keyCount, dimensionSet);
    var buffer = source.GetBuffer();
    var totalBytes = (int)source.Length;
    this.keys = new BufferedKeyedData<uint>(buffer, 0, keyBytes, dimensionSet);
    // Values occupy everything after the key block.
    this.values = BufferedValueArray.Create(type, buffer, keyBytes, totalBytes - keyBytes);
}
/// <summary>
/// Releases the key/value views and their backing streams.
/// </summary>
/// <param name="disposing">True when called from Dispose(); false from a finalizer.</param>
protected override void Dispose(bool disposing)
{
    // Per the standard dispose pattern, managed resources must only be
    // touched on the explicit Dispose() path; on the finalizer path they may
    // already have been finalized. The original disposed them unconditionally.
    // NOTE(review): base.Dispose(disposing) is not called here — confirm the
    // base class has no cleanup of its own.
    if (disposing)
    {
        if (this.keys != null)
        {
            this.keys.Dispose();
            this.keys = null;
        }
        if (this.keyStream != null)
        {
            this.keyStream.Dispose();
            this.keyStream = null;
        }
        if (this.values != null) // may be null for empty sets of keys.
        {
            this.values.Dispose();
            this.values = null;
        }
        if (this.valueStream != null)
        {
            this.valueStream.Dispose();
            this.valueStream = null;
        }
    }
}
public void SortIsStable()
{
    // Three pairs are serialized; the two sharing key 867 are written in a
    // known order so a stable sort must keep them in that order.
    var backing = new MemoryStream();
    using (var writer = new BinaryWriter(backing, Encoding.Default, true))
    {
        writer.Write((uint)867);
        writer.Write((uint)5310);
        writer.Write((uint)42);
        writer.Write((uint)0);
        writer.Write((uint)867);
        writer.Write((uint)5309);
    }

    var dims = new DimensionSet(new HashSet<Dimension> { new Dimension("dummy") });
    using (var keyed = new BufferedKeyedData<uint>(backing.GetBuffer(), 0, (int)backing.Length, dims))
    {
        keyed.Sort();
        Assert.AreEqual(3, keyed.Count);

        // Materialize pairs, cloning each key since enumeration reuses the Key object.
        var result = keyed
            .Select(p => new KeyValuePair<Key, long>(p.Key.Clone() as Key, p.Value))
            .ToList();

        // 42 sorts ahead of both 867 entries, which retain write order.
        Assert.AreEqual((uint)42, result[0].Key.Values[0]);
        Assert.AreEqual((uint)0, result[0].Value);
        Assert.AreEqual((uint)867, result[1].Key.Values[0]);
        Assert.AreEqual((uint)5310, result[1].Value);
        Assert.AreEqual((uint)867, result[2].Key.Values[0]);
        Assert.AreEqual((uint)5309, result[2].Value);
    }
}
/// <summary>
/// Writes 100k values across 20 buffers using only 20 distinct keys so every
/// key collides across (nearly) all sources, then verifies that the merge
/// emits each key exactly once, in strictly ascending order, with its values
/// summed — and that no key is dropped.
/// </summary>
public unsafe void CanMergeManySources()
{
    const int writeCount = 100000;
    const int maxKeyValue = 20; // we want a lot of collisions.
    var data = new BufferedKeyedData<int>[20];
    var dimensionSet = DimensionSetTests.CreateDimensionSet(1);
    for (var i = 0; i < data.Length; ++i)
    {
        data[i] = new BufferedKeyedData<int>(
            new byte[BufferedKeyedData<int>.GetBufferSizeForKeyCount(writeCount / data.Length, dimensionSet)],
            0, 0, new DimensionSet(dimensionSet), true);
    }

    // Fixed seed keeps this test deterministic and failures reproducible;
    // an unseeded Random made each run exercise different data.
    var rng = new Random(8675309);
    var expected = new Dictionary<uint, List<int>>();
    for (int i = 0; i < writeCount; ++i)
    {
        var key = (uint)rng.Next(maxKeyValue);
        Assert.IsTrue(data[i % data.Length].TryWrite(&key, i));

        List<int> expectedValuesForKey;
        if (!expected.TryGetValue(key, out expectedValuesForKey))
        {
            expectedValuesForKey = new List<int>();
            expected.Add(key, expectedValuesForKey);
        }
        expectedValuesForKey.Add(i);
    }

    var dataToMerge = new List<IEnumerable<KeyValuePair<Key, IMergeSource>>>(data.Length);
    foreach (var d in data)
    {
        d.Seal();
        d.Sort();
        // Keys must be cloned: enumeration reuses the same Key instance.
        var chunk = new List<KeyValuePair<Key, IMergeSource>>(d.Count);
        chunk.AddRange(d.Select(kvp =>
            new KeyValuePair<Key, IMergeSource>(kvp.Key.Clone() as Key, new LongData(kvp.Value))));
        dataToMerge.Add(chunk);
    }

    Key currentKey = null;
    foreach (var kvp in KeyedDataMerge<LongData>.MergeSorted(dataToMerge))
    {
        // Strictly ascending key order (assumes Key's '<' treats null as
        // smallest, as the original relied on — NOTE(review): confirm).
        Assert.IsTrue(currentKey < kvp.Key);
        currentKey = kvp.Key.Clone() as Key;

        var expectedData = expected[kvp.Key[0]];
        Assert.AreEqual(expectedData.Sum(), kvp.Value.Value);
        expected.Remove(kvp.Key[0]);
    }

    // Every key that was written must have been emitted by the merge;
    // without this check a merge that drops keys would still pass.
    Assert.AreEqual(0, expected.Count);
}