/// <summary>
///     Get the index of the biggest item less than the value using fully ordered comparison
///     Compare on index key first then on primary key (see <see cref="Compare"/>)
/// </summary>
/// <param name="value">item to position inside the sorted _data list</param>
/// <param name="startIndex">inclusive lower bound of the search range</param>
/// <param name="endIndex">inclusive upper bound of the search range</param>
/// <returns>index of the item to insert after, or -1 if the value is smaller than every item in range</returns>
private int FindInsertAfterIndex(PackedObject value, int startIndex, int endIndex)
{
    // overflow-safe midpoint of the current search range
    var mid = startIndex + (endIndex - startIndex) / 2;
    var midValue = _data[mid];

    var newStart = startIndex;
    var newEnd = endIndex;

    // NOTE(review): reference equality — triggers only when the very same object
    // instance is already stored at mid
    if (midValue == value)
    {
        return(mid);
    }

    // keep the half of the range that may contain the insertion point
    if (Compare(midValue, value) > 0)
    {
        newEnd = mid;
    }
    else
    {
        newStart = mid;
    }

    if (newStart == newEnd) //so also equal to mid
    {
        // single candidate left: valid only if it is smaller than the value
        if (Compare(midValue, value) < 0)
        {
            return(mid);
        }

        return(-1);
    }

    if (newStart == newEnd - 1)
    {
        // two candidates left: prefer the bigger one that is still smaller than the value
        if (Compare(_data[newEnd], value) < 0)
        {
            return(newEnd);
        }

        if (Compare(_data[newStart], value) < 0)
        {
            return(newStart);
        }

        return(-1);
    }

    // the range strictly shrinks (endIndex - startIndex >= 2 here), so recursion terminates
    return(FindInsertAfterIndex(value, newStart, newEnd));
}
/// <summary>
///     Add a new item to the eviction queue. It is appended at the tail, the position
///     least likely to be evicted.
///     REQUIRE: the item must not already be present in the queue.
/// </summary>
/// <param name="newItem">item to register; must not be null</param>
public void AddNew(PackedObject newItem)
{
    if (newItem == null)
        throw new ArgumentNullException(nameof(newItem));

    lock (_syncRoot)
    {
        if (_cachedObjectsByKey.ContainsKey(newItem.PrimaryKey))
            throw new NotSupportedException("Item already in eviction queue");

        // keep the dictionary and the linked list in sync: the stored node
        // allows O(1) removal later
        _cachedObjectsByKey.Add(newItem.PrimaryKey, _queue.AddLast(newItem));
    }
}
/// <summary>
///     Mark the item as used. Moves it to the end of the queue (least likely to be evicted).
///     If the item is not present ignore (may be useful if certain items are excluded by the
///     eviction policy).
/// </summary>
/// <param name="item">item to refresh; must not be null</param>
public void Touch(PackedObject item)
{
    if (item == null)
        throw new ArgumentNullException(nameof(item));

    lock (_syncRoot)
    {
        // FIX: single dictionary lookup (TryGetValue) instead of ContainsKey + indexer,
        // which performed the same hash lookup twice
        if (_cachedObjectsByKey.TryGetValue(item.PrimaryKey, out var node))
        {
            // moving the existing node preserves the dictionary entry
            _queue.Remove(node);
            _queue.AddLast(node);
        }
    }
}
/// <summary>
///     Update an object previously stored.
///     The primary key must be the same, all other values may change.
/// </summary>
/// <param name="item">new version of the object</param>
public void InternalUpdate(PackedObject item)
{
    if (item == null)
        throw new ArgumentNullException(nameof(item));

    if (!DataByPrimaryKey.ContainsKey(item.PrimaryKey))
        throw new NotSupportedException(
            $"Update called for the object {item} which is not stored in the cache");

    // an update is processed as remove + insert so every index is rebuilt for this object
    InternalRemoveByPrimaryKey(item.PrimaryKey);
    InternalAddNew(item);

    // an updated object counts as recently used
    EvictionPolicy.Touch(item);
}
public void EvictionOrder()
{
    var schema = TypedSchemaFactory.FromType(typeof(TradeLike));

    var queue = new EvictionQueue { Capacity = 9, EvictionCount = 2 };

    var allItems = new List<PackedObject>();

    // fill the queue with 10 items (one above capacity)
    for (var i = 0; i < 10; i++)
    {
        var packedItem = PackedObject.Pack(new TradeLike(i, 1000 + i, "aaa", DateTime.Now, 456), schema);
        queue.AddNew(packedItem);
        allItems.Add(packedItem);
    }

    //items in queue now: 0 1 2 3 4 5 6 7 8 9
    Assert.IsTrue(queue.EvictionRequired);

    var evicted = queue.Go();

    //items in queue: 3 4 5 6 7 8 9
    Assert.AreEqual(evicted.Count, 3);
    Assert.AreEqual(evicted[0].PrimaryKey, 0);
    Assert.AreEqual(evicted[1].PrimaryKey, 1);

    // touching an item moves it to the protected end of the queue
    queue.Touch(allItems[3]); //items in queue: 4 5 6 7 8 9 3
    queue.Touch(allItems[4]); //items in queue: 5 6 7 8 9 3 4

    queue.Capacity = 7;
    evicted = queue.Go();

    // the two oldest untouched items are evicted
    Assert.AreEqual(evicted.Count, 2);
    Assert.AreEqual(evicted[0].PrimaryKey, 5);
    Assert.AreEqual(evicted[1].PrimaryKey, 6);
}
/// <summary>
///     Fully ordered comparison: order by the indexed key first, then break ties
///     with the primary key so the ordering is total.
/// </summary>
private int Compare(PackedObject left, PackedObject right)
{
    // primary ordering criterion: the indexed key
    var byIndexKey = left.Values[_keyIndex].CompareTo(right.Values[_keyIndex]);
    if (byIndexKey != 0)
        return byIndexKey;

    // tie-break on the primary key (unique), guaranteeing a total order
    return left.PrimaryKey.CompareTo(right.PrimaryKey);
}
public void StreamUnstreamOneCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var item1 = new CacheableTypeOk(1, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    var it1 = PackedObject.Pack(item1, schema);

    using var stream = new MemoryStream();

    Streamer.ToStream(stream, it1);
    stream.Seek(0, SeekOrigin.Begin);

    var reloaded = Streamer.FromStream<PackedObject>(stream);
    var original = PackedObject.Unpack<CacheableTypeOk>(reloaded);

    // FIX: the previous assertion "original is CacheableTypeOk" was only a null check
    // (the local is statically typed); assert the full round trip instead, as the
    // other streaming tests do
    Assert.IsNotNull(original);
    Assert.AreEqual(item1, original);
}
public void StreamUnstreamManyCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    // pack three items with primary keys 1, 2, 3
    var items = new List<PackedObject>(3);
    for (var key = 1; key <= 3; key++)
    {
        var source = new CacheableTypeOk(key, 1003, "AHA", new DateTime(2010, 10, 02), 8);
        items.Add(PackedObject.Pack(source, schema));
    }

    using (var stream = new MemoryStream())
    {
        Streamer.ToStreamMany(stream, items, new int[0], null);
        stream.Seek(0, SeekOrigin.Begin);

        var itemsReceived = 0;

        Streamer.FromStream(stream,
            delegate(CacheableTypeOk data, int currentItem, int totalItems)
            {
                Assert.IsTrue(currentItem > 0);
                Assert.IsTrue(currentItem <= totalItems);

                itemsReceived++;

                // items arrive in insertion order, so the primary key matches the counter
                Assert.AreEqual(itemsReceived, data.PrimaryKey);
            },
            delegate { Assert.Fail(); });

        Assert.AreEqual(itemsReceived, 3);
    }
}
public void LessRecentlyUsed()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    // local factory: pack a test item with the given primary key
    PackedObject Pack(int key)
    {
        return PackedObject.Pack(new CacheableTypeOk(key, key + 1000, "aaa", new DateTime(2010, 10, 10), 1500),
            schema);
    }

    var policy = new LruEvictionPolicy(10, 2);

    for (var i = 0; i < 100; i++)
        policy.AddItem(Pack(i));

    Assert.IsTrue(policy.IsEvictionRequired);

    var toRemove = policy.DoEviction();
    Assert.AreEqual(92, toRemove.Count);

    var packed93 = Pack(93);

    // check that the 93rd item was not removed
    Assert.IsFalse(toRemove.Any(i => i == packed93));

    policy.Touch(packed93);

    // push two new items so that eviction is triggered again
    policy.AddItem(Pack(100));
    policy.AddItem(Pack(101));

    toRemove = policy.DoEviction();
    Assert.AreEqual(2, toRemove.Count);

    // item 93 was not removed because it was recently used (the call to Touch)
    Assert.IsFalse(toRemove.Any(i => i == packed93));
}
public void ComputePivotWithServerValues()
{
    var description = TypedSchemaFactory.FromType(typeof(Order));

    var order1 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "geek",
        ClientId = 101, ProductId = 401, Id = Guid.NewGuid(), Quantity = 2
    };
    var order2 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "sf",
        ClientId = 101, ProductId = 401, Id = Guid.NewGuid(), Quantity = 2
    };

    var packed1 = PackedObject.Pack(order1, description);
    var packed2 = PackedObject.Pack(order2, description);

    var pivot = new PivotLevel();

    // no axis; Amount and Quantity (indexes 1 and 2 in the schema) are aggregated
    pivot.AggregateOneObject(packed1, new List<int>(), new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int>(), new List<int> { 1, 2 });

    // Amount and Quantity should be aggregated
    Assert.AreEqual(2, pivot.AggregatedValues.Count);

    var amount = pivot.AggregatedValues.First(v => v.ColumnName == "Amount");
    Assert.AreEqual(2, amount.Count);
    Assert.AreEqual(order1.Amount + order2.Amount, amount.Sum);

    Console.WriteLine(pivot.ToString());
}
/// <summary>
///     Insert a new object into every internal structure: primary-key dictionary,
///     unique-key dictionaries, ordered/index structures and the full-text index.
/// </summary>
/// <param name="packedObject">object to store; must have a non-null primary key and belong to this collection</param>
private void InternalAddNew(PackedObject packedObject)
{
    // a KeyValue instance may exist but represent a null value
    if (packedObject.PrimaryKey.IsNull)
    {
        throw new NotSupportedException(
            $"Can not insert an object with null primary key: collection {CollectionSchema.CollectionName}");
    }

    // refuse objects packed for another collection
    if (packedObject.CollectionName != CollectionSchema.CollectionName)
    {
        throw new InvalidOperationException(
            $"An object of type {packedObject.CollectionName} can not be stored in DataStore of type {CollectionSchema.CollectionName}");
    }

    var primaryKey = packedObject.PrimaryKey;
    // distinct from the IsNull check above: here the reference itself is null
    if (ReferenceEquals(primaryKey, null))
    {
        throw new InvalidOperationException("can not store an object having a null primary key");
    }

    DataByPrimaryKey.Add(primaryKey, packedObject);

    // maintain each unique-key dictionary
    foreach (var metadata in CollectionSchema.UniqueKeyFields)
    {
        var value = packedObject.Values[metadata.Order];
        var dictionaryToUse = _dataByUniqueKey[value.KeyName];
        dictionaryToUse.Add(value, packedObject);
    }

    // maintain each index (ordered or dictionary based)
    foreach (var index in _dataByIndexKey)
    {
        index.Value.Put(packedObject);
    }

    // index the full-text data only when present
    if (packedObject.FullText != null && packedObject.FullText.Length > 0)
    {
        _fullTextIndex.IndexDocument(packedObject);
    }
}
public void StreamManyUnstreamOneCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var original = new CacheableTypeOk(3, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    var singleItemList = new List<PackedObject> { PackedObject.Pack(original, schema) };

    using var stream = new MemoryStream();

    // serialize as a collection of one, then deserialize as a single object
    Streamer.ToStreamMany(stream, singleItemList, new int[0], null);
    stream.Seek(0, SeekOrigin.Begin);

    var itemReloaded = Streamer.FromStream<CacheableTypeOk>(stream);

    Assert.IsNotNull(itemReloaded);
    Assert.AreEqual(itemReloaded, original);
}
public void Test_functions_vs_queries()
{
    var schema = TypedSchemaFactory.FromType<Order>();

    // the same where clauses expressed as server-side queries and as compiled predicates
    var serverQueries = WhereClausesForOrders()
        .Select(w => ExpressionTreeHelper.PredicateToQuery(w, schema.CollectionName))
        .ToList();
    var localPredicates = WhereClausesForOrders().Select(w => w.Compile()).ToList();

    var count = serverQueries.Count;
    Assert.AreEqual(count, localPredicates.Count);

    var objects = Order.GenerateTestData(1000);
    var packed = objects.Select(o => PackedObject.Pack(o, schema)).ToList();

    var store = new DataStore(schema, new NullEvictionPolicy(), new FullTextConfig());
    store.InternalPutMany(packed, true);

    // each server-side query must return exactly as many objects as its in-memory predicate
    for (var i = 0; i < count; i++)
    {
        var expected = objects.Where(localPredicates[i]).ToList();

        var queryManager = new QueryManager(store);
        var actual = queryManager.ProcessQuery(serverQueries[i]);

        Console.WriteLine($"{serverQueries[i]} returned {actual.Count}");
        Console.WriteLine("execution plan:");
        Console.WriteLine(queryManager.ExecutionPlan);
        Console.WriteLine();

        Assert.AreEqual(expected.Count, actual.Count);
    }
}
/// <summary>
///     Index a document. A document is an ordered sequence of lines.
///     Re-indexing an already indexed document is processed as delete + insert.
/// </summary>
/// <param name="item">document to index; must not be null</param>
public void IndexDocument([NotNull] PackedObject item)
{
    if (item == null)
        throw new ArgumentNullException(nameof(item));

    var primaryKey = item.PrimaryKey;

    // update = delete + insert
    if (PositionsByDocument.ContainsKey(primaryKey))
        DeleteDocument(primaryKey);

    // reuse the cached tokenization when available, otherwise tokenize and cache it
    var lines = item.TokenizedFullText;
    if (lines == null)
    {
        lines = Tokenizer.Tokenize(item.FullText);
        item.TokenizedFullText = lines;
    }

    // the line number participates in the indexed position
    for (var lineIndex = 0; lineIndex < lines.Count; lineIndex++)
        IndexLine(lines[lineIndex], lineIndex, primaryKey);
}
/// <summary>
///     Insert an item keeping the internal list sorted.
///     During a feed session items are merely buffered; sorting happens at session end.
/// </summary>
public override void Put(PackedObject item)
{
    // lazily resolve the position of the indexation key (fixed for a cacheable data type)
    if (_keyIndex == -1)
        _keyIndex = KeyInfo.Order;

    if (_insideFeedSession)
    {
        // buffered: the ordered index is rebuilt when the session ends
        _tempData.Add(item);
        return;
    }

    if (_data.Count == 0)
    {
        _data.Add(item);
        return;
    }

    var insertAfter = FindInsertAfterIndex(item, 0, _data.Count - 1);

    if (insertAfter == -1)
    {
        // smaller than everything already stored: goes first
        _data.Insert(0, item);
    }
    else if (insertAfter == _data.Count - 1)
    {
        // bigger than everything: append
        _data.Add(item);
    }
    else
    {
        _data.Insert(insertAfter + 1, item);
    }
}
public void Compare_packing_result_for_different_methods()
{
    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        }
    };

    var desc = TypedSchemaFactory.FromType<Home>();

    // pack directly from the typed object
    var packedFromObject = PackedObject.Pack(home, desc).ToString();

    // pack from the json representation of the same object
    var json = SerializationHelper.ObjectToJson(home);
    var packedFromJson = PackedObject.PackJson(json, desc).ToString();

    // both packing paths must produce the same result
    Assert.AreEqual(packedFromObject, packedFromJson);
}
public void Packing_a_binary_object_and_its_json_should_give_identical_results_with_default_index_type()
{
    // FIX: the schema was built three times (schema, description, typeDescription —
    // all identical); build it once
    var schema = TypedSchemaFactory.FromType(typeof(Order));

    var testObj = new Order
    {
        Amount = 66.5, Date = DateTimeOffset.Now, Category = "student", ClientId = 101,
        ProductId = 405, Id = Guid.NewGuid(), Quantity = 1, IsDelivered = true
    };

    var packed1 = PackedObject.Pack(testObj, schema);

    var json = SerializationHelper.ObjectToJson(testObj);
    var packed2 = PackedObject.PackJson(json, schema);

    Console.WriteLine(packed1);
    Console.WriteLine(packed2);

    Assert.AreEqual(packed1, packed2); // only checks the primary key
    Assert.AreEqual(packed1.CollectionName, packed2.CollectionName);
    CollectionAssert.AreEqual(packed1.Values, packed2.Values);
    CollectionAssert.AreEqual(packed1.CollectionValues, packed2.CollectionValues);

    // FIX: json1/json2 were computed but never asserted; compare the decoded strings
    // first so a mismatch produces a readable diff, then the raw bytes
    var json1 = Encoding.UTF8.GetString(packed1.ObjectData);
    var json2 = Encoding.UTF8.GetString(packed2.ObjectData);
    Assert.AreEqual(json1, json2);

    CollectionAssert.AreEqual(packed1.ObjectData, packed2.ObjectData);
}
public void TestPackObject()
{
    var instance = GetObject1();

    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var packed = PackedObject.Pack(instance, schema);

    Assert.IsNotNull(packed);
    Assert.IsNotNull(packed.PrimaryKey);
    Assert.AreEqual(packed.PrimaryKey, 11);

    // check that every indexed value was packed with the expected value and type
    foreach (var keyValue in packed.Values)
    {
        if (keyValue.KeyName == "IndexKeyDate")
        {
            Assert.AreEqual(keyValue, new DateTime(2009, 10, 25).Ticks);
            Assert.AreEqual(keyValue.Type, KeyValue.OriginalType.Date);
        }

        if (keyValue.KeyName == "IndexKeyValue")
        {
            Assert.AreEqual(keyValue, 15);
            Assert.AreEqual(keyValue.Type, KeyValue.OriginalType.SomeInteger);
        }

        if (keyValue.Type == KeyValue.OriginalType.String)
        {
            Assert.AreEqual(keyValue, "FOL");
            Assert.AreEqual(keyValue.KeyName, "IndexKeyFolder");
        }
    }

    // the packed form must round-trip back to an equal object
    var unpacked = PackedObject.Unpack<CacheableTypeOk>(packed);
    Assert.AreEqual(instance, unpacked);
}
public void TimeToLiveRemoveItem()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    // items live one second
    var policy = new TtlEvictionPolicy(TimeSpan.FromSeconds(1));

    var packed11 = PackedObject.Pack(
        new CacheableTypeOk(11, 11 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);
    var packed12 = PackedObject.Pack(
        new CacheableTypeOk(12, 12 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);

    policy.AddItem(packed11);
    policy.AddItem(packed12);

    // an explicitly removed item must not be reported by a later eviction pass
    policy.TryRemove(packed11);

    // let the TTL expire
    Thread.Sleep(1010);

    var toRemove = policy.DoEviction();
    Assert.AreEqual(1, toRemove.Count);
    Assert.AreEqual(packed12, toRemove.Single());
}
/// <summary>
///     Conditional update: the server replaces the stored object only if the
///     predicate matches its current state.
/// </summary>
/// <param name="newValue">new version of the object; must not be null</param>
/// <param name="testAsQuery">predicate evaluated server-side against the stored object</param>
public void UpdateIf(PackedObject newValue, OrQuery testAsQuery)
{
    if (newValue == null)
        throw new ArgumentNullException(nameof(newValue));

    var request = new PutRequest(testAsQuery.CollectionName)
    {
        ExcludeFromEviction = true,
        Predicate = testAsQuery
    };
    request.Items.Add(newValue);

    var response = Channel.SendRequest(request);

    // surface server-side failures as a client exception
    if (response is ExceptionResponse error)
        throw new CacheException("Error while writing an object to the cache",
            error.Message, error.CallStack);
}
/// <summary>
///     Helper function. Lazily enumerate all the objects in a dump directory.
///     Dump files contain json blocks separated by "\-" markers.
/// </summary>
/// <param name="path">path of the dump</param>
/// <param name="collectionSchema">schema used to repack each json block</param>
/// <param name="shardIndex">restrict to one shard, or -1 for all shards</param>
/// <returns>the packed objects, in file order</returns>
public static IEnumerable<PackedObject> ObjectsInDump(string path, CollectionSchema collectionSchema,
    int shardIndex = -1)
{
    // -1 means "every shard of the collection"
    var fileMask = shardIndex == -1
        ? $"{collectionSchema.CollectionName}_shard*.txt"
        : $"{collectionSchema.CollectionName}_shard{shardIndex:D4}*.txt";

    foreach (var file in Directory.GetFiles(path, fileMask))
    {
        // each file holds json blocks separated by "\-"
        var jsonBlocks = File.ReadAllText(file)
            .Split(new[] { "\\-" }, StringSplitOptions.RemoveEmptyEntries)
            .Select(txt => txt.Trim())
            .Where(t => !string.IsNullOrWhiteSpace(t))
            .ToList();

        foreach (var block in jsonBlocks)
            yield return PackedObject.PackJson(block, collectionSchema);
    }
}
/// <summary>
///     When items are stored in the transaction log they are not yet indexed in memory so if they contain full-text data
///     it is not tokenized yet. While storing an item from the transaction log in the persistent storage, get the tokenized full-text
///     if available or tokenize it otherwise. It is important to avoid tokenization while reloading the database
/// </summary>
/// <param name="item">item coming from the transaction log</param>
/// <returns>the same item, or the already-tokenized in-memory version when one exists</returns>
PackedObject GetItemWithTokenizedFullText(PackedObject item)
{
    if (item.FullText != null && item.FullText.Length > 0)
    {
        var dataStore = Container.TryGetByName(item.CollectionName);
        PackedObject result = item;
        if (dataStore != null)
        {
            // read the in-memory version under the collection's read lock
            var lockMgr = _serviceContainer.LockManager;
            lockMgr.DoWithReadLock(() =>
            {
                if (dataStore.DataByPrimaryKey.TryGetValue(item.PrimaryKey, out var found))
                {
                    if (found.TokenizedFullText != null && found.TokenizedFullText.Count > 0
                       ) // tokenized full-text available
                    {
                        result = found;
                    }
                }
            }, dataStore.CollectionSchema.CollectionName);

            // NOTE(review): when the item is found in memory but not yet tokenized,
            // it is returned without tokenization — presumably tokenized later;
            // confirm against the callers
            return(result);
        }
    }
    else // no full-text data
    {
        return(item);
    }

    // It may reach this point when commiting to persistent storage a pending transactions from the transaction log
    // This happens in the early stages of the startup before loading items into memory
    // (full-text present, but the collection's data store does not exist yet)
    item.TokenizedFullText = Tokenizer.Tokenize(item.FullText);
    return(item);
}
/// <summary>
///     Remove one item from the ordered index, locating it by index-key value
///     then matching the exact instance by primary key.
/// </summary>
/// <param name="item">item to remove; its index-key value must be present</param>
public override void RemoveOne(PackedObject item)
{
    // a feed session should not contain a primary key more than once. Can not remove inside a feed session because ordered
    // indexes are not sorted until the end of the session
    if (_insideFeedSession)
        throw new InvalidOperationException(
            "Illegal operation during a feed session (RemoveOne was called). Probably due duplicate primary key ");

    // find any item having the same index-key value
    var index = FindIndexEq(item.Values[_keyIndex], 0, _data.Count);

    if (index == -1)
        throw new InvalidOperationException(
            $"Can not find item {item} in the index for the Key {KeyInfo.Name}");

    // rewind to the first element of the run of equal index-key values
    while (index > 0 &&
           _data[index - 1].Values[_keyIndex].CompareTo(item.Values[_keyIndex]) == 0)
        index--;

    // FIX: removed the dead "if (index != -1)" guard — index can no longer be -1 at
    // this point (we throw above when the key is not found, and the rewind loop never
    // goes below 0)
    // scan the run forward for the exact item, identified by primary key
    for (var i = index; i < _data.Count; i++)
        if (_data[i].PrimaryKey == item.PrimaryKey)
        {
            _data.RemoveAt(i);
            break;
        }
}
public void Test_packing_performance()
{
    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        }
    };

    var desc = TypedSchemaFactory.FromType<Home>();

    const int objects = 10_000;

    {
        // warm up
        var warm = PackedObject.Pack(home, desc);
        var json = warm.AsJson();
        var reloaded = PackedObject.Unpack<Home>(warm);

        var watch = new Stopwatch();
        watch.Start();
        for (var i = 0; i < objects; i++)
        {
            var packed = PackedObject.Pack(home, desc);
            // FIX: the loop previously unpacked the warm-up object ("unused") so the
            // freshly packed one was never exercised and "packed" was dead
            reloaded = PackedObject.Unpack<Home>(packed);
        }

        watch.Stop();
        Console.WriteLine($"Packing + unpacking {objects} objects took {watch.ElapsedMilliseconds} ms");
    }

    {
        desc.UseCompression = true;

        // warm up
        var warm = PackedObject.Pack(home, desc);
        var reloaded = PackedObject.Unpack<Home>(warm);

        var watch = new Stopwatch();
        watch.Start();
        for (var i = 0; i < objects; i++)
        {
            var packed = PackedObject.Pack(home, desc);
            // FIX: same bug as above — measure the object packed in this iteration
            reloaded = PackedObject.Unpack<Home>(packed);
        }

        watch.Stop();
        Console.WriteLine(
            $"Packing + unpacking {objects} objects with compression took {watch.ElapsedMilliseconds} ms");
    }
}
/// <summary>
///     Pivot computation with one then two axes, plus merging of two pivots.
///     Schema positions used throughout: axis columns 3 (Category) and 4 (ProductId),
///     aggregated columns 1 (Amount) and 2 (Quantity).
/// </summary>
public void ComputePivotWithMultipleAxis()
{
    var schema = TypedSchemaFactory.FromType(typeof(Order));

    // two "geek" orders (different ProductId) and one "sf" order
    var order1 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "geek", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };
    var order2 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "sf", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };
    var order3 = new Order
    {
        Amount = 14.5, Date = DateTimeOffset.Now, Category = "geek", ClientId = 101, ProductId = 402,
        Id = Guid.NewGuid(), Quantity = 2
    };

    var packed1 = PackedObject.Pack(order1, schema);
    var packed2 = PackedObject.Pack(order2, schema);
    var packed3 = PackedObject.Pack(order3, schema);

    // first test with one single axis (Category index = 3)
    var pivot = new PivotLevel();

    pivot.AggregateOneObject(packed1, new List<int> { 3 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int> { 3 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed3, new List<int> { 3 }, new List<int> { 1, 2 });

    // Amount and Quantity should be aggregated
    Assert.AreEqual(2, pivot.AggregatedValues.Count);

    // the root level aggregates across all three orders
    var agg = pivot.AggregatedValues.First(v => v.ColumnName == "Amount");
    Assert.AreEqual(3, agg.Count);
    Assert.AreEqual(order1.Amount + order2.Amount + order3.Amount, agg.Sum);

    // one child per distinct Category value
    Assert.IsTrue(pivot.Children.Keys.All(k => k.KeyName == "Category"));
    Assert.IsTrue(pivot.Children.Values.All(v => v.AxisValue.KeyName == "Category"));

    var geek = pivot.Children.Values.First(p => p.AxisValue.StringValue == "geek");
    Assert.AreEqual(2, geek.AggregatedValues.Count);

    // then with two axis
    pivot = new PivotLevel();

    pivot.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    Console.WriteLine(pivot.ToString());

    // "geek" now has one sub-level per ProductId (401 and 402)
    var geek1 = pivot.Children.Values.First(p =>
        p.AxisValue.StringValue == "geek");
    Assert.AreEqual(2, geek1.AggregatedValues.Count);
    Assert.AreEqual(2, geek1.Children.Count);

    // check pivot merging
    // a new category
    var order4 = new Order
    {
        Amount = 66.5, Date = DateTimeOffset.Now, Category = "student", ClientId = 101, ProductId = 405,
        Id = Guid.NewGuid(), Quantity = 1
    };
    var packed4 = PackedObject.Pack(order4, schema);

    // pivot1 aggregates orders 1,2,3; pivot2 aggregates 1,3,4 (overlapping on purpose)
    var pivot1 = new PivotLevel();
    pivot1.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot1.AggregateOneObject(packed2, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot1.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    var pivot2 = new PivotLevel();
    pivot2.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot2.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot2.AggregateOneObject(packed4, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    pivot1.MergeWith(pivot2);

    Console.WriteLine(pivot1);

    // check that an aggregate is equal to the sum of the children
    var sum1 = pivot1.AggregatedValues.First(v => v.ColumnName == "Amount").Sum;
    var sum2 = pivot1.Children.Sum(c => c.Value.AggregatedValues.First(v => v.ColumnName == "Amount").Sum);

    Assert.AreEqual(sum1, sum2);
}
/// <summary>
///     Route a conditional update to the node that owns the object's primary key.
/// </summary>
public void UpdateIf(PackedObject newValue, OrQuery testAsQuery)
{
    CacheClients[WhichNode(newValue)].UpdateIf(newValue, testAsQuery);
}
/// <summary>
///     Route a TryAdd to the node that owns the item's primary key.
/// </summary>
public bool TryAdd(string collectionName, PackedObject item) =>
    CacheClients[WhichNode(item)].TryAdd(collectionName, item);
/// <summary>
///     Sharding: an object is assigned to a node according to its primary key.
/// </summary>
private int WhichNode(PackedObject obj) => WhichNode(obj.PrimaryKey);
/// <summary>
///     Return true if the current query matches the specified object
/// </summary>
/// <param name="item">candidate object tested against this query</param>
/// <returns>true when the object satisfies the query predicate</returns>
public abstract bool Match(PackedObject item);
/// <summary>
///     Pack a typed item using this collection's schema and name.
/// </summary>
private PackedObject Pack(T item) => PackedObject.Pack(item, _collectionSchema, _collectionName);