/// <summary>
///     Pack and insert a collection of items into the collection mapped to <typeparamref name="T" />.
/// </summary>
/// <typeparam name="T">A type whose description is registered in <see cref="TypeDescriptionsCache" />.</typeparam>
/// <param name="this">Data client receiving the items.</param>
/// <param name="items">Items to pack and store.</param>
/// <param name="excludeFromEviction">If true, the stored items are never evicted by the eviction policy.</param>
public static void PutMany<T>(this IDataClient @this, IEnumerable<T> items, bool excludeFromEviction = false)
{
    // One schema lookup is enough (the previous code kept a redundant alias of the description).
    var schema = TypeDescriptionsCache.GetDescription(typeof(T));

    @this.FeedMany(schema.CollectionName, items.Select(i => PackedObject.Pack(i, schema)), excludeFromEviction);
}
public void Remove()
{
    var schema = TypedSchemaFactory.FromType(typeof(TradeLike));

    var evictionQueue = new EvictionQueue { Capacity = 7, EvictionCount = 2 };

    var packedItems = new List<PackedObject>();

    for (var i = 0; i < 10; i++)
    {
        var trade = new TradeLike(i, 1000 + i, "aaa", DateTime.Now, 456);
        var packed = PackedObject.Pack(trade, schema);
        evictionQueue.AddNew(packed);
        packedItems.Add(packed);
    }

    // queue content: 0 1 2 3 4 5 6 7 8 9
    evictionQueue.TryRemove(packedItems[0]);
    evictionQueue.TryRemove(packedItems[2]);

    // queue content: 1 3 4 5 6 7 8 9 (eight items for a capacity of seven)
    Assert.IsTrue(evictionQueue.EvictionRequired);
    var evicted = evictionQueue.Go();

    // queue content: 5 6 7 8 9 (evicts down to Capacity - EvictionCount, oldest first)
    Assert.AreEqual(evicted.Count, 3);
    Assert.AreEqual(evicted[0].PrimaryKey, 1);
    Assert.AreEqual(evicted[1].PrimaryKey, 3);
}
/// <summary>
///     Check that <c>KeyValue</c> and <c>PackedObject</c> round-trip through protobuf
///     serialization, including two length-prefixed messages written to the same stream.
///     Removed an unused <c>QueryBuilder</c> local and disposed the stream.
/// </summary>
public void TestProtobufEncoding()
{
    var schema = TypedSchemaFactory.FromType(typeof(TradeLike));

    var kval = new KeyValue(10, schema.KeyByName("Nominal"));

    using var stream = new MemoryStream();

    // a KeyValue round-trips unchanged
    Serializer.Serialize(stream, kval);
    stream.Seek(0, SeekOrigin.Begin);
    var reloaded = Serializer.Deserialize<KeyValue>(stream);
    Assert.AreEqual(kval, reloaded);

    stream.Seek(0, SeekOrigin.Begin);

    var obj = new TradeLike(1, 1001, "aaa", new DateTime(2009, 10, 10), 0);
    var packed = PackedObject.Pack(obj, schema);

    // two length-prefixed messages on the same stream must be readable one by one
    Serializer.SerializeWithLengthPrefix(stream, packed, PrefixStyle.Fixed32);
    Serializer.SerializeWithLengthPrefix(stream, packed, PrefixStyle.Fixed32);
    stream.Seek(0, SeekOrigin.Begin);

    var t1 = Serializer.DeserializeWithLengthPrefix<PackedObject>(stream, PrefixStyle.Fixed32);
    Assert.AreEqual(t1.Values[4].ToString(), "0");

    var t2 = Serializer.DeserializeWithLengthPrefix<PackedObject>(stream, PrefixStyle.Fixed32);
    Assert.AreEqual(t2.Values[4].ToString(), "0");
}
public void AddMoreThanCapacity()
{
    var schema = TypedSchemaFactory.FromType(typeof(TradeLike));

    var evictionQueue = new EvictionQueue { Capacity = 1000, EvictionCount = 100 };

    for (var i = 0; i < 10000; i++)
    {
        var trade = new TradeLike(i, 1000 + i, "aaa", DateTime.Now, 456);
        evictionQueue.AddNew(PackedObject.Pack(trade, schema));
    }

    Assert.IsTrue(evictionQueue.EvictionRequired);
    Assert.AreEqual(evictionQueue.Count, 10000);

    ICollection<PackedObject> evicted = evictionQueue.Go();

    // the eviction removes (10000 - Capacity) + EvictionCount = 9100 items
    Assert.AreEqual(evictionQueue.Count, 900);
    Assert.IsFalse(evictionQueue.EvictionRequired);
    Assert.AreEqual(evicted.Count, 9100);

    // asking for eviction while below maximum capacity does not remove any item
    evicted = evictionQueue.Go();
    Assert.AreEqual(evicted.Count, 0);
}
/// <summary>
///     Build an <c>OrderedIndex</c> on the "IndexKeyValue" field of <c>CacheableTypeOk</c>
///     and fill it with one packed object per provided value (item i gets primary key i).
/// </summary>
/// <param name="valueKeys">Values of the indexed field, one per object to insert.</param>
/// <returns>The populated index.</returns>
private static OrderedIndex Populate(params int[] valueKeys)
{
    // register the type to get a valid CollectionSchema; one lookup is enough
    // (the original resolved the same schema twice)
    var schema = TypedSchemaFactory.FromType<CacheableTypeOk>();

    var valueKey = schema.IndexFields.FirstOrDefault(k => k.Name == "IndexKeyValue");

    Assert.IsNotNull(valueKey);

    var index = new OrderedIndex(valueKey);

    for (var i = 0; i < valueKeys.Length; i++)
        index.Put(PackedObject.Pack(new CacheableTypeOk(i, 106, "A", DateTime.Now, valueKeys[i]), schema));

    return index;
}
public void TimeToLive()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var ttlPolicy = new TtlEvictionPolicy(TimeSpan.FromSeconds(1));

    for (var i = 0; i < 10; i++)
    {
        var cacheable = new CacheableTypeOk(i, i + 1000, "aaa", new DateTime(2010, 10, 10), 1500);
        ttlPolicy.AddItem(PackedObject.Pack(cacheable, schema));
    }

    // nothing has expired yet
    Assert.IsFalse(ttlPolicy.IsEvictionRequired);

    Thread.Sleep(1010);

    // all ten items are now older than the one-second TTL
    var evicted = ttlPolicy.DoEviction();
    Assert.AreEqual(10, evicted.Count);

    var freshItem = new CacheableTypeOk(11, 11 + 1000, "aaa", new DateTime(2010, 10, 10), 1500);
    ttlPolicy.AddItem(PackedObject.Pack(freshItem, schema));

    // the freshly added item has not expired
    evicted = ttlPolicy.DoEviction();
    Assert.AreEqual(0, evicted.Count);

    Thread.Sleep(1010);

    // ...but it has after another second
    evicted = ttlPolicy.DoEviction();
    Assert.AreEqual(1, evicted.Count);
}
/// <summary>
///     Pack and insert a single item into the collection mapped to <typeparamref name="T" />.
/// </summary>
/// <typeparam name="T">A type whose description is registered in <see cref="TypeDescriptionsCache" />.</typeparam>
/// <param name="this">Data client receiving the item.</param>
/// <param name="item">Item to pack and store.</param>
/// <param name="excludeFromEviction">If true, the stored item is never evicted by the eviction policy.</param>
public static void PutOne<T>(this IDataClient @this, T item, bool excludeFromEviction = false)
{
    // One schema lookup is enough (the previous code kept a redundant alias of the description).
    var schema = TypeDescriptionsCache.GetDescription(typeof(T));

    @this.Put(schema.CollectionName, PackedObject.Pack(item, schema), excludeFromEviction);
}
public void Pack_object_with_full_text_indexed_properties()
{
    var schema = TypedSchemaFactory.FromType<Home>();
    Assert.AreEqual(5, schema.FullText.Count);

    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        },
        Contacts = { "mail", "phone" }
    };

    // packing the object directly indexes the full-text properties
    var packedFromObject = PackedObject.Pack(home, schema);
    Assert.AreEqual(7, packedFromObject.FullText.Length);
    Assert.IsTrue(packedFromObject.FullText.Any(t => t.Contains("chien qui fume")));

    // packing the json representation must produce the same full-text tokens
    var json = SerializationHelper.ObjectToJson(home);
    var packedFromJson = PackedObject.PackJson(json, schema);
    Assert.AreEqual(7, packedFromJson.FullText.Length);
    Assert.IsTrue(packedFromJson.FullText.Any(t => t.Contains("chien qui fume")));
}
/// <summary>
///     Write four request types and three response types into one stream, then read
///     them back one by one and check each concrete type. The duplicate schema lookup
///     of the original (<c>typeDescription</c>) was removed.
/// </summary>
public void StreamUnstreamMessagesOneByOne()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));
    var qbuilder = new QueryBuilder(typeof(CacheableTypeOk));

    var put = new PutRequest(typeof(CacheableTypeOk));
    var item = new CacheableTypeOk(3, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    put.Items.Add(PackedObject.Pack(item, schema));

    var remove = new RemoveRequest(typeof(CacheableTypeOk), new KeyValue(1, schema.PrimaryKeyField));

    var register = new RegisterTypeRequest(schema);

    using (var stream = new MemoryStream())
    {
        //requests
        Streamer.ToStream(stream,
            new GetRequest(qbuilder.FromSql("select from CacheableTypeOk where IndexKeyValue > 1000")));
        Streamer.ToStream(stream, put);
        Streamer.ToStream(stream, remove);
        Streamer.ToStream(stream, register);

        //responses
        Streamer.ToStream(stream, new NullResponse());
        Streamer.ToStream(stream, new ExceptionResponse(new Exception("fake exception")));
        Streamer.ToStream(stream, new ServerDescriptionResponse());

        stream.Seek(0, SeekOrigin.Begin);

        // read everything back in write order and check the concrete types
        object reloaded = Streamer.FromStream<Request>(stream);
        Assert.IsTrue(reloaded is GetRequest);

        reloaded = Streamer.FromStream<Request>(stream);
        Assert.IsTrue(reloaded is PutRequest);

        reloaded = Streamer.FromStream<Request>(stream);
        Assert.IsTrue(reloaded is RemoveRequest);

        reloaded = Streamer.FromStream<Request>(stream);
        Assert.IsTrue(reloaded is RegisterTypeRequest);

        reloaded = Streamer.FromStream<Response>(stream);
        Assert.IsTrue(reloaded is NullResponse);

        reloaded = Streamer.FromStream<Response>(stream);
        Assert.IsTrue(reloaded is ExceptionResponse);

        reloaded = Streamer.FromStream<Response>(stream);
        Assert.IsTrue(reloaded is ServerDescriptionResponse);
    }
}
// Build a durable delete transaction containing the packed form of every given item.
private DurableTransaction MakeDeleteTransaction<T>(params T[] items)
{
    var deleteTransaction = new DeleteDurableTransaction();

    foreach (var toDelete in items)
        deleteTransaction.ItemsToDelete.Add(PackedObject.Pack(toDelete, _schema));

    return deleteTransaction;
}
// Pack an item for the given collection; throws if the collection was not declared.
private PackedObject Pack<T>(T item, string collectionName = null)
{
    var schema = _connector.GetCollectionSchema(collectionName) ??
                 throw new CacheException($"Unknown collection {collectionName}. Use Connector.DeclareCollection");

    return PackedObject.Pack(item, schema, collectionName);
}
public void PackWithAutomaticPrimaryKey()
{
    var schema = TypedSchemaFactory.FromType(typeof(TestData));

    var withoutExplicitKey = new TestData { Name = "toto" };
    var packed = PackedObject.Pack(withoutExplicitKey, schema);

    // a non-empty Guid primary key must have been generated automatically
    var generatedKey = Guid.Parse(packed.PrimaryKey.ToString());
    Assert.AreNotEqual(Guid.Empty, generatedKey);
}
public void AddTwiceRaisesAnException()
{
    var schema = TypedSchemaFactory.FromType(typeof(TradeLike));

    var evictionQueue = new EvictionQueue { Capacity = 1000, EvictionCount = 100 };

    var trade = new TradeLike(0, 1000, "aaa", DateTime.Now, 456);
    var packed = PackedObject.Pack(trade, schema);

    evictionQueue.AddNew(packed);

    // adding the same item a second time is not allowed
    Assert.Throws<NotSupportedException>(() => evictionQueue.AddNew(packed));
}
public void Query_performance()
{
    var schema = TypedSchemaFactory.FromType<Order>();

    var queries = WhereClausesForOrders()
        .Select(w => ExpressionTreeHelper.PredicateToQuery(w, schema.CollectionName)).ToList();

    var objects = Order.GenerateTestData(100_000);
    var packedObjects = objects.Select(o => PackedObject.Pack(o, schema)).ToList();

    var dataStore = new DataStore(schema, new NullEvictionPolicy(), new FullTextConfig());
    dataStore.InternalPutMany(packedObjects, true);

    var watch = new Stopwatch();

    foreach (var query in queries)
    {
        var queryManager = new QueryManager(dataStore);

        const int iterations = 100;

        // warm up
        var returned = queryManager.ProcessQuery(query).Count;

        // timed run
        watch.Restart();
        for (var j = 0; j < iterations; j++) returned = queryManager.ProcessQuery(query).Count;
        watch.Stop();

        Console.WriteLine($"{query} returned {returned} took {watch.ElapsedMilliseconds / iterations} ms");
        Console.WriteLine("execution plan:");
        Console.WriteLine(queryManager.ExecutionPlan);
        Console.WriteLine();
    }
}
public void Packing_a_binary_object_and_its_json_should_give_identical_results()
{
    var today = DateTime.Today;
    var now = DateTime.Now;

    var schema = TypedSchemaFactory.FromType(typeof(AllKindsOfProperties));

    var instance = new AllKindsOfProperties
    {
        Id = 15,
        ValueDate = today,
        LastUpdate = now,
        Nominal = 156.32,
        Quantity = 35,
        InstrumentName = "IRS",
        AnotherDate = now,
        AreYouSure = AllKindsOfProperties.Fuzzy.Maybe,
        IsDeleted = true,
        Tags = { "news", "science", "space", "διξ" },
        Languages = { "en", "de", "fr" }
    };

    var packedFromObject = PackedObject.Pack(instance, schema);

    var json = SerializationHelper.ObjectToJson(instance);
    var packedFromJson = PackedObject.PackJson(json, schema);

    Console.WriteLine(packedFromObject);
    Console.WriteLine(packedFromJson);

    Assert.AreEqual(packedFromObject, packedFromJson); // only checks the primary key
    Assert.AreEqual(packedFromObject.CollectionName, packedFromJson.CollectionName);
    CollectionAssert.AreEqual(packedFromObject.Values, packedFromJson.Values);

    // collection values are compared element by element
    Assert.AreEqual(packedFromObject.CollectionValues.Length, packedFromJson.CollectionValues.Length);
    for (var i = 0; i < packedFromJson.CollectionValues.Length; i++)
        CollectionAssert.AreEqual(packedFromObject.CollectionValues[i].Values,
            packedFromJson.CollectionValues[i].Values);

    CollectionAssert.AreEqual(packedFromObject.ObjectData, packedFromJson.ObjectData);
}
public void PackedObjectSerialization()
{
    var schema = TypedSchemaFactory.FromType(typeof(Person));

    var person = new Person { Id = 13, First = "Dan", Last = "IONESCU" };
    var packed = PackedObject.Pack(person, schema);

    // protobuf round-trip of a packed object
    var bytes = SerializationHelper.ObjectToBytes(packed, SerializationMode.ProtocolBuffers, schema.UseCompression);
    var reloaded =
        SerializationHelper.ObjectFromBytes<PackedObject>(bytes, SerializationMode.ProtocolBuffers, false);

    Assert.AreEqual(13, reloaded.PrimaryKey.IntValue);
    Assert.AreEqual("Dan", reloaded.Values.First(k => k.KeyName == "First").StringValue);
}
public void LessRecentlyUsedRemoveItem()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var lruPolicy = new LruEvictionPolicy(10, 2);

    for (var i = 0; i < 9; i++)
    {
        var cacheable = new CacheableTypeOk(i, i + 1000, "aaa", new DateTime(2010, 10, 10), 1500);
        lruPolicy.AddItem(PackedObject.Pack(cacheable, schema));
    }

    // nine items for a limit of ten: no eviction yet
    Assert.IsFalse(lruPolicy.IsEvictionRequired);
    var evicted = lruPolicy.DoEviction();
    Assert.AreEqual(0, evicted.Count);

    var removedItem =
        PackedObject.Pack(new CacheableTypeOk(1, 1 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);
    lruPolicy.TryRemove(removedItem);

    lruPolicy.AddItem(
        PackedObject.Pack(new CacheableTypeOk(10, 10 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema));

    // one item was removed explicitly so the limit is still not reached
    evicted = lruPolicy.DoEviction();
    Assert.AreEqual(0, evicted.Count);

    lruPolicy.AddItem(
        PackedObject.Pack(new CacheableTypeOk(11, 11 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema));

    // now the limit is exceeded and the eviction is triggered
    evicted = lruPolicy.DoEviction();
    Assert.AreEqual(2, evicted.Count);

    // the explicitly removed item must not be in the evicted list
    Assert.IsFalse(evicted.Any(i => i == removedItem));
}
public void ComputePivotWithServerValues()
{
    var schema = TypedSchemaFactory.FromType(typeof(Order));

    var order1 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "geek", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };
    var order2 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "sf", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };

    var packed1 = PackedObject.Pack(order1, schema);
    var packed2 = PackedObject.Pack(order2, schema);

    var pivot = new PivotLevel();

    // aggregate Amount and Quantity (indexes 1 and 2 in the schema), no pivot axis
    pivot.AggregateOneObject(packed1, new List<int>(), new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int>(), new List<int> { 1, 2 });

    // both Amount and Quantity should be aggregated
    Assert.AreEqual(2, pivot.AggregatedValues.Count);

    var amountAggregate = pivot.AggregatedValues.First(v => v.ColumnName == "Amount");
    Assert.AreEqual(2, amountAggregate.Count);
    Assert.AreEqual(order1.Amount + order2.Amount, amountAggregate.Sum);

    Console.WriteLine(pivot.ToString());
}
public void StreamUnstreamManyCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    // three packed items with primary keys 1, 2, 3
    var packedItems = new List<PackedObject>(3);
    for (var i = 1; i <= 3; i++)
    {
        var item = new CacheableTypeOk(i, 1003, "AHA", new DateTime(2010, 10, 02), 8);
        packedItems.Add(PackedObject.Pack(item, schema));
    }

    using (var stream = new MemoryStream())
    {
        Streamer.ToStreamMany(stream, packedItems, new int[0], null);
        stream.Seek(0, SeekOrigin.Begin);

        var itemsReceived = 0;
        Streamer.FromStream(stream,
            delegate(CacheableTypeOk data, int currentItem, int totalItems)
            {
                Assert.IsTrue(currentItem > 0);
                Assert.IsTrue(currentItem <= totalItems);

                itemsReceived++;
                Assert.AreEqual(itemsReceived, data.PrimaryKey);
            },
            delegate { Assert.Fail(); });

        Assert.AreEqual(itemsReceived, 3);
    }
}
public void LessRecentlyUsed()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var lruPolicy = new LruEvictionPolicy(10, 2);

    for (var i = 0; i < 100; i++)
    {
        var cacheable = new CacheableTypeOk(i, i + 1000, "aaa", new DateTime(2010, 10, 10), 1500);
        lruPolicy.AddItem(PackedObject.Pack(cacheable, schema));
    }

    // 100 items for a limit of 10: eviction is required
    Assert.IsTrue(lruPolicy.IsEvictionRequired);
    var evicted = lruPolicy.DoEviction();

    // (100 - 10) + 2 items are evicted
    Assert.AreEqual(92, evicted.Count);

    var packed93 =
        PackedObject.Pack(new CacheableTypeOk(93, 93 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);

    // the 93rd item survived the eviction
    Assert.IsFalse(evicted.Any(i => i == packed93));

    // mark it as recently used
    lruPolicy.Touch(packed93);

    var packed100 =
        PackedObject.Pack(new CacheableTypeOk(100, 100 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);
    var packed101 =
        PackedObject.Pack(new CacheableTypeOk(101, 101 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);

    lruPolicy.AddItem(packed100);
    lruPolicy.AddItem(packed101);

    evicted = lruPolicy.DoEviction();
    Assert.AreEqual(2, evicted.Count);

    // item 93 was recently used (the call to Touch) so it was not evicted
    Assert.IsFalse(evicted.Any(i => i == packed93));
}
public void StreamUnstreamOneCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var item = new CacheableTypeOk(1, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    var packed = PackedObject.Pack(item, schema);

    using var stream = new MemoryStream();

    // round-trip: stream the packed object, read it back, unpack it
    Streamer.ToStream(stream, packed);
    stream.Seek(0, SeekOrigin.Begin);

    var reloaded = Streamer.FromStream<PackedObject>(stream);
    var unpacked = PackedObject.Unpack<CacheableTypeOk>(reloaded);

    Assert.IsTrue(unpacked is CacheableTypeOk);
}
public void StreamManyUnstreamOneCacheable()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var item = new CacheableTypeOk(3, 1003, "AHA", new DateTime(2010, 10, 02), 8);
    var singleItemList = new List<PackedObject> { PackedObject.Pack(item, schema) };

    using (var stream = new MemoryStream())
    {
        // stream a collection of one item, then read it back as a single object
        Streamer.ToStreamMany(stream, singleItemList, new int[0], null);
        stream.Seek(0, SeekOrigin.Begin);

        var reloaded = Streamer.FromStream<CacheableTypeOk>(stream);
        Assert.IsNotNull(reloaded);
        Assert.AreEqual(reloaded, item);
    }
}
public void Test_functions_vs_queries()
{
    var schema = TypedSchemaFactory.FromType<Order>();

    var queries = WhereClausesForOrders()
        .Select(w => ExpressionTreeHelper.PredicateToQuery(w, schema.CollectionName)).ToList();

    var predicates = WhereClausesForOrders().Select(w => w.Compile()).ToList();

    Assert.AreEqual(queries.Count, predicates.Count);

    var objects = Order.GenerateTestData(1000);
    var packedObjects = objects.Select(o => PackedObject.Pack(o, schema)).ToList();

    var dataStore = new DataStore(schema, new NullEvictionPolicy(), new FullTextConfig());
    dataStore.InternalPutMany(packedObjects, true);

    // each server-side query must return exactly as many items as its in-memory predicate
    for (var i = 0; i < queries.Count; i++)
    {
        var fromObjects = objects.Where(predicates[i]).ToList();

        var queryManager = new QueryManager(dataStore);
        var fromDataSource = queryManager.ProcessQuery(queries[i]);

        Console.WriteLine($"{queries[i]} returned {fromDataSource.Count}");
        Console.WriteLine("execution plan:");
        Console.WriteLine(queryManager.ExecutionPlan);
        Console.WriteLine();

        Assert.AreEqual(fromObjects.Count, fromDataSource.Count);
    }
}
/// <summary>
///     Packing an Order from the binary object and from its json representation must
///     produce identical packed objects (default index types). The original resolved
///     the Order schema three times and left two locals unused; the json strings are
///     now asserted first so a mismatch produces a readable failure message.
/// </summary>
public void Packing_a_binary_object_and_its_json_should_give_identical_results_with_default_index_type()
{
    var schema = TypedSchemaFactory.FromType(typeof(Order));

    var testObj = new Order
    {
        Amount = 66.5, Date = DateTimeOffset.Now, Category = "student", ClientId = 101, ProductId = 405,
        Id = Guid.NewGuid(), Quantity = 1, IsDelivered = true
    };

    var packed1 = PackedObject.Pack(testObj, schema);

    var json = SerializationHelper.ObjectToJson(testObj);
    var packed2 = PackedObject.PackJson(json, schema);

    Console.WriteLine(packed1);
    Console.WriteLine(packed2);

    Assert.AreEqual(packed1, packed2); // only checks the primary key
    Assert.AreEqual(packed1.CollectionName, packed2.CollectionName);
    CollectionAssert.AreEqual(packed1.Values, packed2.Values);
    CollectionAssert.AreEqual(packed1.CollectionValues, packed2.CollectionValues);

    // compare the raw payloads as text first (readable diff), then byte for byte
    var json1 = Encoding.UTF8.GetString(packed1.ObjectData);
    var json2 = Encoding.UTF8.GetString(packed2.ObjectData);
    Assert.AreEqual(json1, json2);

    CollectionAssert.AreEqual(packed1.ObjectData, packed2.ObjectData);
}
public void Compare_packing_result_for_different_methods()
{
    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        }
    };

    var schema = TypedSchemaFactory.FromType<Home>();

    // pack from the object ...
    var fromObject = PackedObject.Pack(home, schema).ToString();

    // ... and from its json representation
    var json = SerializationHelper.ObjectToJson(home);
    var fromJson = PackedObject.PackJson(json, schema).ToString();

    // both packing methods must produce the same packed object
    Assert.AreEqual(fromObject, fromJson);
}
public void TestPackObject()
{
    var instance = GetObject1();
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var packed = PackedObject.Pack(instance, schema);

    Assert.IsNotNull(packed);
    Assert.IsNotNull(packed.PrimaryKey);
    Assert.AreEqual(packed.PrimaryKey, 11);

    // check every indexed value by name (and the only string key by type)
    foreach (var keyValue in packed.Values)
    {
        if (keyValue.KeyName == "IndexKeyDate")
        {
            Assert.AreEqual(keyValue, new DateTime(2009, 10, 25).Ticks);
            Assert.AreEqual(keyValue.Type, KeyValue.OriginalType.Date);
        }

        if (keyValue.KeyName == "IndexKeyValue")
        {
            Assert.AreEqual(keyValue, 15);
            Assert.AreEqual(keyValue.Type, KeyValue.OriginalType.SomeInteger);
        }

        if (keyValue.Type == KeyValue.OriginalType.String)
        {
            Assert.AreEqual(keyValue, "FOL");
            Assert.AreEqual(keyValue.KeyName, "IndexKeyFolder");
        }
    }

    // unpacking must give back an object equal to the original
    var unpacked = PackedObject.Unpack<CacheableTypeOk>(packed);
    Assert.AreEqual(instance, unpacked);
}
public void TimeToLiveRemoveItem()
{
    var schema = TypedSchemaFactory.FromType(typeof(CacheableTypeOk));

    var ttlPolicy = new TtlEvictionPolicy(TimeSpan.FromSeconds(1));

    var packed11 =
        PackedObject.Pack(new CacheableTypeOk(11, 11 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);
    var packed12 =
        PackedObject.Pack(new CacheableTypeOk(12, 12 + 1000, "aaa", new DateTime(2010, 10, 10), 1500), schema);

    ttlPolicy.AddItem(packed11);
    ttlPolicy.AddItem(packed12);

    // the first item is removed explicitly, so it must not show up in the eviction
    ttlPolicy.TryRemove(packed11);

    Thread.Sleep(1010);

    var evicted = ttlPolicy.DoEviction();
    Assert.AreEqual(1, evicted.Count);
    Assert.AreEqual(packed12, evicted.Single());
}
/// <summary>
///     Measure pack + unpack round-trip time for 10 000 objects, without and with compression.
///     BUG FIX: both timed loops previously unpacked the warm-up object <c>unused</c> instead of
///     the object packed in the current iteration, so the per-iteration <c>packed</c> was dead
///     and the unpack path always processed the same constant instance.
/// </summary>
public void Test_packing_performance()
{
    var home = new Home
    {
        Address = "14 rue du chien qui fume",
        Bathrooms = 2,
        Rooms = 4,
        PriceInEuros = 200,
        CountryCode = "FR",
        Comments =
        {
            new Comment { Text = "Wonderful place", User = "******" },
            new Comment { Text = "Very nice apartment" }
        }
    };

    var desc = TypedSchemaFactory.FromType<Home>();

    const int objects = 10_000;

    {
        // warm up
        var unused = PackedObject.Pack(home, desc);
        var json = unused.AsJson();
        var reloaded = PackedObject.Unpack<Home>(unused);

        var watch = new Stopwatch();
        watch.Start();

        for (var i = 0; i < objects; i++)
        {
            var packed = PackedObject.Pack(home, desc);
            reloaded = PackedObject.Unpack<Home>(packed); // unpack this iteration's object
        }

        watch.Stop();

        Console.WriteLine($"Packing + unpacking {objects} objects took {watch.ElapsedMilliseconds} ms");
    }

    {
        // warm up with compression enabled
        desc.UseCompression = true;
        var unused = PackedObject.Pack(home, desc);
        var reloaded = PackedObject.Unpack<Home>(unused);

        var watch = new Stopwatch();
        watch.Start();

        for (var i = 0; i < objects; i++)
        {
            var packed = PackedObject.Pack(home, desc);
            reloaded = PackedObject.Unpack<Home>(packed); // unpack this iteration's object
        }

        watch.Stop();

        Console.WriteLine(
            $"Packing + unpacking {objects} objects with compression took {watch.ElapsedMilliseconds} ms");
    }
}
// Pack an item with this instance's collection schema and collection name.
private PackedObject Pack(T item) => PackedObject.Pack(item, _collectionSchema, _collectionName);
// Pivot computation with one then two axes, followed by a merge of two pivots.
// Axis indexes: 3 = Category, 4 = ProductId; aggregated indexes: 1 = Amount, 2 = Quantity.
public void ComputePivotWithMultipleAxis()
{
    var schema = TypedSchemaFactory.FromType(typeof(Order));

    // three orders: two in category "geek", one in "sf"
    var order1 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "geek", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };
    var order2 = new Order
    {
        Amount = 123.45, Date = DateTimeOffset.Now, Category = "sf", ClientId = 101, ProductId = 401,
        Id = Guid.NewGuid(), Quantity = 2
    };
    var order3 = new Order
    {
        Amount = 14.5, Date = DateTimeOffset.Now, Category = "geek", ClientId = 101, ProductId = 402,
        Id = Guid.NewGuid(), Quantity = 2
    };

    var packed1 = PackedObject.Pack(order1, schema);
    var packed2 = PackedObject.Pack(order2, schema);
    var packed3 = PackedObject.Pack(order3, schema);

    // first test with one single axis (Category index = 3)
    var pivot = new PivotLevel();
    pivot.AggregateOneObject(packed1, new List<int> { 3 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int> { 3 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed3, new List<int> { 3 }, new List<int> { 1, 2 });

    // Amount and Quantity should be aggregated
    Assert.AreEqual(2, pivot.AggregatedValues.Count);

    // the root aggregate covers all three orders
    var agg = pivot.AggregatedValues.First(v => v.ColumnName == "Amount");
    Assert.AreEqual(3, agg.Count);
    Assert.AreEqual(order1.Amount + order2.Amount + order3.Amount, agg.Sum);

    // every child of the root is keyed by the Category axis
    Assert.IsTrue(pivot.Children.Keys.All(k => k.KeyName == "Category"));
    Assert.IsTrue(pivot.Children.Values.All(v => v.AxisValue.KeyName == "Category"));

    var geek = pivot.Children.Values.First(p => p.AxisValue.StringValue == "geek");
    Assert.AreEqual(2, geek.AggregatedValues.Count);

    // then with two axis
    pivot = new PivotLevel();
    pivot.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed2, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    Console.WriteLine(pivot.ToString());

    // "geek" orders have two distinct ProductId values, hence two sub-levels
    var geek1 = pivot.Children.Values.First(p => p.AxisValue.StringValue == "geek");
    Assert.AreEqual(2, geek1.AggregatedValues.Count);
    Assert.AreEqual(2, geek1.Children.Count);

    // check pivot merging
    // a new category
    var order4 = new Order
    {
        Amount = 66.5, Date = DateTimeOffset.Now, Category = "student", ClientId = 101, ProductId = 405,
        Id = Guid.NewGuid(), Quantity = 1
    };
    var packed4 = PackedObject.Pack(order4, schema);

    var pivot1 = new PivotLevel();
    pivot1.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot1.AggregateOneObject(packed2, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot1.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    var pivot2 = new PivotLevel();
    pivot2.AggregateOneObject(packed1, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot2.AggregateOneObject(packed3, new List<int> { 3, 4 }, new List<int> { 1, 2 });
    pivot2.AggregateOneObject(packed4, new List<int> { 3, 4 }, new List<int> { 1, 2 });

    pivot1.MergeWith(pivot2);

    Console.WriteLine(pivot1);

    // check that an aggregate is equal to the sum of the children
    var sum1 = pivot1.AggregatedValues.First(v => v.ColumnName == "Amount").Sum;
    var sum2 = pivot1.Children.Sum(c => c.Value.AggregatedValues.First(v => v.ColumnName == "Amount").Sum);
    Assert.AreEqual(sum1, sum2);
}