Example #1
        public void time_inserts<TSerializer, TRegistry>(Target[] data)
            where TSerializer : ISerializer
            where TRegistry : MartenRegistry, new()
        {
            var container = Container.For<DevelopmentModeRegistry>();

            container.Configure(_ => _.For<ISerializer>().Use<TSerializer>());

            // Completely removes all the database schema objects for the
            // Target document type
            container.GetInstance<DocumentCleaner>().CompletelyRemoveAll();

            // Apply the schema customizations
            container.GetInstance<IDocumentSchema>().Alter<TRegistry>();

            using (var session = container.GetInstance<IDocumentStore>().OpenSession())
            {
                var store = container.GetInstance<IDocumentStore>();

                // Time a single bulk insert of the documents
                var time = Timings.Time(() => { store.BulkInsert(data); });

                var description =
                    $"{data.Length} documents / {typeof(TSerializer).Name} / {typeof(TRegistry).Name}: {time}";

                Debug.WriteLine(description);

                _timings[typeof(TSerializer)].Record<TRegistry>(data.Length, time);
            }
        }
Example #2
        public double Insert(IContainer container, Target[] data)
        {
            var batch = container.GetInstance<UpdateBatch>();

            var unitOfWork = container.GetInstance<UnitOfWork>();

            // Time queueing every document into the unit of work and flushing
            // the pending changes through a single update batch
            return Timings.Time(() =>
            {
                unitOfWork.Store(data);
                unitOfWork.ApplyChanges(batch);
            });
        }
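
Every one of these listings times its work through a Timings.Time helper that never appears in the snippets themselves. Its real implementation is not shown, so the following is only a guess at its shape: a minimal Stopwatch-based sketch, assuming the parameterless overload returns elapsed milliseconds as a double (Example #2 returns that value directly) and that the labeled overload used later ("Using BinaryImport", "Fetching ...") simply writes the result out.

    // Assumed helper, not part of the original listings.
    // Requires: using System; using System.Diagnostics;
    public static class Timings
    {
        // Run the action once and return the elapsed time in milliseconds.
        public static double Time(Action action)
        {
            var stopwatch = Stopwatch.StartNew();
            action();
            stopwatch.Stop();

            return stopwatch.Elapsed.TotalMilliseconds;
        }

        // Labeled overload matching the Timings.Time("...", () => ...) calls below.
        public static void Time(string description, Action action)
        {
            Debug.WriteLine($"{description}: {Time(action)} ms");
        }
    }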
Example #3
        public void generate_data()
        {
            //theContainer.Inject<ISerializer>(new TestsSerializer());

            theContainer.GetInstance<DocumentCleaner>().CompletelyRemove(typeof(Target));

            // Get Roslyn spun up before measuring anything
            var schema = theContainer.GetInstance<IDocumentSchema>();

            var storage = theContainer.GetInstance<IDocumentStore>().As<DocumentStore>().Storage;

            storage.MappingFor(typeof(Target)).As<DocumentMapping>().DuplicateField("Date");

            storage.StorageFor(typeof(Target)).ShouldNotBeNull();

            theContainer.GetInstance<DocumentCleaner>().DeleteDocumentsFor(typeof(Target));

            var session = theContainer.GetInstance<IDocumentStore>().OpenSession();
            var store   = theContainer.GetInstance<IDocumentStore>();

            var data = Target.GenerateRandomData(10000).ToArray();

            Timings.Time(() => { store.BulkInsert(data); });

            var theDate = DateTime.Today.AddDays(3);

            var one = Timings.Time(() =>
            {
                var sql = "select data from mt_doc_target where (data ->> 'Date')::date = ?";
                session.Query<Target>(sql, theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });

            var two = Timings.Time(() =>
            {
                var sql =
                    "select r.data from mt_doc_target as r, LATERAL jsonb_to_record(r.data) as l(\"Date\" date) where l.\"Date\" = ?";

                session.Query<Target>(sql, theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });

            var three = Timings.Time(() =>
            {
                var sql =
                    "select r.data from mt_doc_target as r where r.date = ?";

                session.Query<Target>(sql, theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });

            Debug.WriteLine($"json locator: {one}, lateral join: {two}, searchable field: {three}");
        }
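
Target.GenerateRandomData(10000) is another helper these examples take for granted. The real Target document has many more members; the sketch below is purely an assumption that keeps only the two members the listings actually touch (a Guid Id and a DateTime Date), just to make the shape of the data behind BulkInsert and the Date queries concrete.

    // Hypothetical stand-in for the Target document used above.
    // Requires: using System; using System.Collections.Generic;
    public class Target
    {
        public Guid Id { get; set; }
        public DateTime Date { get; set; }

        // Generates documents whose Date lands within the next week, so a query
        // for DateTime.Today.AddDays(3) has rows to find.
        public static IEnumerable<Target> GenerateRandomData(int count)
        {
            var random = new Random();

            for (var i = 0; i < count; i++)
            {
                yield return new Target
                {
                    Id = Guid.NewGuid(),
                    Date = DateTime.Today.AddDays(random.Next(0, 7))
                };
            }
        }
    }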
Example #4
        public void time_query<TSerializer, TRegistry>(Target[] data)
            where TSerializer : ISerializer
            where TRegistry : MartenRegistry, new()
        {
            var container = Container.For<DevelopmentModeRegistry>();

            container.Configure(_ => _.For<ISerializer>().Use<TSerializer>());

            // Completely removes all the database schema objects for the
            // Target document type
            container.GetInstance<DocumentCleaner>().CompletelyRemoveAll();

            // Apply the schema customizations
            container.GetInstance<IDocumentSchema>().Alter<TRegistry>();

            using (var session = container.GetInstance<IDocumentStore>().OpenSession())
            {
                var store = container.GetInstance<IDocumentStore>();

                store.BulkInsert(data);

                var theDate   = data.ElementAt(0).Date;
                var queryable = session.Query<Target>().Where(x => x.Date == theDate);

                Debug.WriteLine(store.Diagnostics.CommandFor(queryable).CommandText);

                // Once to warm up
                var time = Timings.Time(() => { queryable.ToArray().Length.ShouldBeGreaterThan(0); });

                // Then average five timed runs
                var times = new double[5];
                for (var i = 0; i < 5; i++)
                {
                    times[i] = Timings.Time(() => { queryable.ToArray().Length.ShouldBeGreaterThan(0); });
                }

                var average = times.Average();

                var description =
                    $"{data.Length} documents / {typeof(TSerializer).Name} / {typeof(TRegistry).Name}: {average}";

                Debug.WriteLine(description);

                _timings[typeof(TSerializer)].Record<TRegistry>(data.Length, average);
            }
        }
Example #5
        public double Insert(IContainer container, Target[] data)
        {
            var schema     = container.GetInstance<IDocumentSchema>();
            var runner     = container.GetInstance<IManagedConnection>();
            var storage    = schema.StorageFor(typeof(Target));
            var serializer = container.GetInstance<ISerializer>();

            // The per-document upsert path this benchmark exercised is no longer
            // supported, so the body stays commented out and the timing below
            // measures an empty action.
            return Timings.Time(() =>
            {
                //connection.ExecuteInTransaction((conn, tx) =>
                //{
                //    data.Each(t =>
                //    {
                //        throw new NotSupportedException("This mechanism is no longer supported");
                //        //var cmd = storage.UpsertCommand(t, serializer.ToJson(t));
                //        //cmd.Connection = conn;
                //        //cmd.Transaction = tx;
                //        //cmd.ExecuteNonQuery();
                //    });
                //});
            });
        }
Example #6
        public double Insert(IContainer container, Target[] data)
        {
            var store = container.GetInstance<IDocumentStore>();

            // Time Marten's built-in bulk insert of the whole data set
            return Timings.Time(() => store.BulkInsert(data));
        }
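
The Insert methods in Examples #2, #5, and #6 all share the same signature, which suggests they are alternative strategies fed to a common comparison harness. That harness is not part of these listings; the sketch below is one hypothetical way to drive them side by side (the UnitOfWorkInsert and BulkInsertOnly class names are invented here purely to reference the Insert methods from Examples #2 and #6).

    // Hypothetical driver, not part of the original listings.
    // Requires: using System; using System.Collections.Generic; using System.Diagnostics;
    public static class InsertComparison
    {
        public static void Run(IContainer container, Target[] data)
        {
            var strategies = new Dictionary<string, Func<IContainer, Target[], double>>
            {
                ["unit of work + update batch"] = new UnitOfWorkInsert().Insert,
                ["bulk insert"]                 = new BulkInsertOnly().Insert
            };

            foreach (var strategy in strategies)
            {
                // Start each strategy from an empty table so the timings are comparable
                container.GetInstance<DocumentCleaner>().DeleteDocumentsFor(typeof(Target));

                Debug.WriteLine($"{strategy.Key}: {strategy.Value(container, data)}");
            }
        }
    }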
Example #7
        public void generate_data()
        {
            theContainer.Inject<ISerializer>(new JilSerializer());
            //theContainer.Inject<ISerializer>(new NetJSONSerializer());

            theContainer.GetInstance<DocumentCleaner>().CompletelyRemove(typeof(Target));

            // Get Roslyn spun up before measuring anything
            var schema = theContainer.GetInstance<IDocumentSchema>();

            schema.MappingFor(typeof(Target)).DuplicateField("Date");

            schema.StorageFor(typeof(Target)).ShouldNotBeNull();

            theContainer.GetInstance<DocumentCleaner>().DeleteDocumentsFor(typeof(Target));

            var runner     = theContainer.GetInstance<CommandRunner>();
            var serializer = theContainer.GetInstance<ISerializer>();

            for (int i = 0; i < 10; i++)
            {
                var data = Target.GenerateRandomData(10000).ToArray();
                Timings.Time("Using BinaryImport", () =>
                {
                    runner.Execute(conn =>
                    {
                        // Stream the documents straight into PostgreSQL with a binary COPY
                        using (var writer = conn.BeginBinaryImport("COPY mt_doc_target (id, data, date) FROM STDIN BINARY"))
                        {
                            data.Each(x =>
                            {
                                writer.StartRow();
                                writer.Write(x.Id, NpgsqlDbType.Uuid);
                                writer.Write(serializer.ToJson(x), NpgsqlDbType.Jsonb);
                                writer.Write(x.Date, NpgsqlDbType.Date);
                            });
                        }
                    });
                });
            }

            var session = theContainer.GetInstance<IDocumentSession>();

            var theDate = DateTime.Today.AddDays(3);

            Timings.Time("Fetching as is", () =>
            {
                session.Query<Target>().Where(x => x.Date == theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });

            /*
             * SELECT r.id
             * FROM resources AS r,
             * LATERAL jsonb_to_record(r.fields) AS l(polled integer)
             * WHERE l.polled > 50;
             */

            Timings.Time("Fetching with lateral join", () =>
            {
                var sql =
                    "select r.data from mt_doc_target as r, LATERAL jsonb_to_record(r.data) as l(\"Date\" date) where l.\"Date\" = ?";

                session.Query<Target>(sql, theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });

            Timings.Time("Fetching with duplicated field", () =>
            {
                var sql =
                    "select r.data from mt_doc_target as r where r.date = ?";

                session.Query<Target>(sql, theDate).ToArray().Length.ShouldBeGreaterThan(0);
            });
        }