public static void Run() {
    Console.WriteLine("Connecting to Postgres Store...");
    var monkies = new PGDocumentList<Monkey>("northwindPG");
    monkies.Clear();

    var sw = new Stopwatch();
    Console.WriteLine("Loading 10,000 documents");
    sw.Start();
    var addRange = new List<Monkey>();
    for (int i = 0; i < 10000; i++) {
        addRange.Add(new Monkey { ID = i, Name = "MONKEY " + i, Birthday = DateTime.Today, Description = "The Monkey on my back" });
    }
    var inserted = monkies.AddRange(addRange);
    sw.Stop();
    Console.WriteLine("Just inserted {0} documents in {1} ms", inserted, sw.ElapsedMilliseconds);

    Console.WriteLine("Loading 100,000 documents");
    sw.Reset();
    addRange.Clear();
    monkies.Clear();
    sw.Start();
    for (int i = 0; i < 100000; i++) {
        addRange.Add(new Monkey { ID = i, Name = "MONKEY " + i, Birthday = DateTime.Today, Description = "The Monkey on my back" });
    }
    inserted = monkies.AddRange(addRange);
    sw.Stop();
    Console.WriteLine("Just inserted {0} documents in {1} ms", inserted, sw.ElapsedMilliseconds);

    // Use a DB that has an int PK
    sw.Reset();
    sw.Start();
    Console.WriteLine("Loading {0}...", inserted);
    monkies.Reload();
    sw.Stop();
    Console.WriteLine("Loaded {0} documents from Postgres in {1} ms", inserted, sw.ElapsedMilliseconds);

    sw.Reset();
    sw.Start();
    Console.WriteLine("Querying a middle range of documents");
    // Materialize the result inside the timed block; Where() alone is deferred and would not execute until Count() is called after the stopwatch stops
    var found = monkies.Where(x => x.ID > 100 && x.ID < 500).ToList();
    sw.Stop();
    Console.WriteLine("Queried {0} documents in {1} ms", found.Count, sw.ElapsedMilliseconds);
}
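// For reference: a minimal sketch of the Monkey document class the benchmark above appears
// to assume. Property names are inferred from the object initializers in the snippet; the
// actual class in the source project may carry additional members or Biggy-specific attributes.
public class Monkey {
    public int ID { get; set; }             // integer primary key, used by the ID range query
    public string Name { get; set; }
    public DateTime Birthday { get; set; }
    public string Description { get; set; }
}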
public void CreatesSerialPK() {
    var actors = new PGDocumentList<Actor>("northwindPG");
    var newActor = new Actor { First_Name = "Joe", Last_Name = "Blow" };
    actors.Add(newActor);
    Assert.True(newActor.Actor_ID > 0);
}
public void Bulk_Inserts_Documents_With_String_PK() {
    int INSERT_QTY = 100;
    var monkies = new PGDocumentList<MonkeyDocument>("chinookPG");
    monkies.Clear();
    var addRange = new List<MonkeyDocument>();
    for (int i = 0; i < INSERT_QTY; i++) {
        addRange.Add(new MonkeyDocument { Name = "MONKEY " + i, Birthday = DateTime.Today, Description = "The Monkey on my back" });
    }
    var inserted = monkies.AddRange(addRange);
    Assert.True(inserted == INSERT_QTY);
}
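// For reference: a rough sketch of the MonkeyDocument class assumed by the string-PK test above.
// Only Name, Birthday and Description appear in the snippet; the string key property and its
// name ("Key") are hypothetical stand-ins for whatever the source project actually defines.
public class MonkeyDocument {
    public string Key { get; set; }         // hypothetical string primary key
    public string Name { get; set; }
    public DateTime Birthday { get; set; }
    public string Description { get; set; }
}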
public PGDocs() {
    // Start fresh each time, with no existing table, to keep serial PKs from exploding:
    if (this.TableExists("clientdocuments")) {
        this.DropTable("clientdocuments");
    }
    if (this.TableExists("monkeydocuments")) {
        this.DropTable("monkeydocuments");
    }
    clientDocs = new PGDocumentList<ClientDocument>(_connectionStringName);
    monkeyDocs = new PGDocumentList<MonkeyDocument>(_connectionStringName);
    clientDocs.Clear();
}
public void CreatesSerialPK() {
    var actors = new PGDocumentList<Actor>("northwindPG");
    var newActor = new Actor { First_Name = "Joe", Last_Name = "Blow" };
    actors.Add(newActor);
    int newId = newActor.Actor_ID;
    actors.Reload();
    Assert.True(actors.Any(a => a.Actor_ID == newId));
}
static void InsertsManyActors() {
    var actors = new PGDocumentList<Actor>("northwindPG");
    var bulkList = new List<Actor>();
    for (int i = 0; i < 100; i++) {
        var newActor = new Actor { First_Name = "Actor " + i, Last_Name = "Test" };
        bulkList.Add(newActor);
    }
    actors.AddRange(bulkList);
    Assert.True(actors.Last().Actor_ID > 90);
}
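// For reference: a minimal sketch of the Actor class used by the serial-PK tests above.
// Actor_ID is expected to be populated by the database (serial/auto-increment) after Add;
// the real class in the source project may differ from this sketch.
public class Actor {
    public int Actor_ID { get; set; }       // serial primary key, assigned on insert
    public string First_Name { get; set; }
    public string Last_Name { get; set; }
}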
public void RecordsPopulated() {
    docs.Clear();
    var newCustomer = new CustomerDocument { Email = "*****@*****.**", First = "Buddy", Last = "Conery" };
    docs.Add(newCustomer);

    // Load a new, separate list
    var customers = new PGDocumentList<CustomerDocument>("dvds");

    // There should be some records here based on the above
    Assert.Equal("*****@*****.**", customers.First().Email);
    Assert.Equal("Buddy", customers.First().First);
    Assert.Equal("Conery", customers.First().Last);
}
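// For reference: a minimal sketch of the CustomerDocument class assumed above. Email, First
// and Last are the only properties the snippet touches; any key property is omitted here and
// would be defined in the source project.
public class CustomerDocument {
    public string Email { get; set; }
    public string First { get; set; }
    public string Last { get; set; }
}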
static void InsertsManyMonkeys() {
    int INSERT_QTY = 10000;
    var monkies = new PGDocumentList<Monkey>("northwindPG");
    monkies.Clear();
    var addRange = new List<Monkey>();
    for (int i = 0; i < INSERT_QTY; i++) {
        addRange.Add(new Monkey { Name = "MONKEY " + i, Birthday = DateTime.Today, Description = "The Monkey on my back" });
    }
    var inserted = monkies.AddRange(addRange);
    Assert.True(inserted == INSERT_QTY && monkies.Count == inserted);
}
static void Bulk_Inserts_Documents_With_Serial_PK() {
    int insertQty = 100;
    var ClientDocuments = new PGDocumentList<ClientDocument>("chinookPG");
    var bulkList = new List<ClientDocument>();
    for (int i = 0; i < insertQty; i++) {
        var newClientDocument = new ClientDocument { FirstName = "ClientDocument " + i, LastName = "Test", Email = "*****@*****.**" };
        bulkList.Add(newClientDocument);
    }
    int inserted = ClientDocuments.AddRange(bulkList);
    var last = ClientDocuments.Last();
    Assert.True(inserted == insertQty && last.ClientDocumentId >= insertQty);
}
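// For reference: a minimal sketch of the ClientDocument class the tests and benchmarks here
// appear to assume. Property names are taken from the snippets; the actual class in the source
// project may carry additional members or attributes.
public class ClientDocument {
    public int ClientDocumentId { get; set; }   // serial primary key, used in the range query
    public string FirstName { get; set; }
    public string LastName { get; set; }
    public string Email { get; set; }
}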
public static void Run() {
    Console.WriteLine("===========================================================");
    Console.WriteLine("POSTGRES - LOAD A BUNCH OF DOCUMENTS INTO A TABLE");
    Console.WriteLine("===========================================================");
    Console.WriteLine("Connecting to Postgres Document Store...");

    // Start clean and fresh . . .
    if (Benchmarks.TableExists("clientdocuments")) {
        Benchmarks.DropTable("clientdocuments");
    }
    var _clientDocuments = new PGDocumentList<ClientDocument>(_connectionStringName);
    _clientDocuments.Clear();

    var sw = new Stopwatch();
    var addRange = new List<ClientDocument>();
    for (int i = 0; i < 10000; i++) {
        addRange.Add(new ClientDocument { LastName = "Conery " + i, FirstName = "Rob", Email = "*****@*****.**" });
    }
    sw.Start();
    var inserted = _clientDocuments.AddRange(addRange);
    sw.Stop();
    Console.WriteLine("\t Just inserted {0} documents in {1} ms", inserted, sw.ElapsedMilliseconds);

    // Start clean and fresh again . . .
    _clientDocuments.Clear();
    addRange.Clear();
    Benchmarks.DropTable("clientdocuments");
    _clientDocuments = new PGDocumentList<ClientDocument>(_connectionStringName);
    sw.Reset();
    Console.WriteLine("Loading 100,000 documents");
    for (int i = 0; i < 100000; i++) {
        addRange.Add(new ClientDocument { LastName = "Conery " + i, FirstName = "Rob", Email = "*****@*****.**" });
    }
    sw.Start();
    inserted = _clientDocuments.AddRange(addRange);
    sw.Stop();
    Console.WriteLine("\t Just inserted {0} documents in {1} ms", inserted, sw.ElapsedMilliseconds);

    // Use a DB that has an int PK
    sw.Reset();
    Console.WriteLine("Loading {0}...", inserted);
    sw.Start();
    _clientDocuments.Reload();
    sw.Stop();
    Console.WriteLine("\t Loaded {0} documents from Postgres in {1} ms", inserted, sw.ElapsedMilliseconds);

    sw.Reset();
    Console.WriteLine("Querying a middle range of documents");
    sw.Start();
    // Materialize the result inside the timed block; Where() alone is deferred and would not execute until Count() is called after the stopwatch stops
    var found = _clientDocuments.Where(x => x.ClientDocumentId > 100 && x.ClientDocumentId < 500).ToList();
    sw.Stop();
    Console.WriteLine("\t Queried {0} documents in {1} ms", found.Count, sw.ElapsedMilliseconds);

    sw.Reset();
    Console.WriteLine("Adds items in a loop, followed by a bulk insert");
    if (TableExists("items")) {
        DropTable("items");
    }
    sw.Start();
    var list = new List<Item>();
    for (int i = 1; i < 6; i++) {
        list.Add(new Item() { Name = "Item no " + i });
    }
    var items = new PGDocumentList<Item>("chinookPG");
    // 1. Add items in a loop
    foreach (var item in list) {
        items.Add(item);
    }
    // 2. Add items using AddRange...
    items.AddRange(list);
    sw.Stop();
    Console.WriteLine("\t Added {0} items in a loop, then added the same items as a bulk insert in {1} ms", list.Count, sw.ElapsedMilliseconds);
}
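// For reference: a minimal sketch of the Item class used in the loop-versus-bulk-insert step
// above. Only Name appears in the snippet; any key property the source project defines is
// omitted here.
public class Item {
    public string Name { get; set; }
}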
public PGDocs() {
    docs = new PGDocumentList<CustomerDocument>("dvds");
    // Drop and reload
    docs.Clear();
}
static void Bulk_Inserts_Documents_With_Serial_PK() {
    int insertQty = 100;
    var ClientDocuments = new PGDocumentList<ClientDocument>("chinookPG");
    var bulkList = new List<ClientDocument>();
    for (int i = 0; i < insertQty; i++) {
        var newClientDocument = new ClientDocument { FirstName = "ClientDocument " + i, LastName = "Test", Email = "*****@*****.**" };
        bulkList.Add(newClientDocument);
    }
    int inserted = ClientDocuments.AddRange(bulkList);
    // I have NO IDEA why this silliness suddenly became necessary (the .ToList().Last() bit).
    // This test worked fine last nite. Now says PGDocumentList<> has no extension Last() JA - 3/12/2014
    Assert.True(inserted == insertQty && ClientDocuments.ToList().Last().ClientDocumentId > insertQty);
}