// Exercises the full store lifecycle: populate, read back, delete — repeated
// several rounds, alternating per-item deletes with whole-store deletes.
public void PopulateReadAllDeleteItems_Test()
{
    // Set default log directory ahead of LogLevel initialization.
    Sop.Log.Logger.DefaultLogDirectory = "c:\\SopBin";
    //Sop.Log.Logger.Instance.LogLevel = Sop.Log.LogLevels.Verbose;

    //**
    IStoreFactory sf = new StoreFactory();
    PeopleStore = sf.GetPersistentValue<long, Person>(Server.SystemFile.Store, "People");
    PeopleStoreByName = sf.GetPersistentKey<PersonKey, long>(Server.SystemFile.Store, "PeopleByName", new PersonComparer());

    // Repeat the Populate / Read all / Delete each Item routines n times.
    const int iterationCount = 6;
    for (int round = 0; round < iterationCount; round++)
    {
        Populate();
        ReadAll();
        // Even rounds delete item-by-item; odd rounds drop both stores wholesale.
        bool deletePerItem = round % 2 == 0;
        if (deletePerItem)
        {
            DeleteEachItem();
        }
        else
        {
            PeopleStore.Delete();
            PeopleStoreByName.Delete();
        }
        // Either deletion path must leave the primary store empty.
        Assert.IsTrue(PeopleStore.Count == 0, "Expected 0 elements but found {0}", PeopleStore.Count);
    }
}
// Inserts MaxCount Person records into PeopleStore and mirrors each key into
// PeopleStoreByName in batches of 1000 to minimize disk I/O head jumps.
// NOTE(review): removed the unused locals 'ZipCodeCtr' and 'oneTimeUpdateRead'
// from the original — neither was ever read.
void Populate()
{
    Person[] NewPeople = new Person[1000];
    int NewPeopleIndex = 0;
    for (int i = 0; i < MaxCount; i++)
    {
        int pid = GetNextSequence();
        Person p = new Person()
        {
            PersonID = pid,
            Key = new PersonKey()
            {
                FirstName = string.Format("Joe{0}", pid),
                LastName = string.Format("Peter{0}", pid)
            },
            PhoneNumber = "510-555-9999"
        };
        PeopleStore.Add(p.PersonID, p);
        NewPeople[NewPeopleIndex++] = p;
        //** do inserts on People Store By Name every batch of 1000 records
        //** to minimize disk I/O head jumps, causing more optimal insertion times...
        if (NewPeopleIndex == 1000)
        {
            foreach (Person np in NewPeople)
            {
                PeopleStoreByName.Add(np.Key, np.PersonID);
            }
            NewPeopleIndex = 0;
        }
        //** NOTE: SOP supports very large transactions.
        //** Commit boundaries occur every TransactionSize insertions across the two
        //** stores; drain any partial batch first so the by-name store is
        //** consistent at the commit point.
        if (i > 0 && i % TransactionSize == 0)
        {
            if (NewPeopleIndex > 0)
            {
                for (int i2 = 0; i2 < NewPeopleIndex; i2++)
                {
                    Person np = NewPeople[i2];
                    PeopleStoreByName.Add(np.Key, np.PersonID);
                }
                NewPeopleIndex = 0;
            }
        }
    }
    // Drain whatever remains in the final partial batch.
    if (NewPeopleIndex > 0)
    {
        for (int i2 = 0; i2 < NewPeopleIndex; i2++)
        {
            Person np = NewPeople[i2];
            PeopleStoreByName.Add(np.Key, np.PersonID);
        }
    }
}
// Iterates the by-name index and verifies, in batches of 1000 IDs, that every
// referenced Person exists in the primary PeopleStore.
// NOTE(review): removed two unused 'QueryResult<long>[] People;' declarations
// and fixed the "Mising" typo in the failure message.
void ReadAll()
{
    PeopleStoreByName.MoveFirst();
    PersonKey pk;
    int Ctr = 0;
    long[] Pids = new long[1000];
    int i = 0;
    bool personMissing = false;
    do
    {
        Ctr++;
        pk = PeopleStoreByName.CurrentKey;
        long PersonID = PeopleStoreByName.CurrentValue;
        Pids[i++] = PersonID;
        if (i == 1000)
        {
            //** verify a thousand people... batching like this is optimal use of SOP container...
            foreach (var pid in Pids)
            {
                if (!PeopleStore.Search(pid))
                {
                    personMissing = true;
                    Assert.Fail("Person with ID {0} not found.", pid);
                }
            }
            i = 0;
        }
    } while (PeopleStoreByName.MoveNext());
    // Verify the trailing partial batch, if any.
    if (i > 0)
    {
        long[] d = new long[i];
        Array.Copy(Pids, 0, d, 0, i);
        foreach (var l in d)
        {
            if (!PeopleStore.Search(l))
            {
                personMissing = true;
                Assert.Fail("Person with ID {0} not found.", l);
            }
        }
    }
    if (personMissing)
    {
        // Unreachable in practice (Assert.Fail above throws); kept as a safety net.
        Assert.Fail("Failed! Missing person detected.");
        return;
    }
    // Every record reachable via the index must be accounted for in the primary store.
    if (Ctr != PeopleStore.Count)
    {
        Assert.Fail("Failed! Read {0}, expected {1}", Ctr, PeopleStore.Count);
    }
    else
    {
        Console.WriteLine("Read {0} items.", Ctr);
    }
}
// Inserts MaxCount people, buffering BatchCount records at a time before
// mirroring them into PeopleStoreByName, flushing both stores per batch.
// NOTE(review): removed the unused local 'ZipCodeCtr' (never read).
void Populate()
{
    Person[] PeopleBuffer = new Person[BatchCount];
    int PeopleBufferIndex = 0;
    for (int i = 0; i < MaxCount; i++)
    {
        int pid = (int)PeopleStore.GetNextSequence();
        Person p = new Person()
        {
            PersonID = pid,
            Key = new PersonKey()
            {
                FirstName = string.Format("Joe{0}", pid),
                LastName = string.Format("Peter{0}", pid)
            },
            PhoneNumber = "510-555-9999"
        };
        PeopleStore.Add(p.PersonID, p);
        PeopleBuffer[PeopleBufferIndex++] = p;
        //** Insert to PeopleStoreByName every batch of BatchCount People
        //** This allows optimal insertion across two tables as low level File pointer jumps are minimized
        if (PeopleBufferIndex == BatchCount)
        {
            PeopleStore.Flush();
            foreach (Person p2 in PeopleBuffer)
            {
                PeopleStoreByName.Add(p2.Key, p2.PersonID);
            }
            PeopleStoreByName.Flush();
            PeopleBufferIndex = 0;
        }
    }
    if (PeopleBufferIndex > 0)
    {
        // BUG FIX: the original enumerated the WHOLE buffer here, re-adding stale
        // entries left over from the previous full batch (or hitting nulls when
        // MaxCount < BatchCount). Only the first PeopleBufferIndex slots belong
        // to the final partial batch.
        PeopleStore.Flush();
        for (int i2 = 0; i2 < PeopleBufferIndex; i2++)
        {
            Person p2 = PeopleBuffer[i2];
            PeopleStoreByName.Add(p2.Key, p2.PersonID);
        }
        PeopleStoreByName.Flush();
    }
    else
    {
        // Buffer drained exactly on the last iteration; just ensure final flushes.
        PeopleStore.Flush();
        PeopleStoreByName.Flush();
    }
}
// Populates all four stores (Address, People, and their "by key" indexes),
// flushing every 500 insertions and once more on completion.
void Populate()
{
    int zip = 5000;
    for (int recNo = 0; recNo < MaxCount; recNo++)
    {
        int addressId = (int)AddressStore.GetNextSequence();
        var address = new Address()
        {
            AddressID = addressId,
            Key = new AddressKey()
            {
                Street = string.Format("143{0} LoveLane", addressId),
                City = "Fremont",
                Country = "USA",
                State = "California",
                ZipCode = zip.ToString()
            }
        };
        int personId = (int)PeopleStore.GetNextSequence();
        var person = new Person()
        {
            PersonID = personId,
            AddressID = address.AddressID,
            Key = new PersonKey()
            {
                FirstName = string.Format("Joe{0}", personId),
                LastName = string.Format("Peter{0}", personId)
            },
            PhoneNumber = "510-555-9999"
        };
        // Primary stores first, then the "by key" index stores.
        AddressStore.Add(address.AddressID, address);
        PeopleStore.Add(person.PersonID, person);
        PeopleStoreByName.Add(person.Key, person.PersonID);
        AddressStoreByAddress.Add(address.Key, address.AddressID);
        // Every 500th record: advance the zip code and flush all four stores.
        if (recNo % 500 == 0)
        {
            zip++;
            AddressStore.Flush();
            PeopleStore.Flush();
            PeopleStoreByName.Flush();
            AddressStoreByAddress.Flush();
        }
    }
    // Final flush so trailing records are persisted.
    AddressStore.Flush();
    PeopleStore.Flush();
    PeopleStoreByName.Flush();
    AddressStoreByAddress.Flush();
}
// Streams the by-name index and, for each batch of BatchCount keys, resolves
// Person IDs, then Person records, then their Address records via bulk Query.
// NOTE(review): removed the unused counter 'Ctr' and the unused 'Pids' scratch
// array from the entry point; fixed a real defect — the original silently
// dropped the trailing partial batch (keys gathered after the last full batch
// were never queried).
void ReadAll()
{
    PeopleStoreByName.MoveFirst();
    PeopleStoreByName.HintBatchCount = 103;
    PersonKey[] pk = new PersonKey[BatchCount];
    int PidsIndex = 0;
    do
    {
        pk[PidsIndex++] = PeopleStoreByName.CurrentKey;
        if (PidsIndex == BatchCount)
        {
            ResolveBatchOfNames(pk, PidsIndex);
            PidsIndex = 0;
        }
    } while (PeopleStoreByName.MoveNext());
    // BUG FIX: resolve the trailing partial batch too.
    if (PidsIndex > 0)
    {
        PersonKey[] tail = new PersonKey[PidsIndex];
        Array.Copy(pk, 0, tail, 0, PidsIndex);
        ResolveBatchOfNames(tail, PidsIndex);
    }
}

// Resolves 'count' person keys to IDs, then to Person records, then to their
// Address records, using batched Query calls for optimal I/O.
private void ResolveBatchOfNames(PersonKey[] keys, int count)
{
    QueryResult<PersonKey>[] PeopleIDs;
    if (!PeopleStoreByName.Query(QueryExpression<PersonKey>.Package(keys), out PeopleIDs))
        return;
    long[] Pids = new long[PeopleIDs.Length];
    for (int i = 0; i < PeopleIDs.Length; i++)
    {
        Pids[i] = (long)PeopleIDs[i].Value;
    }
    QueryResult<long>[] PeopleFound;
    if (!PeopleStore.Query(QueryExpression<long>.Package(Pids), out PeopleFound))
        return;
    long[] Aids = new long[count];
    int found = 0;
    foreach (QueryResult<long> pf in PeopleFound)
    {
        if (pf.Found)
        {
            Aids[found++] = ((Person)pf.Value).AddressID;
        }
    }
    QueryResult<long>[] AddressesFound;
    if (AddressStore.Query(QueryExpression<long>.Package(Aids), out AddressesFound))
    {
        //** process found Address records here...
    }
}
// Walks the by-name index and, per entry, resolves the Person and that
// person's Address one record at a time (non-batched point-lookup demo).
void ReadAll()
{
    PeopleStoreByName.MoveFirst();
    PeopleStoreByName.HintBatchCount = 103;
    PersonKey currentKey;
    int readCount = 0;
    do
    {
        readCount++;
        // Read the current index entry: key plus the Person ID it maps to.
        currentKey = PeopleStoreByName.CurrentKey;
        long personId = PeopleStoreByName.CurrentValue;
        if (PeopleStore.Search(personId))
        {
            Person person = PeopleStore.CurrentValue;
            // Follow the Person -> Address reference with a second point lookup.
            if (AddressStore.Search(person.AddressID))
            {
                Address address = AddressStore.CurrentValue;
            }
        }
    } while (PeopleStoreByName.MoveNext());
}
//** read all the 5 million records
// Streams the by-name index and verifies, in batches of 1000 IDs, that every
// referenced Person exists in PeopleStore via bulk Query.
// NOTE(review): the duplicated verification loop is extracted into a helper,
// and the "Mising" typo in the output string is fixed.
void ReadAll()
{
    PeopleStoreByName.MoveFirst();
    PeopleStoreByName.HintBatchCount = 303;
    PersonKey pk;
    int Ctr = 0;
    long[] Pids = new long[1000];
    int i = 0;
    bool personMissing = false;
    do
    {
        Ctr++;
        pk = PeopleStoreByName.CurrentKey;
        long PersonID = PeopleStoreByName.CurrentValue;
        Pids[i++] = PersonID;
        if (i == 1000)
        {
            //** query a thousand people... batching like this is optimal use of SOP container...
            if (ReportMissingPeople(Pids))
            {
                personMissing = true;
            }
            i = 0;
        }
    } while (PeopleStoreByName.MoveNext());
    // Verify the trailing partial batch, if any.
    if (i > 0)
    {
        long[] d = new long[i];
        Array.Copy(Pids, 0, d, 0, i);
        if (ReportMissingPeople(d))
        {
            personMissing = true;
        }
    }
    if (personMissing)
    {
        Console.WriteLine("Failed! Missing person detected.");
        return;
    }
    if (Ctr != PeopleStore.Count)
    {
        Console.WriteLine("Failed! Read {0}, expected {1}", Ctr, PeopleStore.Count);
    }
    else
    {
        Console.WriteLine("Read {0} items.", Ctr);
    }
}

// Bulk-queries PeopleStore for the given IDs; logs each miss and returns
// true if any of the IDs were not found.
private bool ReportMissingPeople(long[] ids)
{
    bool missing = false;
    QueryResult<long>[] People;
    if (PeopleStore.Query(QueryExpression<long>.Package(ids), out People))
    {
        foreach (var p in People)
        {
            if (!p.Found)
            {
                missing = true;
                Console.WriteLine("Person with ID {0} not found.", p.Key);
            }
        }
    }
    return missing;
}
// Deletes every Person from the primary store in batches of 1000 (driven by
// the by-name index), then removes each entry from the by-name index itself,
// all inside a single transaction. Logic intentionally untouched except for
// the "Mising" typo fix and documentation.
void DeleteEachItem()
{
    // Ensure we are inside a transaction; commit happens at the end.
    if (server.Transaction == null)
    {
        server.BeginTransaction();
    }
    PeopleStoreByName.MoveFirst();
    PeopleStoreByName.HintBatchCount = 303;
    PersonKey pk;
    int Ctr = 0;
    long[] Pids = new long[1000];
    int i = 0;
    bool personMissing = false;
    do
    {
        Ctr++;
        pk = PeopleStoreByName.CurrentKey;
        long PersonID = PeopleStoreByName.CurrentValue;
        Pids[i++] = PersonID;
        if (i == 1000)
        {
            //** remove a thousand people... batching like this is optimal use of SOP container...
            QueryResult<long>[] People;
            if (PeopleStore.Remove(QueryExpression<long>.Package(Pids), out People))
            {
                foreach (var p in People)
                {
                    if (!p.Found)
                    {
                        personMissing = true;
                        Assert.Fail("Person with ID {0} not found.", p.Key);
                    }
                }
            }
            else
            {
                Assert.Fail("Failed to Remove a 1,000 people starting with PID {0}.", Pids[0]);
            }
            i = 0;
        }
    } while (PeopleStoreByName.MoveNext());
    // Remove the trailing partial batch, if any.
    if (i > 0)
    {
        QueryResult<long>[] People;
        long[] d = new long[i];
        Array.Copy(Pids, 0, d, 0, i);
        if (PeopleStore.Remove(QueryExpression<long>.Package(d), out People))
        {
            foreach (var p in People)
            {
                if (!p.Found)
                {
                    personMissing = true;
                    Assert.Fail("Person with ID {0} not found.", p.Key);
                }
            }
        }
    }
    if (personMissing)
    {
        // Unreachable in practice (Assert.Fail above throws); kept as a safety net.
        Assert.Fail("Failed! Missing person detected.");
        return;
    }
    if (Ctr != MaxCount)
    {
        Assert.Fail("Failed! Deleted {0}, expected {1}", Ctr, MaxCount);
    }
    else
    {
        Console.WriteLine("Deleted {0} items on 2ndary store.", Ctr);
    }
    // Now delete every entry from the by-name index itself, one at a time.
    Ctr = 0;
    while (PeopleStoreByName.MoveFirst())
    {
        Ctr++;
        pk = PeopleStoreByName.CurrentKey;
        PeopleStoreByName.Remove();
    }
    Console.WriteLine("Deleted {0} items.", Ctr);
    server.Commit();
}
// Streams the by-name index in batches of BatchCount, resolving Person IDs,
// Person records and their Address records via bulk Query, then reports totals.
// NOTE(review): three fixes — the trailing partial batch was never resolved;
// the inner count check hard-coded 1000 instead of the batch size; and the
// failure message contained a stray quote.
void ReadAll()
{
    PeopleStoreByName.MoveFirst();
    PeopleStoreByName.HintBatchCount = 103;
    PersonKey[] pk = new PersonKey[BatchCount];
    int PidsIndex = 0;
    int Ctr = 0;
    do
    {
        Ctr++;
        pk[PidsIndex++] = PeopleStoreByName.CurrentKey;
        if (PidsIndex == BatchCount)
        {
            FetchPeopleAndAddresses(pk, PidsIndex);
            PidsIndex = 0;
        }
    } while (PeopleStoreByName.MoveNext());
    // BUG FIX: resolve the trailing partial batch too.
    if (PidsIndex > 0)
    {
        PersonKey[] tail = new PersonKey[PidsIndex];
        Array.Copy(pk, 0, tail, 0, PidsIndex);
        FetchPeopleAndAddresses(tail, PidsIndex);
    }
    // The * 4 mirrors the original report: each logical read touches 4 containers.
    if (Ctr != PeopleStore.Count)
    {
        Console.WriteLine("Failed! Read {0}, expected {1}", Ctr * 4, PeopleStore.Count * 4);
    }
    else
    {
        Console.WriteLine("Read {0} items.", Ctr * 4);
    }
}

// Resolves 'count' person keys to Person IDs, fetches the Person records,
// then fetches their Address records, logging any read failures.
private void FetchPeopleAndAddresses(PersonKey[] keys, int count)
{
    QueryResult<PersonKey>[] PeopleIDs;
    if (!PeopleStoreByName.Query(QueryExpression<PersonKey>.Package(keys), out PeopleIDs))
        return;
    long[] Pids = new long[PeopleIDs.Length];
    for (int i = 0; i < PeopleIDs.Length; i++)
    {
        Pids[i] = (long)PeopleIDs[i].Value;
    }
    QueryResult<long>[] PeopleFound;
    if (!PeopleStore.Query(QueryExpression<long>.Package(Pids), out PeopleFound))
        return;
    long[] Aids = new long[count];
    int found = 0;
    foreach (QueryResult<long> pf in PeopleFound)
    {
        if (pf.Found)
        {
            Aids[found++] = ((Person)pf.Value).AddressID;
        }
    }
    QueryResult<long>[] AddressesFound;
    if (!AddressStore.Query(QueryExpression<long>.Package(Aids), out AddressesFound))
        return;
    //** process found Address records here...
    int readCount = 0;
    foreach (var a in AddressesFound)
    {
        readCount++;
        if (!a.Found)
        {
            Console.WriteLine("Failed to read {0}.", a.Key);
        }
    }
    // Compare against the actual batch size, not a hard-coded 1000.
    if (readCount != count)
    {
        Console.WriteLine("Failed to read {0} records, only read {1}.", count, readCount);
    }
}
// Populates Address/People stores and their "by key" indexes, buffering
// BatchCount records and writing each store's batch contiguously to minimize
// low-level file pointer jumps.
// NOTE(review): the 4-store add+flush sequence was duplicated verbatim for the
// full-batch and tail-batch paths; extracted into WriteBatch.
void Populate()
{
    int ZipCodeCtr = 5000;
    CacheRecord[] BatchedRecords = new CacheRecord[BatchCount];
    int BatchedIndex = 0;
    for (int i = 0; i < MaxCount; i++)
    {
        int aid = (int)AddressStore.GetNextSequence();
        Address addr = new Address()
        {
            AddressID = aid,
            Key = new AddressKey()
            {
                Street = string.Format("143{0} LoveLane", aid),
                City = "Fremont",
                Country = "USA",
                State = "California",
                ZipCode = ZipCodeCtr.ToString()
            }
        };
        int pid = (int)PeopleStore.GetNextSequence();
        Person p = new Person()
        {
            PersonID = pid,
            AddressID = addr.AddressID,
            Key = new PersonKey()
            {
                FirstName = string.Format("Joe{0}", pid),
                LastName = string.Format("Peter{0}", pid)
            },
            PhoneNumber = "510-555-9999"
        };
        BatchedRecords[BatchedIndex++] = new CacheRecord()
        {
            p = p,
            pKey = p.Key,
            addr = addr,
            addrKey = addr.Key
        };
        if (BatchedIndex == BatchCount)
        {
            WriteBatch(BatchedRecords, BatchedIndex);
            // Zip code advance kept at the original point in the flow
            // (only evaluated on batch boundaries, as before).
            if (i % 500 == 0)
            {
                ZipCodeCtr++;
            }
            BatchedIndex = 0;
        }
    }
    // Write out the trailing partial batch, if any.
    if (BatchedIndex > 0)
    {
        WriteBatch(BatchedRecords, BatchedIndex);
    }
}

// Adds the first 'count' buffered records to each of the four stores in turn,
// flushing after each store so every batch lands contiguously on disk.
private void WriteBatch(CacheRecord[] records, int count)
{
    for (int i2 = 0; i2 < count; i2++)
    {
        AddressStore.Add(records[i2].addr.AddressID, records[i2].addr);
    }
    AddressStore.Flush();
    for (int i2 = 0; i2 < count; i2++)
    {
        PeopleStore.Add(records[i2].p.PersonID, records[i2].p);
    }
    PeopleStore.Flush();
    for (int i2 = 0; i2 < count; i2++)
    {
        PeopleStoreByName.Add(records[i2].p.Key, records[i2].p.PersonID);
    }
    PeopleStoreByName.Flush();
    for (int i2 = 0; i2 < count; i2++)
    {
        AddressStoreByAddress.Add(records[i2].addr.Key, records[i2].addr.AddressID);
    }
    AddressStoreByAddress.Flush();
}