// Bulk-inserts 1000 sequential entries with duplicate checking disabled.
// The test passes as long as no exception escapes; the final assert is a
// deliberate no-op marker.
public void Insert1000DataIndexEntriesWithDuplicateCheckingDisabled()
{
    Delete();

    using (var processor = new DataIndexProcessor(_dataIndexPath))
    {
        for (int entryId = 1; entryId <= 1000; entryId++)
        {
            processor.AddIndex(new DataIndex(entryId, entryId, entryId, 50));
#if DEBUG
            Console.WriteLine("INSERT: {0}", entryId);
#endif
        }
    }

    Assert.IsTrue(true);
}
// Verifies that adding the same index twice with duplicate checking enabled
// raises a ConcurrencyException carrying a non-empty message.
public void AddDataIndexFailsDuplicate()
{
    Delete();

    bool duplicateFound = false;

    // Fix: DataIndexProcessor is IDisposable (other tests wrap it in `using`);
    // previously this test leaked the processor and its underlying file handle.
    using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath))
    {
        try
        {
            var dataIndex = new DataIndex(1, 1, 100, 50);
            dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);
            // Second insert of the identical index must be rejected.
            dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);
        }
        catch (ConcurrencyException ex)
        {
            if (!String.IsNullOrEmpty(ex.Message))
                duplicateFound = true;
#if DEBUG
            Console.WriteLine(ex.Message);
#endif
        }
    }

    Assert.True(duplicateFound);
}
// Bulk-inserts 1000 sequential entries with duplicate checking enabled,
// using small (8KB) read and write disk buffers. Passes if nothing throws;
// the final assert is a deliberate no-op marker.
public void Insert1000DataIndexEntriesWithDuplicateCheckingEnabledWith8KBReadCache()
{
    Delete();

    using (var processor = new DataIndexProcessor(_dataIndexPath, DiskBufferSize.Small, DiskBufferSize.Small))
    {
        for (int entryId = 1; entryId <= 1000; entryId++)
        {
            processor.AddIndexCheckForDuplicate(new DataIndex(entryId, entryId, entryId, 50));
#if DEBUG
            Console.WriteLine("INSERT: {0}", entryId);
#endif
        }
    }

    Assert.IsTrue(true);
}
// Bulk-inserts 1000 sequential entries with duplicate checking enabled,
// after first seeding a far-out-of-sequence index so every duplicate check
// must scan the full index. Passes if nothing throws.
public void Insert1000DataIndexEntriesWithDuplicateCheckingEnabledRandomRead()
{
    Delete();

    using (var processor = new DataIndexProcessor(_dataIndexPath, DiskBufferSize.Default, DiskBufferSize.Default))
    {
        // adding a random index forces the check for duplicates to search the full index.
        var outlier = new DataIndex(100000000, 100, 100, 50);
        processor.AddIndexCheckForDuplicate(outlier);

        for (int entryId = 1; entryId <= 1000; entryId++)
        {
            processor.AddIndexCheckForDuplicate(new DataIndex(entryId, entryId, entryId, 50));
#if DEBUG
            Console.WriteLine("INSERT: {0}", entryId);
#endif
        }
    }

    Assert.IsTrue(true);
}
// Verifies that an index added and then removed no longer reports as existing.
public void DeleteDataIndexSuccess()
{
    Delete();

    // Fix: DataIndexProcessor is IDisposable (other tests wrap it in `using`);
    // previously this test leaked the processor and its underlying file handle.
    bool doesExist;
    using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath))
    {
        var dataIndex = new DataIndex(1, 1, 100, 50);
        dataIndexProcessor.AddIndex(dataIndex);
        dataIndexProcessor.RemoveIndex(1);

        doesExist = dataIndexProcessor.DoesIndexExist(1);
    }

    Assert.False(doesExist);
}
// Verifies that a single index can be added without error; the final assert
// is a deliberate no-op marker (the test passes if nothing throws).
public void AddDataIndexSuccess()
{
    Delete();

    // Fix: DataIndexProcessor is IDisposable (other tests wrap it in `using`);
    // previously this test leaked the processor and its underlying file handle.
    using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath))
    {
        var dataIndex = new DataIndex(1, 1, 100, 50);
        dataIndexProcessor.AddIndex(dataIndex);
    }

    Assert.True(true);
}
// Verifies that requesting more free space than any stored index provides
// returns null.
public void GetDataIndexWithEnoughSpaceReturnsNull()
{
    Delete();

    // Fix: DataIndexProcessor is IDisposable (other tests wrap it in `using`);
    // previously this test leaked the processor and its underlying file handle.
    object loaded;
    using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath))
    {
        // insert an index with a total space of 200 bytes (100 * 0.2)
        var dataIndex = new DataIndex(1, 1, 100, 50);
        dataIndexProcessor.AddIndexCheckForDuplicate(dataIndex);

        // 500 bytes exceeds what the stored index can offer.
        loaded = dataIndexProcessor.GetDataIndexWithEnoughSpace(500);
    }

    Assert.IsNull(loaded);
}
// Verifies that adding an index after deleting another reuses the deleted
// slot, leaving the index file size unchanged.
public void DeleteIndexAddIndexCheckOverwrite()
{
    Delete();

    // Fix: DataIndexProcessor is IDisposable (other tests wrap it in `using`);
    // previously this test leaked the processor and its underlying file handle.
    long fileSizeBeforeAdd;
    long fileSizeAfterAdd;
    using (var dataIndexProcessor = new DataIndexProcessor(_dataIndexPath))
    {
        // insert an index with a total space of 200 bytes (100 * 0.2)
        var dataIndex = new DataIndex(1, 1, 100, 0);
        var dataIndex2 = new DataIndex(2, 1, 100, 0);

        dataIndexProcessor.AddIndexOverwriteDeleted(dataIndex);
        dataIndexProcessor.RemoveIndex(1);

        fileSizeBeforeAdd = dataIndexProcessor.FileSize;

        // The second add should overwrite the deleted slot, not grow the file.
        dataIndexProcessor.AddIndexOverwriteDeleted(dataIndex2);
        fileSizeAfterAdd = dataIndexProcessor.FileSize;
    }

    Assert.AreEqual(fileSizeBeforeAdd, fileSizeAfterAdd);
}