public void TestWritingNotInOrder()
{
    const string subFolderName = "TestWritingNotInOrder";
    const int desiredFileSize = 100;
    const int itemCount = 100000;
    const int intervalMinutes = 1;

    Mock.MultiLevelDataRouter router = new Mock.MultiLevelDataRouter(2, 2);
    IRepositoryFolder targetFolder = Repository.RootFolder
        .GetDescendant(FixtureRootRepoFolder.LogicalPath, false)
        .CreateSubfolder(subFolderName);
    targetFolder.Properties.DesiredItemsPerFile = desiredFileSize;

    IDataItem[] data = GetTestData(itemCount, DateTime.Now, intervalMinutes);

    // Two passes: the first writes even-indexed items, the second odd-indexed ones,
    // so the repository receives the data out of chronological order.
    for (int parity = 0; parity < 2; ++parity)
    {
        using (IRepositoryWriter writer = GetWriter(targetFolder, router))
        {
            for (int index = parity; index < itemCount; index += 2)
            {
                writer.Write(dataItem: data[index]);
            }
        }
    }

    // A targetFolder.Refresh(true, true) call would only be required if the writing
    // had been done in a standalone instance (the target folder would not know about
    // subfolders created during writing); the writer used here is not standalone.
    CheckAllDataInFolder(targetFolder, data);
}
/// <summary>
/// Writes <paramref name="count"/> sequentially numbered test items through
/// <paramref name="writer"/> and flushes it afterwards.
/// </summary>
public static void Populate(IRepositoryWriter writer, int count)
{
    for (int itemNumber = 0; itemNumber < count; ++itemNumber)
    {
        writer.Write(Mock.TestDataItem.GetTestItem(itemNumber));
    }
    writer.Flush();
}
/// <summary>
/// Persists the given catalogue: obtains a writer, writes the catalogue and closes it.
/// </summary>
private void Save(Catalogue catalogue)
{
    IRepositoryWriter writer = CreateWriter();
    writer.Write(catalogue);
    writer.Close();
}
/// <summary>
/// Writes 100 items into a new subfolder, verifies that writing an item with a
/// RelativePath automatically creates the required subfolder chain, then deletes
/// the folder and checks it is removed from disk.
/// </summary>
public void WriteTest()
{
    string subFolderName = "WriteTest";
    int initialSubfoldersCount = Repository.RootFolder.SubFolders.Count;
    IRepositoryFolder targetFolder = FixtureRootRepoFolder.CreateSubfolder(subFolderName);
    string fullPath = targetFolder.FullPath;

    // FIX: the writer was previously a bare local and leaked whenever one of the
    // assertions below threw; IRepositoryWriter is disposable (it is used with
    // 'using' elsewhere in this fixture), so guarantee disposal on all paths.
    using (IRepositoryWriter writer = targetFolder.GetWriter())
    {
        writer.AllowSubfoldersCreation = true;

        Mock.TestDataItem item;
        for (int n = 0; n < 100; ++n)
        {
            item = Mock.TestDataItem.GetTestItem(n);
            writer.Write(item);
        }

        // testing automatic subfolder creation
        item = Mock.TestDataItem.GetTestItem(1000);
        item.RelativePath = "AutoSubfolder1/Auto2";
        writer.Write(item);
        Assert.AreEqual(1, targetFolder.SubFolders.Count, "Automatic subfolder creation during write failed");
        Assert.IsNotNull(targetFolder.GetSubFolder("AutoSubfolder1"));
        Assert.AreEqual(1, targetFolder.GetSubFolder("AutoSubfolder1").SubFolders.Count);
        Assert.IsNotNull(targetFolder.GetSubFolder("AutoSubfolder1").GetSubFolder("Auto2"));

        writer.Flush();
        writer.Close();
    }

    targetFolder.Delete(true, true);
    Assert.IsFalse(Directory.Exists(fullPath), "Directory not removed from disk");
    Assert.AreEqual(initialSubfoldersCount, Repository.RootFolder.SubFolders.Count);
}
/// <summary>
/// Verifies GetUnsavedItems() accounting while writing through a router:
/// the unsaved total tracks items written since the last flush, and drops
/// to zero immediately after an explicit Flush().
/// </summary>
public void GetUnsavedItemsWithFlushTest()
{
    const int subfolderCount = 3;
    string subFolderName = "GetUnsavedItemsTest";
    IRepositoryFolder targetFolder = FixtureRootRepoFolder.CreateSubfolder(subFolderName);
    IRepositoryWriter writer = targetFolder.GetWriter();
    IDataRouter dataRouter = new Mock.NumberedDataRouter(subfolderCount);
    writer.DataRouter = dataRouter;
    writer.AllowSubfoldersCreation = true;
    string fullPath = targetFolder.FullPath;

    int lastFlushCount = 0;
    for (int n = 0; n < 10000; ++n)
    {
        Mock.TestDataItem item = Mock.TestDataItem.GetTestItem(n);
        writer.Write(item);

        int written = n + 1;
        if (written % 10 == 0)
        {
            // Snapshot without flushing: everything since the last flush is unsaved.
            IDictionary<string, IList<IDataItem>> unsavedItems = writer.GetUnsavedItems();
            Assert.IsNotNull(unsavedItems);
            Assert.AreEqual(Math.Min(written, subfolderCount), unsavedItems.Count, "Unsaved items dictionary entry count is not equal to the direct writers count");
            Assert.AreEqual(written - lastFlushCount, unsavedItems.Values.Sum((l) => l.Count), "Total number of unsaved items incorrect");
        }
        else if (written % 134 == 0)
        {
            // Flush and confirm the unsaved backlog is emptied.
            writer.Flush();
            lastFlushCount = written;
            IDictionary<string, IList<IDataItem>> unsavedItems = writer.GetUnsavedItems();
            Assert.IsNotNull(unsavedItems);
            Assert.AreEqual(Math.Min(written, subfolderCount), unsavedItems.Count, "Unsaved items dictionary entry count is not equal to the direct writers count");
            Assert.AreEqual(0, unsavedItems.Values.Sum((l) => l.Count), "Total number of unsaved items after flush must be 0");
        }
    }
    writer.Close();
}
// Populates a folder tree through a routing writer, then exercises readers:
// sequential forward read, reader re-creation from a saved Position, Seek to the
// middle, direction reversal (Backwards then Forwards) and the item counts and
// timestamps expected at each step.
public void QuickReaderTest()
{
    // Recreate the target folder from scratch so the test starts from a known empty state.
    string targetFolderName = "QuickReaderTest";
    IRepositoryFolder targetFolder = FixtureRootRepoFolder.GetSubFolder(targetFolderName);
    if (targetFolder != null)
    {
        targetFolder.Delete(true, true);
    }
    targetFolder = FixtureRootRepoFolder.CreateSubfolder(targetFolderName);
    string targetFolderPath = targetFolder.FullPath;
    const int subfolderCount = 3;
    const int itemsIntervalHours = 1;
    const int desiredFileSize = 2000;
    targetFolder.Properties.DesiredItemsPerFile = desiredFileSize;

    // Router distributes items across subfolderCount subfolders created on demand.
    IRepositoryWriter writer = targetFolder.GetWriter();
    IDataRouter dataRouter = new NumberedDataRouter(subfolderCount);
    writer.DataRouter = dataRouter;
    writer.AllowSubfoldersCreation = true;

    // Populate: one item per hour, starting 10 days in the past.
    DateTime firstTime = DateTime.Now.AddDays(-10);
    DateTime lastTime = DateTime.MinValue;
    int itemsCount = 100000;
    int n;
    for (n = 0; n < itemsCount; ++n)
    {
        Mock.TestDataItem item = Mock.TestDataItem.GetTestItem(n);
        lastTime = firstTime.AddHours(n * itemsIntervalHours);
        item.DateTime = lastTime;
        writer.Write(item);
    }
    writer.Flush();
    writer.Close();

    // will test lazy loading
    targetFolder.UnloadSubfolders();
    Assert.IsTrue(targetFolder.SubFolders.Count == subfolderCount, "Router had to make writer create the configured number of subfolders");

    // Item 0's subfolder must report firstTime as its first timestamp.
    IRepositoryFolder firstItemSubfolder = targetFolder.GetDescendant(
        dataRouter.GetRelativePath(Mock.TestDataItem.GetTestItem(0)), false);
    Assert.AreEqual(firstTime, firstItemSubfolder.FirstTimestamp
        , "Fisrt item timestamp reported incorrectly by Folder.FirstTimestamp");
    Assert.AreEqual(firstTime, targetFolder.GetFirstItemTimestamp(true, false)
        , "Fisrt item timestamp reported incorrectly by Folder.GetFirstItemTimestamp");

    // Forward read of the whole data set; every 100 items a second reader is
    // re-created from the current Position and must yield the same item.
    IRepositoryReader reader = targetFolder.GetReader(firstTime, true);
    Assert.IsTrue(reader.HasData, "Folder just populated but no data can be read");
    IDataItemRead ritem = null;
    n = 0;
    IRepositoryReader altReader = null;
    SeekStatusListener seekStatusListener = new SeekStatusListener();
    while (reader.HasData)
    {
        if (n > 0 && n % 100 == 0)
        {
            altReader = Repository.ObjectFactory.GetReader(reader.Position, seekStatusListener.HanldeStatus);
        }
        ritem = reader.Read();
        Assert.IsNotNull(ritem, "reader.Read() returned null after returning true from HasData");
        Assert.AreNotSame(targetFolder, ritem.RepositoryFolder, "Router failed");
        Assert.IsInstanceOf<Mock.TestDataItem>(ritem.DataItem, "Data item read from repository is of different type");
        Assert.AreEqual(firstTime.AddHours(n * itemsIntervalHours), ritem.DataItem.DateTime);
        ((Mock.TestDataItem)ritem.DataItem).Check(n);
        if (altReader != null)
        {
            // Position-restored reader must track the primary reader exactly,
            // with no seek status notifications raised.
            IDataItemRead altItem = altReader.Read();
            Assert.AreEqual(ritem.DataItem.DateTime, altItem.DataItem.DateTime);
            Assert.AreEqual(0, seekStatusListener.Statuses.Count);
        }
        ++n;
    }
    Assert.AreEqual(lastTime, ritem.DataItem.DateTime, "Last item has unexpected timestamp");
    Assert.AreEqual(itemsCount, n, "Unexpected number of data items read");

    // Seek into the middle of the data after having read to the end.
    DateTime timestampToSeek = firstTime.AddHours(desiredFileSize / 3 * itemsIntervalHours);
    reader.Seek(timestampToSeek);
    Assert.IsTrue(reader.HasData, "Repeated Seek after reading all failed");
    ritem = reader.Read();
    Assert.IsNotNull(ritem);
    Assert.AreEqual(timestampToSeek, ritem.DataItem.DateTime, "First read item timestamp unexpected");

    // Reverse direction mid-stream and read back to the very first item.
    reader.Direction = bfs.Repository.Util.EnumerationDirection.Backwards;
    Assert.IsTrue(reader.HasData, "No data after reversing in the middle of data");
    //ritem = reader.Read();
    //Assert.AreEqual<DateTime>(timestampToSeek, ritem.DataItem.DateTime
    //	, "First read item timestamp unexpected after changing direction");
    n = 0;
    altReader = null;
    while (reader.HasData)
    {
        if (n > 0 && n % 100 == 0)
        {
            // Dispose the previous alternative reader before creating a new one
            // from the current (backwards) position.
            if (altReader != null)
            {
                altReader.Dispose();
            }
            altReader = Repository.ObjectFactory.GetReader(reader.Position, seekStatusListener.HanldeStatus);
        }
        ritem = reader.Read();
        Assert.IsNotNull(ritem, "reader.Read() returned null after returning true from HasData");
        // Timestamps must now decrease by one interval per item.
        Assert.AreEqual(timestampToSeek.AddHours(-n * itemsIntervalHours), ritem.DataItem.DateTime);
        if (altReader != null)
        {
            IDataItemRead altItem = altReader.Read();
            Assert.AreEqual(ritem.DataItem.DateTime, altItem.DataItem.DateTime);
        }
        ++n;
    }
    Assert.AreEqual(firstTime, ritem.DataItem.DateTime, "Did not pick up first item after reversing");

    // reversing after reaching end
    reader.Direction = bfs.Repository.Util.EnumerationDirection.Forwards;
    ritem = reader.Read();
    Assert.IsNotNull(ritem, "Did not read firts item reversing after reaching end");
    Assert.AreEqual(firstTime, ritem.DataItem.DateTime, "Did not pick up first item after reversing after reaching end");

    // cleanup
    //targetFolder.Delete(true, false);
    //Assert.IsFalse(Directory.Exists(targetFolderPath), "Test repo directory not removed from disk by Delete()");
}
// One-time fixture setup (guarded so it runs only once): populates the fixture
// folder with test data, counts the resulting data files into _expectedFileCount,
// then empties one data folder file-by-file (_emptyDataFolder) and deletes another
// whole data folder (_deletedDataFolder) so later tests can verify gap handling.
public void MyClassInitialize()
{
    // NOTE(review): locking on GetType() uses a publicly reachable object as the
    // monitor; a private static readonly object would be safer — confirm no other
    // code locks on this Type before changing.
    lock (GetType())
    {
        // _emptyDataFolder doubles as the "already initialized" flag.
        if (_emptyDataFolder == null)
        {
            const int itemsCount = 100000;
            _daysPerFile = ((double)_dataItemsPerFile) * _itemsIntervalMinutes / 60.0 / 24.0; // 200 * 20 minutes = 8000 minutes per file (5.55556 days)
            DateTime firstTime = DateTime.Now.AddDays(-10);
            _firstDataItemTime = firstTime;
            //_expectedFileCount = (int)Math.Ceiling((double)itemsCount / (double)_dataItemsPerFile);
            IFolder targetFolder = (IFolder)FixtureRootRepoFolder;
            string targetFolderPath = targetFolder.FullPath;
            targetFolder.Properties.DesiredItemsPerFile = _dataItemsPerFile;

            // Write itemsCount items, one every _itemsIntervalMinutes, starting 10 days ago.
            using (IRepositoryWriter writer = targetFolder.GetWriter())
            {
                DateTime lastTime = DateTime.MinValue;
                int n;
                for (n = 0; n < itemsCount; ++n)
                {
                    Mock.TestDataItem item = Mock.TestDataItem.GetTestItem(n);
                    lastTime = firstTime.AddMinutes(n * _itemsIntervalMinutes);
                    item.DateTime = lastTime;
                    writer.Write(item);
                }
                _lastDataItemTime = lastTime;
                writer.Flush();
                writer.Close();
            }

            // Count actual data files by walking the file chain; the loop body is
            // intentionally empty — counting happens in the iterator expression.
            for (
                var dataFile = targetFolder.RootDataFolder.FindFirstDataFile(false);
                dataFile != null;
                dataFile = dataFile.GetNext(false), ++_expectedFileCount
                )
            { }
            Console.WriteLine("Expected file count enumerated via RepositoryFile: {0}", _expectedFileCount);
            // data folder boundaries may split data files thus extra ones
            Assert.GreaterOrEqual(_expectedFileCount, (int)Math.Ceiling((double)itemsCount / (double)_dataItemsPerFile), "Data file count unexpected");

            // creating empty folder
            // Seek roughly one third into the data and empty that folder file-by-file,
            // keeping _expectedFileCount in sync.
            IRepositoryFile file = targetFolder.RootDataFolder.Seek(firstTime.AddMinutes(itemsCount * _itemsIntervalMinutes / 3), false);
            _emptyDataFolder = file.ContainingFolder;
            for (
                file = _emptyDataFolder.FindFirstDataFile(false);
                file != null && file.ContainingFolder == _emptyDataFolder;
                file = file.GetNext(false))
            {
                file.Delete();
                --_expectedFileCount;
            }
            Assert.AreEqual(0, _emptyDataFolder.DataFileBrowser.FileCount);
            Console.WriteLine("Expected file count after removing file by file: {0}", _expectedFileCount);
            //
            // Pick a level-1 data folder two siblings away, record its descriptor,
            // subtract its files from the expected count, then delete it whole.
            IDataFolder dfolderToDelete =
                _emptyDataFolder.ParentDataFolder.GetNextSiblingInTree(false).GetNextSiblingInTree(false);
            Assert.AreEqual(1, dfolderToDelete.Level);
            _deletedDataFolder = new RepoFileContainerDescriptor()
            {
                Start = dfolderToDelete.Start,
                End = dfolderToDelete.End,
                Level = dfolderToDelete.Level,
                RelativePath = dfolderToDelete.RelativePath
            };
            _expectedFileCount -= dfolderToDelete.GetSubfolders(DateTime.MinValue, false).Sum((f) => f.DataFileBrowser.FileCount);
            Console.WriteLine("Expected file count after removing data folder {0}: {1}", dfolderToDelete.PathInRepository, _expectedFileCount);
            Console.WriteLine("Removing folder {0}", dfolderToDelete.PathInRepository);
            dfolderToDelete.Delete(false);
            Assert.IsFalse(dfolderToDelete.Exists);
        }
    } //lock
}
// Simulates a writer crash/restart: writes items through a standalone writer,
// periodically cross-checks GetUnsavedItems() against the folders' flushed state,
// and at a random midpoint abandons the writer, re-creates it and replays the
// unsaved items — finally verifying no data was lost.
public void TestUnsavedItemsWithInterruption()
{
    const string subFolderName = "TestUnsavedItemsWithInterruption";
    const int desiredFileSize = 100;
    Mock.MultiLevelDataRouter router = new Mock.MultiLevelDataRouter(3, 2);
    Repository.RootFolder.GetDescendant(FixtureRootRepoFolder.LogicalPath, false).CreateSubfolder(subFolderName);
    IRepositoryWriter writer = GetStandaloneWriter(subFolderName, desiredFileSize, router);
    const int itemCount = 100000;
    const int intervalMinutes = 1;
    // Items cycle through the router's subtree folders, so consecutive items in the
    // SAME folder are this many minutes apart.
    int intervalSameFolderMinutes = intervalMinutes * router.SubtreeFolderCount;
    IDataItem[] data = GetTestData(itemCount, DateTime.Now, intervalMinutes);
    const int checkIntervalItemCountBase = 57;
    Assume.That(checkIntervalItemCountBase > router.SubtreeFolderCount);
    var random = new Random();
    int nextCheckCount = checkIntervalItemCountBase;
    // Restart point: somewhere in the middle fifth-ish of the stream, randomized.
    int stopAndRestartCounter = itemCount / 2 + random.Next(-itemCount / 5, itemCount / 5);
    for (int n = 0; n < itemCount; ++n)
    {
        writer.Write(dataItem: data[n]);
        if (n == nextCheckCount)
        {
            // Jitter the next checkpoint by +/-10 around the base interval.
            nextCheckCount = nextCheckCount + checkIntervalItemCountBase + random.Next(20) - 10;
            IDictionary<string, IList<IDataItem>> unsavedItemsDict = writer.GetUnsavedItems();
            Assert.AreEqual(router.SubtreeFolderCount, unsavedItemsDict.Count);
            Dictionary<string, DateTime> lastSavedTimestamps = new Dictionary<string, DateTime>();
            foreach (KeyValuePair<string, IList<IDataItem>> pair in unsavedItemsDict)
            {
                // Dictionary key is the full logical path; strip the writer folder's
                // path prefix to get the path relative to the writer's folder.
                string relativePath = pair.Key.Substring(writer.Folder.LogicalPath.Length);
                IRepositoryFolder sourceFolder = writer.Folder.GetDescendant(relativePath, false);
                Assert.IsNotNull(sourceFolder);
                DateTime lastFlushedTimestamp = sourceFolder.LastTimestamp;
                if (pair.Value.Count > 0)
                {
                    if (lastFlushedTimestamp > DateTime.MinValue)
                    {
                        // First unsaved item must be the one right after the last flushed item.
                        Assert.AreEqual(lastFlushedTimestamp.AddMinutes(intervalSameFolderMinutes), pair.Value[0].DateTime);
                    }
                    else
                    {
                        Assert.Less(n, desiredFileSize * (router.SubtreeFolderCount + 1), "Data must have been flushed by now due to desired file size");
                    }
                    lastSavedTimestamps[relativePath] = pair.Value[pair.Value.Count - 1].DateTime;
                }
                else
                {
                    Assert.AreNotEqual(DateTime.MinValue, lastFlushedTimestamp);
                    lastSavedTimestamps[relativePath] = lastFlushedTimestamp;
                }
            }
            // Per-folder "last seen" timestamps, sorted, must form a run one interval
            // apart, ending at the timestamp of the item just written.
            DateTime lastSavedTimestampTotal = lastSavedTimestamps.Values.Max();
            List<DateTime> lastSavedPerFolder = lastSavedTimestamps.Values.ToList();
            lastSavedPerFolder.Sort();
            for (int j = 0; j < router.SubtreeFolderCount - 2; ++j)
            {
                Assert.AreEqual(lastSavedPerFolder[j].AddMinutes(intervalMinutes), lastSavedPerFolder[j + 1]);
            }
            Assert.AreEqual(lastSavedPerFolder[router.SubtreeFolderCount - 1], data[n].DateTime);
        } // if (n == nextCheckCount)
        if (n == stopAndRestartCounter)
        {
            // Simulated interruption: capture unsaved items, abandon the old writer
            // (NOTE(review): it is never closed/disposed — presumably intentional to
            // mimic a crash; confirm no lock is held that would block the new writer),
            // create a fresh standalone writer and replay the unsaved items into it.
            var unsavedItemsDict = writer.GetUnsavedItems();
            writer = GetStandaloneWriter(subFolderName, desiredFileSize, router);
            var unsavedList = MergeUnsavedItems(unsavedItemsDict);
            foreach (var dataItem in unsavedList)
            {
                writer.Write(dataItem);
            }
        }
    }
    IRepositoryFolder targetFolder = writer.Folder;
    writer.Close();
    // Despite the mid-stream restart, every generated item must be readable.
    CheckAllDataInFolder(targetFolder, data);
}
// Verifies GetUnsavedItems() semantics inside an ambient System.Transactions scope:
// Flush() must NOT reduce the unsaved count while the transaction is pending, and
// the count must drop to zero only after the scope completes (commits).
public void GetUnsavedItemsAmbientTransactionTest()
{
    const int subfolderCount = 3;
    const string subFolderName = "GetUnsavedItemsAmbientTransactionTest";
    IRepositoryFolder targetFolder = FixtureRootRepoFolder.CreateSubfolder(subFolderName);
    IRepositoryWriter writer = targetFolder.GetWriter();
    targetFolder.Properties.DesiredItemsPerFile = 100;
    IDataRouter dataRouter = new Mock.NumberedDataRouter(subfolderCount);
    writer.DataRouter = dataRouter;
    writer.AllowSubfoldersCreation = true;
    string fullPath = targetFolder.FullPath;
    Mock.TestDataItem item;
    IDictionary<string, IList<IDataItem>> unsavedItems;
    using (TransactionScope scope = new TransactionScope())
    {
        Assert.IsNotNull(Transaction.Current);
        const int count = 10000;
        for (int n = 0; n < count; ++n)
        {
            item = Mock.TestDataItem.GetTestItem(n);
            writer.Write(item);
            if ((n + 1) % 134 == 0)
            {
                // Flush inside the transaction: data is written to the transaction,
                // so the unsaved total must still equal everything written so far.
                writer.Flush();
                unsavedItems = writer.GetUnsavedItems();
                Assert.IsNotNull(unsavedItems);
                Assert.AreEqual(Math.Min(n + 1, subfolderCount), unsavedItems.Count
                    , "Unsaved items dictionary entry count is not equal to the direct writers count");
                Assert.AreEqual(n + 1, unsavedItems.Values.Sum((l) => l.Count)
                    , "Total number of unsaved items after flush must not change if in ambient transaction");
            }
        }
        // Before committing, all written items are still reported as unsaved.
        unsavedItems = writer.GetUnsavedItems();
        Assert.IsNotNull(unsavedItems);
        Assert.AreEqual(subfolderCount, unsavedItems.Count
            , "Unsaved items dictionary entry count is not equal to the direct writers count");
        Assert.AreEqual(count, unsavedItems.Values.Sum((l) => l.Count)
            , "Total number of unsaved items must equal number of added items if in ambient transaction");
        scope.Complete();
    }
    // Brief pause to let the commit finish — presumably completion is asynchronous;
    // TODO confirm and replace the sleep with a deterministic wait if possible.
    Thread.Sleep(50);
    unsavedItems = writer.GetUnsavedItems();
    Assert.IsNotNull(unsavedItems);
    Assert.AreEqual(subfolderCount, unsavedItems.Count
        , "Unsaved items dictionary entry count is not equal to the direct writers count");
    Assert.AreEqual(0, unsavedItems.Values.Sum((l) => l.Count)
        , "Total number of unsaved items after committing ambient transaction must be 0");
    writer.Close();
}