/// <summary>
/// Writes the requested number of sequentially numbered mock test items to the writer and flushes it.
/// </summary>
/// <param name="writer">Target repository writer; assumed already open — caller owns its lifetime.</param>
/// <param name="count">How many test items to generate and write.</param>
public static void Populate(IRepositoryWriter writer, int count)
{
    for (int itemIndex = 0; itemIndex < count; ++itemIndex)
    {
        writer.Write(Mock.TestDataItem.GetTestItem(itemIndex));
    }

    writer.Flush();
}
/// <summary>
/// End-to-end reader test: populates a fresh subfolder through a routing writer (3 subfolders),
/// then verifies forward reading, position-based alternative readers, seeking, and direction reversal.
/// </summary>
public void QuickReaderTest()
{
    string targetFolderName = "QuickReaderTest";

    // Start from a clean slate: remove any leftover folder from a previous run.
    IRepositoryFolder targetFolder = FixtureRootRepoFolder.GetSubFolder(targetFolderName);
    if (targetFolder != null)
    {
        targetFolder.Delete(true, true);
    }
    targetFolder = FixtureRootRepoFolder.CreateSubfolder(targetFolderName);
    // Kept for the commented-out cleanup assertion at the bottom of the method.
    string targetFolderPath = targetFolder.FullPath;

    const int subfolderCount = 3;
    const int itemsIntervalHours = 1;
    const int desiredFileSize = 2000;

    targetFolder.Properties.DesiredItemsPerFile = desiredFileSize;

    IRepositoryWriter writer = targetFolder.GetWriter();
    // Router distributes items across subfolderCount subfolders by item number.
    IDataRouter dataRouter = new NumberedDataRouter(subfolderCount);
    writer.DataRouter = dataRouter;
    writer.AllowSubfoldersCreation = true;

    DateTime firstTime = DateTime.Now.AddDays(-10);
    DateTime lastTime = DateTime.MinValue;
    int itemsCount = 100000;
    int n;

    // Write itemsCount items with strictly increasing timestamps, one hour apart.
    for (n = 0; n < itemsCount; ++n)
    {
        Mock.TestDataItem item = Mock.TestDataItem.GetTestItem(n);
        lastTime = firstTime.AddHours(n * itemsIntervalHours);
        item.DateTime = lastTime;
        writer.Write(item);
    }
    writer.Flush();
    writer.Close();

    // will test lazy loading
    targetFolder.UnloadSubfolders();

    Assert.IsTrue(targetFolder.SubFolders.Count == subfolderCount
        , "Router had to make writer create the configured number of subfolders");

    // Item 0 is routed deterministically; its subfolder must report the first timestamp.
    IRepositoryFolder firstItemSubfolder = targetFolder.GetDescendant(
        dataRouter.GetRelativePath(Mock.TestDataItem.GetTestItem(0)), false);

    Assert.AreEqual(firstTime, firstItemSubfolder.FirstTimestamp
        , "First item timestamp reported incorrectly by Folder.FirstTimestamp");
    Assert.AreEqual(firstTime, targetFolder.GetFirstItemTimestamp(true, false)
        , "First item timestamp reported incorrectly by Folder.GetFirstItemTimestamp");

    IRepositoryReader reader = targetFolder.GetReader(firstTime, true);
    Assert.IsTrue(reader.HasData, "Folder just populated but no data can be read");

    IDataItemRead ritem = null;
    n = 0;
    IRepositoryReader altReader = null;
    SeekStatusListener seekStatusListener = new SeekStatusListener();

    // Forward pass over all items; every 100 items open an alternative reader at the current
    // position and verify it reads the same item as the primary reader.
    while (reader.HasData)
    {
        if (n > 0 && n % 100 == 0)
        {
            // FIX: dispose the previous alternative reader before replacing it
            // (the backward loop below already did this; the forward loop leaked them).
            if (altReader != null)
            {
                altReader.Dispose();
            }
            altReader = Repository.ObjectFactory.GetReader(reader.Position, seekStatusListener.HanldeStatus);
        }

        ritem = reader.Read();
        Assert.IsNotNull(ritem, "reader.Read() returned null after returning true from HasData");
        Assert.AreNotSame(targetFolder, ritem.RepositoryFolder, "Router failed");
        Assert.IsInstanceOf<Mock.TestDataItem>(ritem.DataItem, "Data item read from repository is of different type");
        Assert.AreEqual(firstTime.AddHours(n * itemsIntervalHours), ritem.DataItem.DateTime);
        ((Mock.TestDataItem)ritem.DataItem).Check(n);

        if (altReader != null)
        {
            IDataItemRead altItem = altReader.Read();
            Assert.AreEqual(ritem.DataItem.DateTime, altItem.DataItem.DateTime);
            // Restoring from an exact position must not report any seek status issues.
            Assert.AreEqual(0, seekStatusListener.Statuses.Count);
        }

        ++n;
    }

    Assert.AreEqual(lastTime, ritem.DataItem.DateTime, "Last item has unexpected timestamp");
    Assert.AreEqual(itemsCount, n, "Unexpected number of data items read");

    // FIX: release the last alternative reader from the forward pass.
    if (altReader != null)
    {
        altReader.Dispose();
    }

    // Seek back into the middle of the data and read forward one item.
    DateTime timestampToSeek = firstTime.AddHours(desiredFileSize / 3 * itemsIntervalHours);
    reader.Seek(timestampToSeek);
    Assert.IsTrue(reader.HasData, "Repeated Seek after reading all failed");

    ritem = reader.Read();
    Assert.IsNotNull(ritem);
    Assert.AreEqual(timestampToSeek, ritem.DataItem.DateTime, "First read item timestamp unexpected");

    // Reverse direction mid-stream and read back to the very first item.
    reader.Direction = bfs.Repository.Util.EnumerationDirection.Backwards;
    Assert.IsTrue(reader.HasData, "No data after reversing in the middle of data");

    //ritem = reader.Read();
    //Assert.AreEqual<DateTime>(timestampToSeek, ritem.DataItem.DateTime
    //	, "First read item timestamp unexpected after changing direction");

    n = 0;
    altReader = null;
    while (reader.HasData)
    {
        if (n > 0 && n % 100 == 0)
        {
            if (altReader != null)
            {
                altReader.Dispose();
            }
            altReader = Repository.ObjectFactory.GetReader(reader.Position, seekStatusListener.HanldeStatus);
        }

        ritem = reader.Read();
        Assert.IsNotNull(ritem, "reader.Read() returned null after returning true from HasData");
        // Walking backwards: timestamps decrease one hour per item from the seek point.
        Assert.AreEqual(timestampToSeek.AddHours(-n * itemsIntervalHours), ritem.DataItem.DateTime);

        if (altReader != null)
        {
            IDataItemRead altItem = altReader.Read();
            Assert.AreEqual(ritem.DataItem.DateTime, altItem.DataItem.DateTime);
        }

        ++n;
    }

    Assert.AreEqual(firstTime, ritem.DataItem.DateTime, "Did not pick up first item after reversing");

    // FIX: release the last alternative reader from the backward pass.
    if (altReader != null)
    {
        altReader.Dispose();
    }

    // reversing after reaching end
    reader.Direction = bfs.Repository.Util.EnumerationDirection.Forwards;
    ritem = reader.Read();
    Assert.IsNotNull(ritem, "Did not read first item reversing after reaching end");
    Assert.AreEqual(firstTime, ritem.DataItem.DateTime, "Did not pick up first item after reversing after reaching end");

    // FIX: the primary reader was never disposed.
    reader.Dispose();

    // cleanup
    //targetFolder.Delete(true, false);
    //Assert.IsFalse(Directory.Exists(targetFolderPath), "Test repo directory not removed from disk by Delete()");
}
/// <summary>
/// One-time lazy fixture initialization: populates the fixture root folder with 100000 test items,
/// counts the resulting data files into _expectedFileCount, empties one data folder (file by file)
/// and deletes one level-1 data folder outright, recording both for later tests.
/// Guarded by a lock + null check so concurrent/repeated calls initialize only once.
/// </summary>
public void MyClassInitialize()
{
    // NOTE(review): lock (GetType()) locks on a publicly reachable Type object — any other code
    // locking the same Type can deadlock/contend. Prefer a private static readonly gate field
    // (declared at class level, outside this method).
    lock (GetType())
    {
        // Double-checked lazy init: _emptyDataFolder doubles as the "already initialized" flag.
        if (_emptyDataFolder == null)
        {
            const int itemsCount = 100000;
            _daysPerFile = ((double)_dataItemsPerFile) * _itemsIntervalMinutes / 60.0 / 24.0; // 200 * 20 minutes = 8000 minutes per file (5.55556 days)
            // NOTE(review): DateTime.Now makes the data window depend on wall-clock/local time;
            // consider DateTime.UtcNow or an injected time source for reproducibility.
            DateTime firstTime = DateTime.Now.AddDays(-10);
            _firstDataItemTime = firstTime;
            //_expectedFileCount = (int)Math.Ceiling((double)itemsCount / (double)_dataItemsPerFile);
            IFolder targetFolder = (IFolder)FixtureRootRepoFolder;
            string targetFolderPath = targetFolder.FullPath;
            targetFolder.Properties.DesiredItemsPerFile = _dataItemsPerFile;

            // Write itemsCount items with timestamps _itemsIntervalMinutes apart, starting at firstTime.
            using (IRepositoryWriter writer = targetFolder.GetWriter())
            {
                DateTime lastTime = DateTime.MinValue;
                int n;
                for (n = 0; n < itemsCount; ++n)
                {
                    Mock.TestDataItem item = Mock.TestDataItem.GetTestItem(n);
                    lastTime = firstTime.AddMinutes(n * _itemsIntervalMinutes);
                    item.DateTime = lastTime;
                    writer.Write(item);
                }
                _lastDataItemTime = lastTime;
                writer.Flush();
                writer.Close();
            }

            // Count all data files by walking the file chain; empty loop body — the counting
            // happens in the for-iterator (++_expectedFileCount).
            for (
                var dataFile = targetFolder.RootDataFolder.FindFirstDataFile(false);
                dataFile != null;
                dataFile = dataFile.GetNext(false), ++_expectedFileCount
            ) { }

            Console.WriteLine("Expected file count enumerated via RepositoryFile: {0}", _expectedFileCount);

            // data folder boundaries may split data files thus extra ones
            Assert.GreaterOrEqual(_expectedFileCount, (int)Math.Ceiling((double)itemsCount / (double)_dataItemsPerFile),
                "Data file count unexpected");

            // creating empty folder
            // Pick the data folder containing the item ~1/3 into the data range, then delete its
            // files one by one, keeping the running expected-file-count in sync.
            IRepositoryFile file = targetFolder.RootDataFolder.Seek(
                firstTime.AddMinutes(itemsCount * _itemsIntervalMinutes / 3), false);
            _emptyDataFolder = file.ContainingFolder;
            // NOTE(review): assumes GetNext(false) remains valid after Delete() on the current
            // file — presumably the chain is navigated before deletion takes effect; confirm.
            for (
                file = _emptyDataFolder.FindFirstDataFile(false);
                file != null && file.ContainingFolder == _emptyDataFolder;
                file = file.GetNext(false))
            {
                file.Delete();
                --_expectedFileCount;
            }
            Assert.AreEqual(0, _emptyDataFolder.DataFileBrowser.FileCount);
            Console.WriteLine("Expected file count after removing file by file: {0}", _expectedFileCount);

            //
            // Pick a level-1 data folder two siblings past the emptied folder's parent and delete
            // it wholesale, recording its descriptor so tests can verify it is gone.
            IDataFolder dfolderToDelete =
                _emptyDataFolder.ParentDataFolder.GetNextSiblingInTree(false).GetNextSiblingInTree(false);
            Assert.AreEqual(1, dfolderToDelete.Level);
            // Snapshot the folder's identity before deleting it.
            _deletedDataFolder = new RepoFileContainerDescriptor()
            {
                Start = dfolderToDelete.Start,
                End = dfolderToDelete.End,
                Level = dfolderToDelete.Level,
                RelativePath = dfolderToDelete.RelativePath
            };
            // Subtract every file contained in the folder's subtree from the expected count.
            _expectedFileCount -= dfolderToDelete.GetSubfolders(DateTime.MinValue, false)
                .Sum((f) => f.DataFileBrowser.FileCount);
            Console.WriteLine("Expected file count after removing data folder {0}: {1}",
                dfolderToDelete.PathInRepository, _expectedFileCount);
            Console.WriteLine("Removing folder {0}", dfolderToDelete.PathInRepository);
            dfolderToDelete.Delete(false);
            Assert.IsFalse(dfolderToDelete.Exists);
        }
    } //lock
}