public void TestLargeDocumentSplitOk()
{
    // Purpose: a document close to (but under) MongoDB's 16MB limit should be
    // queued and flushed without error, and every message acknowledged.
    GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions();

    // Effectively disable the timed flush so only the queue-size trigger fires
    options.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000;

    var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases.ExtractionStoreOptions, "largeDocumentTest");
    var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, adapter, 2, null);

    var mockModel = Mock.Of<IModel>();
    processor.Model = mockModel;

    // 15MB of text keeps the serialized document safely under the 16MB BSON limit
    var dataset = new DicomDataset
    {
        new DicomUnlimitedText(DicomTag.SelectorUTValue, new string('x', 15 * 1024 * 1024))
    };

    var largeMessage = new DicomFileMessage
    {
        SeriesInstanceUID = "",
        StudyInstanceUID = "",
        SOPInstanceUID = "",
        NationalPACSAccessionNumber = "",
        DicomFilePath = "",
        DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(dataset)
    };

    processor.AddToWriteQueue(largeMessage, new MessageHeader(), 1);

    // Max queue size is 2, so this second message triggers the flush
    processor.AddToWriteQueue(largeMessage, new MessageHeader(), 2);

    // BUG FIX: the test previously asserted nothing. Verify both messages were
    // written and acknowledged (same pattern as TestLargeMessageNack).
    Assert.True(processor.AckCount == 2);
}
public void TestImageDocumentFormat()
{
    // Purpose: a single image message processed through ImageMessageProcessor
    // should land in the per-modality ("_SR") collection with the expected format.
    GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions();

    // Effectively disable the timed flush so only the queue-size trigger fires
    options.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000;

    string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestImageDocumentFormat");
    var testAdapter = new MongoDbAdapter("TestImageDocumentFormat", options.MongoDatabases.DicomStoreOptions, collectionName);

    var callbackUsed = false;
    Action<Exception> exceptionCallback = (exception) => { callbackUsed = true; };

    var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback)
    {
        Model = Mock.Of<IModel>()
    };

    var header = new MessageHeader();

    // Max queue size set to 1 so will immediately process this
    processor.AddToWriteQueue(_helper.TestImageMessage, header, 1);

    Assert.False(callbackUsed);
    Assert.True(processor.AckCount == 1);

    IMongoCollection<BsonDocument> imageCollection =
        _helper.TestDatabase.GetCollection<BsonDocument>(collectionName + "_SR");

    Assert.True(imageCollection.CountDocuments(new BsonDocument()) == 1);

    // BUG FIX: was FindAsync(...).Result — blocking on an async call is
    // deadlock-prone and needless here; use the synchronous Find like the
    // sibling tests do. Single() also asserts exactly one document exists.
    BsonDocument doc = imageCollection.Find(FilterDefinition<BsonDocument>.Empty).Single();

    Validate(_helper.TestImageMessage, header, doc);
}
public void TestSeriesDocumentFormat()
{
    // Purpose: a single series message processed through SeriesMessageProcessor
    // should land in the target collection with the expected document format.
    GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions();

    // CONSISTENCY FIX: was int.MaxValue — every sibling test uses
    // int.MaxValue / 1000, which avoids overflow if the flush time is
    // ever scaled to milliseconds. Intent (disable timed flush) unchanged.
    options.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000;

    string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestSeriesDocumentFormat");
    var testAdapter = new MongoDbAdapter("TestSeriesDocumentFormat", options.MongoDatabases.DicomStoreOptions, collectionName);

    var callbackUsed = false;
    Action<Exception> exceptionCallback = (exception) => { callbackUsed = true; };

    var processor = new SeriesMessageProcessor(options.MongoDbPopulatorOptions, testAdapter, 1, exceptionCallback)
    {
        Model = Mock.Of<IModel>()
    };

    // Max queue size set to 1 so will immediately process this
    processor.AddToWriteQueue(_helper.TestSeriesMessage, new MessageHeader(), 1);

    Assert.False(callbackUsed);
    Assert.True(processor.AckCount == 1);

    IMongoCollection<BsonDocument> collection = _helper.TestDatabase.GetCollection<BsonDocument>(collectionName);

    Assert.True(collection.CountDocuments(new BsonDocument()) == 1);

    BsonDocument document = collection.Find(_ => true).ToList()[0];

    Validate(_helper.TestSeriesMessage, document);
}
/// <summary>
/// Writes all messages currently in the queue to MongoDb and acknowledges
/// </summary>
protected override void ProcessQueue()
{
    // Will happen when ProcessQueue is called due to the timer, before we receive our first message
    if (Model == null)
    {
        return;
    }

    lock (LockObj)
    {
        if (ToProcess.Count == 0)
        {
            return;
        }

        // FIX: log grammar — was "message to write"
        Logger.Info($"Queue contains {ToProcess.Count} messages to write");

        // Write each modality's documents as a separate batch so ACKs can be
        // sent per-chunk as each write succeeds
        foreach ((string modality, List<BsonDocument> modalityDocs) in
            MongoModalityGroups.GetModalityChunks(ToProcess.Select(x => x.Item1).ToList()))
        {
            Logger.Debug($"Attempting to write {modalityDocs.Count} documents of modality {modality}");

            // Retry the write up to FailedWriteLimit times before giving up
            while (FailedWriteAttempts < FailedWriteLimit)
            {
                WriteResult imageWriteResult = MongoDbAdapter.WriteMany(modalityDocs, modality);

                if (imageWriteResult == WriteResult.Success)
                {
                    Logger.Debug($"Wrote {modalityDocs.Count} documents successfully, sending ACKs");

                    // ACK only the queue entries whose document is in this chunk.
                    // Hopefully this uses ReferenceEquals, otherwise will be slow...
                    foreach (ulong deliveryTag in ToProcess
                        .Where(x => modalityDocs.Contains(x.Item1))
                        .Select(x => x.Item2))
                    {
                        Model.BasicAck(deliveryTag, false);
                    }

                    AckCount += modalityDocs.Count;
                    FailedWriteAttempts = 0;
                    break;
                }

                Logger.Warn($"Failed to write {FailedWriteAttempts + 1} time(s) in a row");

                if (++FailedWriteAttempts < FailedWriteLimit)
                {
                    continue;
                }

                throw new ApplicationException("Failed write attempts exceeded");
            }
        }

        // FIX: typo — was "continutig"
        Logger.Debug("Wrote and acknowledged all documents in queue. Clearing and continuing");
        ToProcess.Clear();
    }
}
/// <summary>
/// Looks up the enterprise adapter configuration and writes the given blocks to
/// MongoDB, dispatching on the block model type to pick the crypto adapter type.
/// </summary>
/// <param name="enterpriseAdapterId">Id of the enterprise adapter configuration to use.</param>
/// <param name="blocks">Blocks to write; element type selects the CryptoAdapterType.</param>
/// <returns>Response whose Status reflects success or failure; never throws.</returns>
public IResponse<NoValue> SendToMongoDb<T>(long enterpriseAdapterId, List<T> blocks)
{
    var response = new Response<NoValue>();

    try
    {
        var enterpriseAdapter = _enterpriseAdapterService.GetEnterpriseAdapter(enterpriseAdapterId);

        if (enterpriseAdapter.Status != StatusEnum.Success)
        {
            response.Status = StatusEnum.Error;
            return response;
        }

        var config = new Config
        {
            ConnString = new DbAdapter.ConnStringCreator
            {
                Server = enterpriseAdapter.Value.ServerIP,
                Port = enterpriseAdapter.Value.Port.ToString(),
                Uid = enterpriseAdapter.Value.Username,
                Pwd = enterpriseAdapter.Value.Password,
                Database = enterpriseAdapter.Value.DatabaseName
            }
        };

        var adapter = new MongoDbAdapter(_logger);

        if (typeof(T) == typeof(EthereumBlockModel))
        {
            response = adapter.SendToMongoDb(blocks, enterpriseAdapter.Value, config.ConnString.MongoDbConnString, CryptoAdapterType.Ethereum);
        }
        else if (typeof(T) == typeof(BitcoinBlockModel))
        {
            response = adapter.SendToMongoDb(blocks, enterpriseAdapter.Value, config.ConnString.MongoDbConnString, CryptoAdapterType.Bitcoin);
        }
        else if (typeof(T) == typeof(NeoBlockModel))
        {
            response = adapter.SendToMongoDb(blocks, enterpriseAdapter.Value, config.ConnString.MongoDbConnString, CryptoAdapterType.NEO);
        }
        else if (typeof(T) == typeof(LitecoinBlockModel))
        {
            response = adapter.SendToMongoDb(blocks, enterpriseAdapter.Value, config.ConnString.MongoDbConnString, CryptoAdapterType.Litecoin);
        }
        else
        {
            // BUG FIX: previously an unsupported T fell through silently and the
            // default-status response was returned as if nothing went wrong
            response.Status = StatusEnum.Error;
            response.Message = $"Unsupported block type: {typeof(T).Name}";
        }
    }
    catch (Exception ex)
    {
        response.Status = StatusEnum.Error;
        response.Message = ex.Message;
        // FIX: was "block:{blocks}" which only prints List<T>'s type name —
        // log the count, which is actually useful for diagnosis
        _logger.Information($"DbAdapterService.SendToMongoDb(jobid: {enterpriseAdapterId}, blockCount: {blocks?.Count ?? 0})");
        _logger.Error(ex.Message);
    }

    return response;
}
/// <summary>
/// Writes all messages currently in the queue to MongoDb and acknowledges
/// </summary>
protected override void ProcessQueue()
{
    // Nothing to do until the first message has arrived and set the channel
    if (Model == null)
    {
        return;
    }

    lock (LockObj)
    {
        int queued = ToProcess.Count;

        if (queued == 0)
        {
            return;
        }

        Logger.Debug($"SeriesMessageProcessor: Queue contains {queued} message to write");

        IEnumerable<string> directories = ToProcess
            .Select(t => t.Item1.GetValue("header")["DirectoryPath"].AsString)
            .Distinct();
        Logger.Trace($"Writing series from directories: {string.Join(", ", directories)}");

        WriteResult writeResult = MongoDbAdapter.WriteMany(ToProcess.Select(t => t.Item1).ToList());

        // Result => Need to differentiate between connection loss and error in the data to be written
        // As well as making sure either all are written or none
        if (writeResult != WriteResult.Success)
        {
            Logger.Warn($"SeriesMessageProcessor: Failed to write {FailedWriteAttempts + 1} time(s) in a row");

            if (++FailedWriteAttempts < FailedWriteLimit)
            {
                return;
            }

            throw new ApplicationException("Failed write attempts exceeded");
        }

        Logger.Debug($"SeriesMessageProcessor: Wrote {queued} messages successfully, sending ACKs");

        foreach (var entry in ToProcess)
        {
            Model.BasicAck(entry.Item2, false);
        }

        AckCount += queued;
        ToProcess.Clear();
        FailedWriteAttempts = 0;
    }
}
public void TestLargeMessageNack()
{
    // A document over MongoDB's 16MB limit must be rejected with an exception,
    // while one just under the threshold is accepted and acknowledged.
    GlobalOptions options = MongoDbPopulatorTestHelper.GetNewMongoDbPopulatorOptions();
    options.MongoDbPopulatorOptions.MongoDbFlushTime = int.MaxValue / 1000;

    var adapter = new MongoDbAdapter("ImageProcessor", options.MongoDatabases.ExtractionStoreOptions, "largeDocumentTest");
    var processor = new ImageMessageProcessor(options.MongoDbPopulatorOptions, adapter, 1, null)
    {
        Model = Mock.Of<IModel>()
    };

    // 16MB of text pushes the serialized document past the size limit
    var oversized = new DicomDataset
    {
        new DicomUnlimitedText(DicomTag.SelectorUTValue, new string('x', 16 * 1024 * 1024))
    };

    var message = new DicomFileMessage
    {
        SeriesInstanceUID = "",
        StudyInstanceUID = "",
        SOPInstanceUID = "",
        NationalPACSAccessionNumber = "",
        DicomFilePath = "",
        DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(oversized)
    };

    Assert.Throws<ApplicationException>(() => processor.AddToWriteQueue(message, new MessageHeader(), 1));

    // Should be ok, getting close to the threshold
    var nearLimit = new DicomDataset
    {
        new DicomUnlimitedText(DicomTag.SelectorUTValue, new string('x', 15 * 1024 * 1024 + 512))
    };

    message.DicomDataset = DicomTypeTranslater.SerializeDatasetToJson(nearLimit);

    processor.AddToWriteQueue(message, new MessageHeader(), 2);

    Assert.True(processor.AckCount == 1);
}
public MongoDbPopulatorMessageConsumer(MongoDbOptions mongoDbOptions, MongoDbPopulatorOptions populatorOptions, ConsumerOptions consumerOptions)
{
    // Select the processor implementation that matches the consumed message type
    Type messageType = typeof(T);

    if (messageType == typeof(DicomFileMessage))
    {
        var imageAdapter = new MongoDbAdapter("ImageMessageProcessor", mongoDbOptions, populatorOptions.ImageCollection);
        Processor = (IMessageProcessor<T>)new ImageMessageProcessor(populatorOptions, imageAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback);
    }
    else if (messageType == typeof(SeriesMessage))
    {
        var seriesAdapter = new MongoDbAdapter("SeriesMessageProcessor", mongoDbOptions, populatorOptions.SeriesCollection);
        Processor = (IMessageProcessor<T>)new SeriesMessageProcessor(populatorOptions, seriesAdapter, consumerOptions.QoSPrefetchCount, ExceptionCallback);
    }
    else
    {
        throw new ArgumentException("Message type " + messageType.Name + " not supported here");
    }

    ConsumerOptions = consumerOptions;
    Logger.Debug(_messageTypePrefix + "Constructed for " + messageType.Name);
}
public void TestBasicWrite()
{
    string collectionName = MongoDbPopulatorTestHelper.GetCollectionNameForTest("TestBasicWrite");
    var adapter = new MongoDbAdapter("TestApplication", _helper.Globals.MongoDatabases.DicomStoreOptions, collectionName);

    IMongoCollection<BsonDocument> storedCollection = _helper.TestDatabase.GetCollection<BsonDocument>(collectionName);

    // A single document should round-trip unchanged
    var testDoc = new BsonDocument { { "hello", "world" } };
    WriteResult singleResult = adapter.WriteMany(new List<BsonDocument> { testDoc });

    Assert.True(singleResult == WriteResult.Success);
    Assert.True(storedCollection.CountDocuments(new BsonDocument()) == 1);

    BsonDocument stored = storedCollection.Find(_ => true).ToList()[0];
    Assert.True(stored.Equals(testDoc));

    // A batch of 99 more documents should bring the total to 100
    var batch = new List<BsonDocument>();

    for (var i = 0; i < 99; i++)
    {
        batch.Add(new BsonDocument { { "hello", i } });
    }

    WriteResult batchResult = adapter.WriteMany(batch);

    Assert.True(batchResult == WriteResult.Success);
    Assert.True(storedCollection.CountDocuments(new BsonDocument()) == 100);
}
/// <summary>
/// Tests connectivity for the given enterprise adapter configuration, dispatching
/// on the adapter type. Source-direction adapters are additionally validated
/// against their parent table where the adapter supports it.
/// </summary>
/// <param name="enterpriseAdapter">Adapter configuration holding connection details.</param>
/// <returns>Response whose Status is Success or Error (with Message on error).</returns>
private IResponse<NoValue> TestConnection(EnterpriseAdapterModel enterpriseAdapter)
{
    var connectionResponse = new Response<NoValue>();

    var config = new Config
    {
        ConnString = new ConnStringCreator
        {
            Server = enterpriseAdapter.ServerIP,
            Port = enterpriseAdapter.Port.ToString(),
            Database = enterpriseAdapter.DatabaseName,
            Pwd = enterpriseAdapter.Password,
            Uid = enterpriseAdapter.Username
        }
    };

    IAdapter adapter;
    IResponse<NoValue> connectionStatus;

    switch (enterpriseAdapter.EnterpriseAdapter)
    {
        case AdapterTypeItemEnum.MSSQL:
            adapter = new MSSQLAdapter(_logger);
            connectionStatus = enterpriseAdapter.Direction == DirectionEnum.Source
                ? adapter.TestConnectivity(config.ConnString.MSSQLConnString, enterpriseAdapter.ParentTable)
                : adapter.TestConnectivity(config.ConnString.MSSQLConnString);
            break;

        case AdapterTypeItemEnum.MySQL:
            adapter = new MySQLAdapter(_logger);
            connectionStatus = enterpriseAdapter.Direction == DirectionEnum.Source
                ? adapter.TestConnectivity(config.ConnString.MySQLConnString, enterpriseAdapter.ParentTable)
                : adapter.TestConnectivity(config.ConnString.MySQLConnString);
            break;

        case AdapterTypeItemEnum.Oracle:
            adapter = new OracleAdapter(_logger);
            connectionStatus = enterpriseAdapter.Direction == DirectionEnum.Source
                ? adapter.TestConnectivity(config.ConnString.OracleConnString, enterpriseAdapter.ParentTable)
                : adapter.TestConnectivity(config.ConnString.OracleConnString);
            break;

        case AdapterTypeItemEnum.MongoDB:
            var mongoAdapter = new MongoDbAdapter(_logger);
            connectionStatus = mongoAdapter.TestConnection(config.ConnString.MongoDbConnString, enterpriseAdapter.DatabaseName);
            break;

        default:
            // BUG FIX: previously an unknown adapter type left connectionStatus null,
            // so the dereference below threw a NullReferenceException
            connectionResponse.Status = StatusEnum.Error;
            connectionResponse.Message = $"Unsupported adapter type: {enterpriseAdapter.EnterpriseAdapter}";
            return connectionResponse;
    }

    if (connectionStatus.Status != StatusEnum.Success)
    {
        connectionResponse.Status = StatusEnum.Error;
        connectionResponse.Message = connectionStatus.Message;
    }
    else
    {
        connectionResponse.Status = StatusEnum.Success;
    }

    return connectionResponse;
}