public async Task If_update_is_requested_with_no_metadata_access_tokens_sync_is_aborted()
{
    // Arrange: the harvested record grants no metadata access tokens at all.
    var archiveRecordId = "3457";
    var mutationId = 6626;
    var recordWithoutTokens = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = "Aip@DossierId" },
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string>() }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(recordWithoutTokens);

    // Act: mixed-case action string exercises case-insensitive action handling.
    await InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "UpDaTe" });

    // Wait for the consumer to finish.
    await syncArchiveRecordTask;

    // Assert: the sync was aborted, i.e. the mutation status was updated exactly once.
    harvestManager.Verify(e => e.UpdateMutationStatus(It.IsAny<MutationStatusInfo>()), Times.Once);
}
public async Task If_Package_is_valid_the_package_is_scheduled_for_sync()
{
    // Arrange: reading the package metadata succeeds and yields a valid package.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "478" };
    var mutationId = 777;
    var errorMessage = string.Empty;
    var metadataResult = new RepositoryPackageInfoResult
    {
        Valid = true,
        Success = true,
        ErrorMessage = errorMessage,
        PackageDetails = new RepositoryPackage { ArchiveRecordId = archiveRecord.ArchiveRecordId }
    };
    repositoryManager.Setup(e => e.ReadPackageMetadata(It.IsAny<string>(), It.IsAny<string>())).Returns(metadataResult);

    // Act
    await InputQueueSendEndpoint.Send<IArchiveRecordAppendPackageMetadata>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await readPackageMetadataTask;
    var context = await scheduleForPackageSyncTask;

    // Assert: the schedule message carries the same record id and mutation id.
    context.Message.Workload.ArchiveRecord.ArchiveRecordId.Should().Be(archiveRecord.ArchiveRecordId);
    context.Message.Workload.MutationId.Should().Be(mutationId);
}
public async Task If_Remove_succeeds_Sync_process_is_set_to_success()
{
    // Arrange: the index manager removes the record without throwing.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "32245" };
    var mutationId = 1243;
    indexManager.Setup(e => e.RemoveArchiveRecord(It.IsAny<ConsumeContext<IRemoveArchiveRecord>>()));

    // Act
    await InputQueueSendEndpoint.Send<IRemoveArchiveRecord>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await removeArchiveReocrdTask;
    var context = await archiveRecordRemovedTask;

    // Assert: the removed-event reports success and carries no error message.
    context.Message.ActionSuccessful.Should().Be(true);
    context.Message.MutationId.Should().Be(mutationId);
    context.Message.ErrorMessage.Should().Be(null);
}
public async Task If_AppendPackage_failed_Sync_process_is_set_to_failed()
{
    // Arrange: appending the package fails outright (neither valid nor successful).
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "345" };
    var mutationId = 666;
    var errorMessage = "Some error message";
    var failedResult = new RepositoryPackageResult { Valid = false, Success = false, ErrorMessage = errorMessage };
    repositoryManager.Setup(e => e.AppendPackageToArchiveRecord(It.IsAny<ArchiveRecord>(), It.IsAny<long>(), It.IsAny<int>()))
        .ReturnsAsync(failedResult);

    // Act
    await InputQueueSendEndpoint.Send<IArchiveRecordAppendPackage>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await appendPackageTask;
    var context = await archiveRecordUpdatedTask;

    // Assert: failure and error message are propagated into the updated-event.
    context.Message.ActionSuccessful.Should().Be(false);
    context.Message.MutationId.Should().Be(mutationId);
    context.Message.ErrorMessage.Should().Be(errorMessage);
}
public async Task If_Package_not_valid_Sync_process_is_set_to_failed()
{
    // Arrange: the metadata read succeeds technically, but the package itself is invalid.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "344" };
    var mutationId = 999;
    var errorMessage = "Some other error message";
    var invalidResult = new RepositoryPackageInfoResult { Valid = false, Success = true, ErrorMessage = errorMessage };
    repositoryManager.Setup(e => e.ReadPackageMetadata(It.IsAny<string>(), It.IsAny<string>())).Returns(invalidResult);

    // Act
    await InputQueueSendEndpoint.Send<IArchiveRecordAppendPackageMetadata>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await readPackageMetadataTask;
    var context = await archiveRecordUpdatedTask;

    // Assert: the sync is reported as failed with the repository's error message.
    context.Message.ActionSuccessful.Should().Be(false);
    context.Message.MutationId.Should().Be(mutationId);
    context.Message.ErrorMessage.Should().Be(errorMessage);
}
public async Task If_delete_is_requested_record_is_removed_from_index()
{
    // Arrange: harvested record without a primary data link.
    var archiveRecordId = "34599";
    var mutationId = 6616;
    var harvestedRecord = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = null }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(harvestedRecord);

    // Act
    await InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "delete" });

    // Wait for the results.
    await syncArchiveRecordTask;
    var context = await removeArchiveRemoveTask;

    // Assert: a remove command for the same record/mutation was issued.
    context.Message.ArchiveRecordId.Should().Be(archiveRecordId);
    context.Message.MutationId.Should().Be(mutationId);
}
public async Task If_Package_is_valid_extract_fulltext_is_initiated()
{
    // Arrange: appending the package succeeds with a valid package file.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "478" };
    var mutationId = 777;
    var errorMessage = string.Empty;
    var packageResult = new RepositoryPackageResult
    {
        Valid = true,
        Success = true,
        ErrorMessage = errorMessage,
        PackageDetails = new RepositoryPackage { PackageFileName = "need a file name.whatever" }
    };
    repositoryManager.Setup(e => e.AppendPackageToArchiveRecord(It.IsAny<ArchiveRecord>(), It.IsAny<long>(), It.IsAny<int>()))
        .ReturnsAsync(packageResult);

    // Act
    await InputQueueSendEndpoint.Send<IArchiveRecordAppendPackage>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await appendPackageTask;
    var context = await extractFulltextTask;

    // Assert: fulltext extraction was initiated for the same record/mutation.
    context.Message.ArchiveRecord.ArchiveRecordId.Should().Be(archiveRecord.ArchiveRecordId);
    context.Message.MutationId.Should().Be(mutationId);
}
/// <summary>
/// Builds the specified archive record.
/// </summary>
/// <param name="archiveRecordId">The archive record identifier.</param>
/// <returns>ArchiveRecord if found, or null if no record with that id can be found in the database.</returns>
public virtual ArchiveRecord Build(string archiveRecordId)
{
    var stopwatch = Stopwatch.StartNew();

    // Fetch the base row once; it is reused for the metadata and display data below.
    var recordRow = dataProvider.GetArchiveRecordRow(Convert.ToInt64(archiveRecordId));
    if (recordRow == null)
    {
        // No row in the database means the record does not exist.
        return null;
    }

    var archiveRecord = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = LoadMetadata(archiveRecordId, recordRow),
        Security = LoadSecurityDetails(archiveRecordId)
    };
    // Display data depends on the already loaded metadata.
    archiveRecord.Display = LoadDisplayData(archiveRecordId, archiveRecord.Metadata, recordRow);

    stopwatch.Stop();
    Log.Information("Took {Time}ms to build ArchiveRecord for id {Id}", stopwatch.ElapsedMilliseconds, archiveRecordId);
    return archiveRecord;
}
public async Task If_update_is_requested_for_forbidden_archive_record_sync_is_aborted()
{
    // Arrange: the record's empty token list means nobody is allowed to see it.
    var archiveRecordId = "34527";
    var mutationId = 66267;
    var forbiddenRecord = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string>() }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(forbiddenRecord);
    harvestManager.Setup(e => e.UpdateMutationStatus(It.IsAny<MutationStatusInfo>())).Verifiable();

    // Act
    await InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "UpDaTe" });

    // Wait for the consumer to finish.
    await syncArchiveRecordTask;

    // Assert: the mutation was flipped from SyncInProgress to SyncAborted exactly once.
    harvestManager.Verify(
        v => v.UpdateMutationStatus(It.Is<MutationStatusInfo>(m =>
            m.ChangeFromStatus == ActionStatus.SyncInProgress &&
            m.NewStatus == ActionStatus.SyncAborted &&
            m.MutationId == mutationId)),
        Times.Once);
}
public async Task If_Update_throws_error_Sync_process_is_set_to_failed()
{
    // Arrange: the index manager throws while updating the record.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "345" };
    var mutationId = 666;
    var errorMessage = "Hi I'm an error";
    indexManager.Setup(e => e.UpdateArchiveRecord(It.IsAny<ConsumeContext<IUpdateArchiveRecord>>()))
        .Throws(new Exception(errorMessage));

    // Act
    await InputQueueSendEndpoint.Send<IUpdateArchiveRecord>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

    // Wait for the results.
    await updateArchiveReocrdTask;
    var context = await archiveRecordUpdatedTask;

    // Assert: the exception is surfaced as a failed updated-event with its message.
    context.Message.ActionSuccessful.Should().Be(false);
    context.Message.MutationId.Should().Be(mutationId);
    context.Message.ErrorMessage.Should().Be(errorMessage);
}
/// <summary>
/// Extracts the fulltext and adds the resulting text to the ArchiveRecord.
/// </summary>
/// <param name="mutationId">The mutation identifier. (Not read inside this method; kept for interface compatibility.)</param>
/// <param name="archiveRecord">The archive record whose primary data packages are processed.</param>
/// <param name="primaerdatenAuftragStatusId">Id used when reporting the ZipEntpackt status.</param>
/// <returns><c>true</c> if successful, <c>false</c> otherwise.</returns>
public async Task<bool> ExtractFulltext(long mutationId, ArchiveRecord archiveRecord, int primaerdatenAuftragStatusId)
{
    var packages = archiveRecord.PrimaryData;
    var processingTimeForMissingFiles = 0L;
    foreach (var repositoryPackage in packages.Where(p => !string.IsNullOrEmpty(p.PackageFileName)))
    {
        var packageFileName = Path.Combine(Settings.Default.PickupPath, repositoryPackage.PackageFileName);
        var fi = new FileInfo(packageFileName);
        var watch = Stopwatch.StartNew();
        if (File.Exists(fi.FullName))
        {
            Log.Information("Found zip file {Name}. Starting to extract...", fi.Name);
            // Temp folder = zip file name without its extension, next to the zip.
            var tempFolder = Path.Combine(fi.DirectoryName, fi.Name.Remove(fi.Name.Length - fi.Extension.Length));
            try
            {
                ZipFile.ExtractToDirectory(packageFileName, tempFolder);
                var sizeInBytesOnDisk = Directory.GetFiles(tempFolder, "*.*", SearchOption.AllDirectories)
                    .Select(f => new FileInfo(f).Length)
                    .Sum();
                var status = new UpdatePrimaerdatenAuftragStatus
                {
                    PrimaerdatenAuftragId = primaerdatenAuftragStatusId,
                    Service = AufbereitungsServices.AssetService,
                    Status = AufbereitungsStatusEnum.ZipEntpackt
                };
                await UpdatePrimaerdatenAuftragStatus(status);

                await ProcessFiles(repositoryPackage.Files, Path.Combine(tempFolder, "content"), archiveRecord.ArchiveRecordId);
                await ProcessFolders(repositoryPackage.Folders, Path.Combine(tempFolder, "content"), archiveRecord.ArchiveRecordId);

                // if we are here everything is okay
                Log.Information("Successfully processed (fulltext extracted) zip file {Name}", fi.Name);
                processingTimeForMissingFiles += GetProcessingTimeOfIgnoredFilesInTicks(repositoryPackage.SizeInBytes - sizeInBytesOnDisk);
            }
            catch (Exception ex)
            {
                // Fixed: the log message was one string literal broken across two source lines,
                // which is not a legal (non-verbatim) C# literal.
                Log.Error(ex, "Unexpected error while extracting full text. Error Message is: {Message}", ex.Message);
                return false;
            }
            finally
            {
                // Delete the temp files. Fixed: guard the folder delete — if extraction failed
                // before the temp folder was created, the unconditional delete itself threw and
                // masked the original error.
                if (Directory.Exists(tempFolder))
                {
                    Directory.Delete(tempFolder, true);
                }
                File.Delete(packageFileName);
            }
        }
        else
        {
            Log.Warning("Unable to find the zip file {packageFileName}. No text was extracted.", packageFileName);
            return false;
        }

        repositoryPackage.FulltextExtractionDuration = watch.ElapsedTicks + processingTimeForMissingFiles;
    }

    return true;
}
public async Task If_Package_not_valid_Sync_process_is_set_to_failed()
{
    // Arrange
    var harness = new InMemoryTestHarness();
    try
    {
        var ar = new ArchiveRecord { ArchiveRecordId = "344" };
        var mutationId = 999;
        var errMsg = "Some other error message";
        var appendResult = new RepositoryPackageInfoResult { Valid = false, Success = true, ErrorMessage = errMsg };
        repositoryManager.Setup(e => e.ReadPackageMetadata(It.IsAny<string>(), It.IsAny<string>())).Returns(appendResult);

        // Fixed: readPackageMetadataConsumer.Object was registered on the harness twice
        // (copy-paste duplicate); register it once alongside the real consumer under test.
        var readMetadataConsumer = harness.Consumer(() => readPackageMetadataConsumer.Object);
        harness.Consumer(() => new ReadPackageMetadataConsumer(repositoryManager.Object));
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send<IArchiveRecordAppendPackageMetadata>(new { ArchiveRecord = ar, MutationId = mutationId });

        // Assert
        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<IArchiveRecordAppendPackageMetadata>());
        // did the actual consumer consume the message
        Assert.That(await readMetadataConsumer.Consumed.Any<IArchiveRecordAppendPackageMetadata>());
        // did the consumer publish the event
        Assert.That(await harness.Published.Any<IArchiveRecordUpdated>());
        // ensure that no faults were published by the consumer
        Assert.That(await harness.Published.Any<Fault<IArchiveRecordUpdated>>(), Is.False);

        var message = harness.Published.Select<IArchiveRecordUpdated>().First().Context.Message;

        // Assert: invalid package ends the sync with a failure and the repository's error text.
        message.ActionSuccessful.Should().Be(false);
        message.MutationId.Should().Be(mutationId);
        message.ErrorMessage.Should().Be(errMsg);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_Update_succeeds_Sync_process_is_set_to_success()
{
    // Arrange: a fully tokenized record and an index manager that succeeds silently.
    var archiveRecord = new ArchiveRecord
    {
        ArchiveRecordId = "3245",
        Security = new ArchiveRecordSecurity
        {
            MetadataAccessToken = new List<string> { "BAR" },
            PrimaryDataFulltextAccessToken = new List<string> { "BAR" },
            PrimaryDataDownloadAccessToken = new List<string> { "BAR" }
        }
    };
    var mutationId = 124;
    indexManager.Setup(e => e.UpdateArchiveRecord(It.IsAny<ConsumeContext<IUpdateArchiveRecord>>()));

    var harness = new InMemoryTestHarness();
    var consumer = harness.Consumer(() => new UpdateArchiveRecordConsumer(indexManager.Object));
    await harness.Start();
    try
    {
        // Act
        await harness.InputQueueSendEndpoint.Send<IUpdateArchiveRecord>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<IUpdateArchiveRecord>());
        // did the actual consumer consume the message
        Assert.That(await consumer.Consumed.Any<IUpdateArchiveRecord>());
        // was the updated event published
        Assert.That(await harness.Published.Any<IArchiveRecordUpdated>());
        var published = harness.Published.Select<IArchiveRecordUpdated>().FirstOrDefault();

        // Assert: the event reports success without an error message.
        Assert.That(published != null);
        published.Context.Message.ActionSuccessful.Should().Be(true);
        published.Context.Message.MutationId.Should().Be(mutationId);
        published.Context.Message.ErrorMessage.Should().Be(null);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_delete_is_requested_record_is_removed_from_index()
{
    // Arrange
    var archiveRecordId = "34599";
    var mutationId = 6616;
    var ar = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = null }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(ar);
    var harness = new InMemoryTestHarness();
    var consumer = harness.Consumer(() =>
        new SyncArchiveRecordConsumer(harvestManager.Object, findArchiveRecordClient.Object, cachedHarvesterSetting.Object));
    await harness.Start();
    try
    {
        // Act
        await harness.InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "delete" });

        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<ISyncArchiveRecord>());
        // did the actual consumer consume the message
        Assert.That(await consumer.Consumed.Any<ISyncArchiveRecord>());
        var message = harness.Consumed.Select<ISyncArchiveRecord>().FirstOrDefault();
        // was the delete message sent
        Assert.That(await harness.Sent.Any<IRemoveArchiveRecord>());

        // Assert
        // Fixed: the former catch(Exception){Debug.WriteLine(...)} swallowed assertion
        // failures, so this test could never fail. Failures must reach the test runner.
        Assert.That(message != null);
        message.Context.Message.ArchiveRecordId.Should().Be(archiveRecordId);
        message.Context.Message.MutationId.Should().Be(mutationId);
    }
    finally
    {
        await harness.Stop();
    }
}
/// <summary>
/// Extracts the fulltext and adds the resulting text to the ArchiveRecord.
/// </summary>
/// <param name="mutationId">The mutation identifier. (Not read inside this method; kept for interface compatibility.)</param>
/// <param name="archiveRecord">The archive record whose primary data packages are processed.</param>
/// <param name="primaerdatenAuftragId">The id number of the PrimaerdatenAuftrag.</param>
/// <returns><c>true</c> if successful, <c>false</c> otherwise.</returns>
public async Task<bool> ExtractFulltext(long mutationId, ArchiveRecord archiveRecord, int primaerdatenAuftragId)
{
    var packages = archiveRecord.PrimaryData;
    var processingTimeForMissingFiles = 0L;
    foreach (var repositoryPackage in packages.Where(p => !string.IsNullOrEmpty(p.PackageFileName)))
    {
        var packageFileName = Path.Combine(Settings.Default.PickupPath, repositoryPackage.PackageFileName);
        var fi = new FileInfo(packageFileName);
        // Temp folder = package file name without its extension, next to the package.
        var tempFolder = Path.Combine(fi.DirectoryName ?? throw new InvalidOperationException(),
            fi.Name.Remove(fi.Name.Length - fi.Extension.Length));
        var watch = Stopwatch.StartNew();
        if (Directory.Exists(tempFolder))
        {
            Log.Information("Found unzipped files. Starting to process...");
            var context = new JobContext { ArchiveRecordId = archiveRecord.ArchiveRecordId, PackageId = repositoryPackage.PackageId };
            var sizeInBytesOnDisk = Directory.GetFiles(tempFolder, "*.*", SearchOption.AllDirectories)
                .Select(f => new FileInfo(f).Length)
                .Sum();
            try
            {
                await ProcessFiles(repositoryPackage.Files, Path.Combine(tempFolder, "content"), context);
                await ProcessFolders(repositoryPackage.Folders, Path.Combine(tempFolder, "content"), context);

                // if we are here everything is okay
                Log.Information("Successfully processed files (fulltext extracted) from zip file {Name}", fi.Name);
                processingTimeForMissingFiles += GetProcessingTimeOfIgnoredFilesInTicks(repositoryPackage.SizeInBytes - sizeInBytesOnDisk);
            }
            catch (Exception ex)
            {
                // Fixed: the log message was one string literal broken across two source lines,
                // which is not a legal (non-verbatim) C# literal.
                Log.Error(ex, "Unexpected error while extracting full text. Error Message is: {Message}", ex.Message);
                return false;
            }
            finally
            {
                // Delete the temp files (folder existence was checked above; File.Delete is a
                // no-op for a missing file).
                Directory.Delete(tempFolder, true);
                File.Delete(packageFileName);
            }
        }
        else
        {
            Log.Warning("Unable to find the unzipped files for {packageFileName}. No text was extracted.", packageFileName);
            return false;
        }

        repositoryPackage.FulltextExtractionDuration = watch.ElapsedTicks + processingTimeForMissingFiles;
    }

    return true;
}
public async Task If_Package_is_valid_the_package_is_scheduled_for_sync()
{
    // Arrange
    var harness = new InMemoryTestHarness();
    try
    {
        var ar = new ArchiveRecord { ArchiveRecordId = "478" };
        var mutationId = 777;
        var errMsg = string.Empty;
        var appendResult = new RepositoryPackageInfoResult
        {
            Valid = true,
            Success = true,
            ErrorMessage = errMsg,
            PackageDetails = new RepositoryPackage { ArchiveRecordId = ar.ArchiveRecordId }
        };
        repositoryManager.Setup(e => e.ReadPackageMetadata(It.IsAny<string>(), It.IsAny<string>())).Returns(appendResult);

        // Fixed: readPackageMetadataConsumer.Object was registered on the harness twice
        // (copy-paste duplicate); register it once alongside the real consumer under test.
        var readMetadataConsumer = harness.Consumer(() => readPackageMetadataConsumer.Object);
        harness.Consumer(() => new ReadPackageMetadataConsumer(repositoryManager.Object));
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send<IArchiveRecordAppendPackageMetadata>(new { ArchiveRecord = ar, MutationId = mutationId });

        // Assert
        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<IArchiveRecordAppendPackageMetadata>());
        // did the actual consumer consume the message
        Assert.That(await readMetadataConsumer.Consumed.Any<IArchiveRecordAppendPackageMetadata>());
        // was the schedule-for-sync command sent
        Assert.That(await harness.Sent.Any<IScheduleForPackageSync>());
        var message = harness.Sent.Select<IScheduleForPackageSync>().First().Context.Message;

        // Assert: the workload carries the same record id and mutation id.
        message.Workload.ArchiveRecord.ArchiveRecordId.Should().Be(ar.ArchiveRecordId);
        message.Workload.MutationId.Should().Be(mutationId);
    }
    finally
    {
        await harness.Stop();
    }
}
// NOTE(review): the declaration appears truncated in this chunk — the access modifier and
// return type (presumably "public async Task") are missing; confirm against the original file.
// Scenario: an update arrives for a record whose Elastic copy has the identical PrimaryDataLink,
// so the existing primary data in Elastic is reused instead of re-syncing the package.
If_update_is_requested_with_primary_data_and_elastic_record_with_identical_primaryLink_record_is_indexed_with_existing_data()
{
    // Arrange: harvested record with a primary data link and a valid metadata token.
    var archvieRecordId = "3457";
    var mutationId = 6626;
    var ar = new ArchiveRecord
    {
        ArchiveRecordId = archvieRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = "Aip@DossierId" },
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string> { "Ö1" } }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archvieRecordId)).Returns(ar);
    // The Elastic lookup returns a record whose PrimaryDataLink matches the harvested one
    // and that already carries primary data (5 files, known package id).
    var findResult = new FindArchiveRecordResponse
    {
        ArchiveRecordId = archvieRecordId,
        ElasticArchiveRecord = new ElasticArchiveRecord
        {
            ArchiveRecordId = archvieRecordId,
            PrimaryDataLink = "Aip@DossierId",
            PrimaryData = new List<ElasticArchiveRecordPackage>
            {
                new ElasticArchiveRecordPackage { FileCount = 5, PackageId = "controlPackageId" }
            }
        }
    };
    findArchiveRecordClient.Setup(e => e.Request(It.IsAny<FindArchiveRecordRequest>(), CancellationToken.None))
        .Returns(Task.FromResult(findResult));

    // Act
    await InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archvieRecordId, MutationId = mutationId, Action = "UpDaTe" });

    // Wait for the results
    await syncArchiveRecordTask;
    var context = await updateArchiveRecordTask;

    // Assert: the update message reuses the primary data already present in Elastic.
    context.Message.ArchiveRecord.ArchiveRecordId.Should().Be(archvieRecordId);
    context.Message.ArchiveRecord.ElasticPrimaryData[0].FileCount.Should().Be(5);
    context.Message.ArchiveRecord.ElasticPrimaryData[0].PackageId.Should().Be("controlPackageId");
    context.Message.MutationId.Should().Be(mutationId);
}
// Sends an IUpdateArchiveRecord command for the given record to the index manager's
// update queue, resolved relative to the consuming endpoint's source address.
private static async Task UpdateArchiveRecord(ConsumeContext<ISyncArchiveRecord> context, ISyncArchiveRecord message,
    ArchiveRecord archiveRecord)
{
    var updateQueueUri = new Uri(context.SourceAddress, BusConstants.IndexManagerUpdateArchiveRecordMessageQueue);
    var endpoint = await context.GetSendEndpoint(updateQueueUri);
    await endpoint.Send<IUpdateArchiveRecord>(new { message.MutationId, ArchiveRecord = archiveRecord });
    Log.Information("Put {CommandName} message on index queue with mutation ID: {MutationId}", nameof(IUpdateArchiveRecord),
        context.Message.MutationId);
}
public async Task If_update_is_requested_with_removed_primary_data_package_then_cache_is_deleted()
{
    // Arrange: the freshly harvested record no longer has a primary data link,
    // while the Elastic copy still points at a package.
    var archiveRecordId = "3457";
    var mutationId = 6626;
    var harvestedRecord = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = null },
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string> { "Ö1" } }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(harvestedRecord);
    var findResult = new FindArchiveRecordResponse
    {
        ArchiveRecordId = archiveRecordId,
        ElasticArchiveRecord = new ElasticArchiveRecord { ArchiveRecordId = archiveRecordId, PrimaryDataLink = "AIP@DossierId" }
    };
    findArchiveRecordClient.Setup(e => e.Request(It.IsAny<FindArchiveRecordRequest>(), CancellationToken.None))
        .Returns(Task.FromResult(findResult));

    // Act
    await InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "UpDaTe" });

    // Wait for the results.
    await syncArchiveRecordTask;
    var updateContext = await updateArchiveRecordTask;
    var removeFromCacheContext = await removeFileFromCacheTask;

    // Assert: the record is still updated in the index ...
    updateContext.Message.ArchiveRecord.ArchiveRecordId.Should().Be(archiveRecordId);
    updateContext.Message.MutationId.Should().Be(mutationId);
    // ... and removal of the cached package file was requested.
    removeFromCacheContext.Message.ArchiveRecordId.Should().Be(archiveRecordId);
}
public async Task If_Package_is_valid_preprocessing_of_asset_is_initiated()
{
    try
    {
        // Arrange: the repository hands back a valid, successful package.
        var archiveRecord = new ArchiveRecord { ArchiveRecordId = "478" };
        var mutationId = 777;
        var errorMessage = string.Empty;
        var packageResult = new RepositoryPackageResult
        {
            Valid = true,
            Success = true,
            ErrorMessage = errorMessage,
            PackageDetails = new RepositoryPackage { PackageFileName = "need a file name.whatever" }
        };
        repositoryManager.Setup(e => e.AppendPackageToArchiveRecord(It.IsAny<ArchiveRecord>(), It.IsAny<long>(), It.IsAny<int>()))
            .ReturnsAsync(packageResult);
        var appendPackageConsumer = harness.Consumer(() => new AppendPackageConsumer(repositoryManager.Object));
        harness.Consumer(() => archiveRecordAppendPackageConsumer.Object);
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send<IArchiveRecordAppendPackage>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

        // Wait for the results.
        Assert.That(await harness.Consumed.Any<IArchiveRecordAppendPackage>());
        Assert.That(await appendPackageConsumer.Consumed.Any<IArchiveRecordAppendPackage>());
        Assert.That(await harness.Sent.Any<PrepareForRecognitionMessage>());
        var context = harness.Sent.Select<PrepareForRecognitionMessage>().First().Context;

        // Assert: recognition preprocessing was initiated for the same record/mutation.
        context.Message.ArchiveRecord.ArchiveRecordId.Should().Be(archiveRecord.ArchiveRecordId);
        context.Message.MutationId.Should().Be(mutationId);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_Remove_throws_error_Sync_process_is_set_to_failed()
{
    // Arrange: the index manager throws while removing the record.
    var archiveRecord = new ArchiveRecord { ArchiveRecordId = "3457" };
    var mutationId = 6667;
    var errorMessage = "Hi I'm an error";
    indexManager.Setup(e => e.RemoveArchiveRecord(It.IsAny<ConsumeContext<IRemoveArchiveRecord>>()))
        .Throws(new Exception(errorMessage));
    var harness = new InMemoryTestHarness();
    var consumer = harness.Consumer(() => new RemoveArchiveRecordConsumer(indexManager.Object));
    await harness.Start();
    try
    {
        // Act
        await harness.InputQueueSendEndpoint.Send<IRemoveArchiveRecord>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<IRemoveArchiveRecord>());
        // did the actual consumer consume the message
        Assert.That(await consumer.Consumed.Any<IRemoveArchiveRecord>());
        // was the removed event published
        Assert.That(await harness.Published.Any<IArchiveRecordRemoved>());
        var published = harness.Published.Select<IArchiveRecordRemoved>().FirstOrDefault();

        // Assert: the event reports the failure together with the exception message.
        Assert.That(published != null);
        published.Context.Message.ActionSuccessful.Should().Be(false);
        published.Context.Message.MutationId.Should().Be(mutationId);
        published.Context.Message.ErrorMessage.Should().Be(errorMessage);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_update_is_requested_for_forbidden_archive_record_sync_is_aborted()
{
    // Arrange: the record's empty token list means nobody may access it.
    var archiveRecordId = "34527";
    var mutationId = 66267;
    var forbiddenRecord = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string>() }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(forbiddenRecord);
    harvestManager.Setup(e => e.UpdateMutationStatus(It.IsAny<MutationStatusInfo>())).Verifiable();
    var harness = new InMemoryTestHarness();
    var consumer = harness.Consumer(() =>
        new SyncArchiveRecordConsumer(harvestManager.Object, findArchiveRecordClient.Object, cachedHarvesterSetting.Object));
    await harness.Start();
    try
    {
        // Act
        await harness.InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "UpDaTe" });

        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<ISyncArchiveRecord>());
        // did the actual consumer consume the message
        Assert.That(await consumer.Consumed.Any<ISyncArchiveRecord>());

        // Assert: the mutation was flipped from SyncInProgress to SyncAborted exactly once.
        harvestManager.Verify(
            v => v.UpdateMutationStatus(It.Is<MutationStatusInfo>(m =>
                m.ChangeFromStatus == ActionStatus.SyncInProgress &&
                m.NewStatus == ActionStatus.SyncAborted &&
                m.MutationId == mutationId)),
            Times.Once);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_PrepareForRecognition_is_valid_extract_fulltext_is_initiated()
{
    try
    {
        // Arrange: a record with one primary data package containing a single file,
        // and an asset manager whose zip extraction succeeds.
        var archiveRecord = new ArchiveRecord
        {
            ArchiveRecordId = "478",
            PrimaryData = new List<RepositoryPackage>
            {
                new RepositoryPackage
                {
                    PackageFileName = "Testdummy",
                    ArchiveRecordId = "478",
                    Files = new List<RepositoryFile> { new RepositoryFile { PhysicalName = "test.xml" } }
                }
            }
        };
        var mutationId = 777;
        assetManager.Setup(s => s.ExtractZipFile(It.IsAny<ExtractZipArgument>())).Returns(() => Task.FromResult(true));
        var consumer = harness.Consumer(() => new PrepareForRecognitionConsumer(assetManager.Object, preparationEngine.Object));
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send(new PrepareForRecognitionMessage
        {
            ArchiveRecord = archiveRecord,
            MutationId = mutationId,
            PrimaerdatenAuftragId = 458
        });

        // Wait for the results.
        Assert.That(await harness.Consumed.Any<PrepareForRecognitionMessage>());
        Assert.That(await consumer.Consumed.Any<PrepareForRecognitionMessage>());
        Assert.That(await harness.Sent.Any<IArchiveRecordExtractFulltextFromPackage>());
        var context = harness.Sent.Select<IArchiveRecordExtractFulltextFromPackage>().First().Context;

        // Assert: fulltext extraction was requested for the same record/mutation.
        context.Message.ArchiveRecord.ArchiveRecordId.Should().Be(archiveRecord.ArchiveRecordId);
        context.Message.MutationId.Should().Be(mutationId);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_AppendPackage_failed_Sync_process_is_set_to_failed()
{
    // Arrange
    try
    {
        // Appending the package fails outright (neither valid nor successful).
        var archiveRecord = new ArchiveRecord { ArchiveRecordId = "345" };
        var mutationId = 666;
        var errorMessage = "Some error message";
        var failedResult = new RepositoryPackageResult { Valid = false, Success = false, ErrorMessage = errorMessage };
        repositoryManager.Setup(e => e.AppendPackageToArchiveRecord(It.IsAny<ArchiveRecord>(), It.IsAny<long>(), It.IsAny<int>()))
            .ReturnsAsync(failedResult);
        var appendPackageConsumer = harness.Consumer(() => new AppendPackageConsumer(repositoryManager.Object));
        harness.Consumer(() => archiveRecordAppendPackageConsumer.Object);
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send<IArchiveRecordAppendPackage>(new { ArchiveRecord = archiveRecord, MutationId = mutationId });

        // Wait for the results.
        Assert.That(await harness.Consumed.Any<IArchiveRecordAppendPackage>());
        Assert.That(await appendPackageConsumer.Consumed.Any<IArchiveRecordAppendPackage>());
        Assert.That(await harness.Published.Any<IArchiveRecordUpdated>());
        var context = harness.Published.Select<IArchiveRecordUpdated>().First().Context;

        // Assert: failure and error message surface in the updated-event.
        context.Message.ActionSuccessful.Should().Be(false);
        context.Message.MutationId.Should().Be(mutationId);
        context.Message.ErrorMessage.Should().Be(errorMessage);
    }
    finally
    {
        await harness.Stop();
    }
}
public async Task If_update_is_requested_with_no_metadata_access_tokens_sync_is_aborted()
{
    // Arrange: the harvested record grants no metadata access tokens at all.
    var archiveRecordId = "3457";
    var mutationId = 6626;
    var recordWithoutTokens = new ArchiveRecord
    {
        ArchiveRecordId = archiveRecordId,
        Metadata = new ArchiveRecordMetadata { PrimaryDataLink = "Aip@DossierId" },
        Security = new ArchiveRecordSecurity { MetadataAccessToken = new List<string>() }
    };
    harvestManager.Setup(e => e.BuildArchiveRecord(archiveRecordId)).Returns(recordWithoutTokens);
    var harness = new InMemoryTestHarness();
    var consumer = harness.Consumer(() =>
        new SyncArchiveRecordConsumer(harvestManager.Object, findArchiveRecordClient.Object, cachedHarvesterSetting.Object));
    await harness.Start();
    try
    {
        // Act
        await harness.InputQueueSendEndpoint.Send<ISyncArchiveRecord>(new { ArchiveRecordId = archiveRecordId, MutationId = mutationId, Action = "UpDaTe" });

        // did the endpoint consume the message
        Assert.That(await harness.Consumed.Any<ISyncArchiveRecord>());
        // did the actual consumer consume the message
        Assert.That(await consumer.Consumed.Any<ISyncArchiveRecord>());

        // Assert: the sync was aborted, i.e. the mutation status was updated exactly once.
        harvestManager.Verify(e => e.UpdateMutationStatus(It.IsAny<MutationStatusInfo>()), Times.Once);
    }
    finally
    {
        await harness.Stop();
    }
}
// Verifies that a PrepareForTransformationMessage whose preparation fails results in an
// IAssetReady event marked invalid.
// NOTE(review): the local `ar` (with PrimaryData = null, commented as provoking the failure)
// is built but never sent — the message carries the class-level `repositoryPackage` field
// instead, and the assertion compares against ar.ArchiveRecordId ("112"). This only works if
// repositoryPackage happens to carry the same id; confirm against the fixture setup.
public async Task If_PrepareForTransformation_failed_Sync_process_is_set_to_failed()
{
    try
    {
        // Arrange
        var ar = new ArchiveRecord
        {
            ArchiveRecordId = "112",
            PrimaryData = null // This provokes a failure
        };
        var orderId = 777;
        var consumer = harness.Consumer(() => new PrepareForTransformationConsumer(assetManager.Object, scanProcessorMock.Object,
            transformEngineMock.Object, preparationEngine.Object));
        await harness.Start();

        // Act
        await harness.InputQueueSendEndpoint.Send(new PrepareForTransformationMessage()
        {
            AssetType = AssetType.Gebrauchskopie,
            RepositoryPackage = repositoryPackage,
            CallerId = "123",
            OrderItemId = orderId,
            ProtectWithPassword = false,
            RetentionCategory = CacheRetentionCategory.UsageCopyPublic,
            PrimaerdatenAuftragId = 458
        });

        // Wait for the results
        Assert.That(await harness.Consumed.Any<PrepareForTransformationMessage>());
        Assert.That(await consumer.Consumed.Any<PrepareForTransformationMessage>());
        Assert.That(await harness.Published.Any<IAssetReady>());
        var context = harness.Published.Select<IAssetReady>().First().Context;

        // Assert: the asset-ready event reports the failed preparation as invalid.
        context.Message.ArchiveRecordId.Should().Be(ar.ArchiveRecordId);
        context.Message.OrderItemId.Should().Be(orderId);
        context.Message.Valid.Should().BeFalse();
    }
    finally
    {
        await harness.Stop();
    }
}
/// <summary>
/// Fetches the primary data package referenced by the record's PrimaryDataLink and, if the
/// fetch is successful and valid, appends the package details to the archive record.
/// </summary>
/// <param name="archiveRecord">The record whose metadata names the package to fetch.</param>
/// <param name="mutationId">The mutation identifier. (Not read inside this method; kept for interface compatibility.)</param>
/// <param name="primaerdatenId">Id forwarded to the internal package fetch.</param>
/// <returns>The fetch result; its ErrorMessage explains any failure.</returns>
public async Task<RepositoryPackageResult> AppendPackageToArchiveRecord(ArchiveRecord archiveRecord, long mutationId, int primaerdatenId)
{
    var startTime = DateTime.Now;
    var packageId = archiveRecord.Metadata.PrimaryDataLink;
    var archiveRecordId = archiveRecord.ArchiveRecordId;
    using (LogContext.PushProperty("packageId", packageId))
    {
        if (!string.IsNullOrEmpty(packageId) && !string.IsNullOrEmpty(archiveRecordId))
        {
            var fileTypesToIgnore = syncSettings.IgnorierteDateitypenFuerSynchronisierung.Split(',');

            // Getting the package, but for syncing we don't need the overhead of creating the metadata stuff
            var packageResult = await GetPackageInternal(packageId, archiveRecordId, false,
                fileTypesToIgnore.Select(f => f.Trim()).ToList(), primaerdatenId);

            // Output duration. Fixed: PackageDetails can be null when the fetch failed, so the
            // unconditional .SizeInBytes access could throw a NullReferenceException here and
            // mask the real error; use a null-conditional access instead.
            var timespan = new TimeSpan(DateTime.Now.Ticks - startTime.Ticks);
            Log.Information("Package {packageId} with {SizeInBytes} bytes fetched in {TotalSeconds} seconds. Valid status is: {Valid}",
                packageId, packageResult.PackageDetails?.SizeInBytes, timespan.TotalSeconds, packageResult.Valid);

            if (packageResult.Success && packageResult.Valid)
            {
                // Append the package to the archive record
                archiveRecord.PrimaryData.Add(packageResult.PackageDetails);
                return packageResult;
            }

            Log.Warning(
                "Package {packageId} for Archiverecord {archiveRecordId} not appended, because package could not be created or was invalid. ({ErrorMessage})",
                packageId, archiveRecordId, packageResult.ErrorMessage);
            packageResult.ErrorMessage +=
                $"{(!string.IsNullOrEmpty(packageResult.ErrorMessage) ? Environment.NewLine : string.Empty)}Package successfull status: {packageResult.Success}. Package valid status: {packageResult.Valid}";
            return packageResult;
        }

        return new RepositoryPackageResult { ErrorMessage = "Invalid arguments for appending package" };
    }
}
// Maps the view model onto a domain record and asks the teacher service to set the mark.
// Returns 200 OK on success, 409 Conflict when the mark could not be set.
public async Task<ActionResult> SetMark(ArchiveRecordViewModel vm)
{
    var archiveRecord = new ArchiveRecord
    {
        Mark = vm.Mark,
        Course = vm.Course,
        Student = vm.Student
    };

    var markWasSet = await _teacherService.SetMarkAsync(archiveRecord);
    if (!markWasSet)
    {
        return Conflict();
    }

    return Ok();
}
// Verifies that the ArchiveRecord constructor substitutes the literal "NULL" for every
// nulled reference argument.
// Fixed: NUnit's Assert.AreEqual signature is (expected, actual); the original passed the
// actual value first, which produces misleading "expected X but was Y" failure messages.
public void TestNulledArgumentsInConstructor()
{
    // Arrange/Act: all fourteen reference arguments are null, numeric arguments are zero.
    ArchiveRecord record = new ArchiveRecord(null, null, null, null, null, null, null, null, null, null, null, null, null, null, 0, 0);

    // Assert
    Assert.AreEqual("NULL", record.FromName);
    Assert.AreEqual("NULL", record.FromID);
    Assert.AreEqual("NULL", record.FromType);
    Assert.AreEqual("NULL", record.FromCardID);
    Assert.AreEqual("NULL", record.FromCardType);
    Assert.AreEqual("NULL", record.FromBankName);
    Assert.AreEqual("NULL", record.FromBankID);
    Assert.AreEqual("NULL", record.ToName);
    Assert.AreEqual("NULL", record.ToID);
    Assert.AreEqual("NULL", record.ToType);
    Assert.AreEqual("NULL", record.ToCardID);
    Assert.AreEqual("NULL", record.ToCardType);
    Assert.AreEqual("NULL", record.ToBankName);
    Assert.AreEqual("NULL", record.ToBankID);
}
/// <summary>
/// Gets the dossier data, which includes the dossier of the ordered item and all its children
/// units, including the container of the children and the units in the container.
/// </summary>
/// <param name="archiveRecord">The archive record.</param>
/// <returns>VerzEinheitType.</returns>
private VerzEinheitType GetDossierData(ArchiveRecord archiveRecord)
{
    // No matter which level the ordered item was, we always need to deliver the whole dossier.
    var dossierLevelIndex = archiveRecord.Display.ArchiveplanContext
        .FindIndex(i => i.Level.Equals(DossierLevelIdentifier, StringComparison.InvariantCultureIgnoreCase));
    if (dossierLevelIndex < 0)
    {
        throw new InvalidOperationException(
            "We could not find a dossier. Please check your data if the ordered item is either a dossier or a document.");
    }

    var dossierId = archiveRecord.Display.ArchiveplanContext[dossierLevelIndex].ArchiveRecordId;
    var dossier = dataProvider.LoadOrderDetailData(Convert.ToInt32(dossierId));
    return GetArchiveRecordDetailData(dossier);
}