// End-to-end: a simple upload job containing only brand-new data should
// run straight through the pipeline and finish with SuccessfulUpload.
public void EndToEnd_1_ProcessJobWithNewData()
{
    // Arrange: persist a simple job pointing at a file of new rows
    var job = GetJob(UploadJobType.Simple, _jobGuid);
    job.Filename = GetTestFilePath("upload-simple-new-data.xlsx");
    job = JobRepository.SaveJob(job);

    var worker = GetWorker();
    var nameValidator = new WorksheetNameValidator();
    var dataProcessor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
    var reader = new ExcelFileReader(job.Filename);

    // Act: run the job through the worker
    worker.ProcessJob(job, nameValidator, dataProcessor, reader);

    // Assert: upload completed successfully
    Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);

    // Clean up
    CoreDataRepository.DeleteCoreData(_jobGuid);
}
// End-to-end: a batch upload whose source file contains duplicate rows
// (within the file itself) should still complete with SuccessfulUpload.
public void EndToEnd_2_ProcessBatchJobWithDuplicateRowInFile()
{
    // Arrange: persist a batch job whose file holds duplicated rows
    var batchJob = GetJob(UploadJobType.Batch, _jobGuid);
    batchJob.Filename = GetTestFilePath("batch-indicator-upload-duplicate-rows.xlsx");
    batchJob = JobRepository.SaveJob(batchJob);

    var worker = GetWorker();
    var nameValidator = new WorksheetNameValidator();
    var dataProcessor = new BatchWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
    var reader = new ExcelFileReader(batchJob.Filename);

    // Act
    worker.ProcessJob(batchJob, nameValidator, dataProcessor, reader);

    // Assert: in-file duplicates do not block the upload
    Assert.AreEqual(UploadJobStatus.SuccessfulUpload, batchJob.Status);

    // Cleanup
    CoreDataRepository.DeleteCoreData(_jobGuid);
}
// End-to-end: a batch upload whose rows already exist in PHOLIO should pause
// with ConfirmationAwaited, then complete once confirmation is given.
public void EndToEnd_3_ProcessBatchJobWithDuplicateRowInPholio()
{
    // First job: upload new data so PHOLIO now contains these rows
    var job = GetJob(UploadJobType.Batch, _jobGuid);
    job.Filename = GetTestFilePath("batch-indicator-upload-new-data.xlsx");
    job = JobRepository.SaveJob(job);

    var worker = GetWorker();
    var nameValidator = new WorksheetNameValidator();
    var dataProcessor = new BatchWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
    var reader = new ExcelFileReader(job.Filename);

    // Process the first job without any duplication
    worker.ProcessJob(job, nameValidator, dataProcessor, reader);

    // Duplicate job: same file, distinct guid
    var duplicateJob = GetJob(UploadJobType.Batch, _jobGuid);
    duplicateJob.Guid = Guid.NewGuid();
    duplicateJob.Filename = GetTestFilePath("batch-indicator-upload-new-data.xlsx");
    duplicateJob = JobRepository.SaveJob(duplicateJob);

    // Processing should detect rows already present in PHOLIO and wait
    reader = new ExcelFileReader(duplicateJob.Filename);
    worker.ProcessJob(duplicateJob, nameValidator, dataProcessor, reader);
    Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, duplicateJob.Status);

    // Give confirmation to override the duplicates and reprocess
    duplicateJob.Status = UploadJobStatus.ConfirmationGiven;
    JobRepository.UpdateJob(duplicateJob);
    reader = new ExcelFileReader(duplicateJob.Filename);
    worker.ProcessJob(duplicateJob, nameValidator, dataProcessor, reader);
    Assert.AreEqual(UploadJobStatus.SuccessfulUpload, duplicateJob.Status);

    // Cleanup
    CoreDataRepository.DeleteCoreData(job.Guid);
    CoreDataRepository.DeleteCoreData(duplicateJob.Guid);
    JobRepository.DeleteJob(duplicateJob.Guid);
    ErrorRepository.DeleteLog(duplicateJob.Guid);
}
// Dispatches an upload job to the worker/processor pair that matches its
// type (Simple vs Batch) and starts processing the job's Excel file.
public void StartJob(UploadJob job)
{
    var validator = new WorksheetNameValidator();
    var actualFilePath = FilePathHelper.GetActualFilePath(job);
    var excelFileReader = new ExcelFileReader(actualFilePath);

    // Username is required for the log line in both branches, so resolve it
    // once here instead of duplicating the call per branch.
    SetUsername(job);

    if (job.JobType == UploadJobType.Simple)
    {
        _logger.Info("Processing at Simple upload for {0} and jobid# is {1}", job.Username, job.Guid);
        var worker = new SimpleJobWorker();
        var processor = new SimpleWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
        worker.ProcessJob(job, validator, processor, excelFileReader);
    }
    else
    {
        // NOTE(review): all non-Simple job types fall through to the batch
        // pipeline — confirm this is intended if more job types are added.
        _logger.Info("Processing at Batch upload for {0} and jobid# is {1}", job.Username, job.Guid);
        var worker = new BatchJobWorker();
        var processor = new BatchWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
        worker.ProcessJob(job, validator, processor, excelFileReader);
    }
}
// End-to-end: a simple upload whose rows duplicate data already in PHOLIO
// should pause with ConfirmationAwaited, then succeed after confirmation.
// Fix: construct a fresh ExcelFileReader before the post-confirmation
// reprocess — the previous reader has already been consumed (the test even
// sleeps to let its connection dispose), and the batch-duplicate test
// recreates the reader at the same point.
public void EndToEnd_3_ProcessJobWithDuplicateRowInPholio()
{
    hasSecondJobGuid = true; // signals teardown to also clean up the second job

    // First job: load the rows into PHOLIO
    var job = GetJob(UploadJobType.Simple, _jobGuid);
    job.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
    job = JobRepository.SaveJob(job);

    // Process new job without any duplication
    var worker = GetWorker();
    var validator = new WorksheetNameValidator();
    var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
    var excelFileReader = new ExcelFileReader(job.Filename);
    worker.ProcessJob(job, validator, processor, excelFileReader);
    Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);

    // Second job: same file, new guid — its rows now duplicate PHOLIO content
    _secondJobGuid = Guid.NewGuid();
    var duplicateJob = GetJob(UploadJobType.Simple, _secondJobGuid);
    duplicateJob.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
    duplicateJob = JobRepository.SaveJob(duplicateJob);

    // Wait for excel read connection to be disposed
    Thread.Sleep(1000);

    // Processing should detect the duplicates and await confirmation
    excelFileReader = new ExcelFileReader(duplicateJob.Filename);
    worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);
    Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, duplicateJob.Status);

    // Wait for excel read connection to be disposed
    Thread.Sleep(1000);

    // Give confirmation to override, then reprocess with a fresh reader
    duplicateJob.Status = UploadJobStatus.ConfirmationGiven;
    JobRepository.UpdateJob(duplicateJob);
    excelFileReader = new ExcelFileReader(duplicateJob.Filename);
    worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);
    Assert.AreEqual(UploadJobStatus.SuccessfulUpload, duplicateJob.Status);
}