        public void EndToEnd_2_ProcessBatchJobWithDuplicateRowInFile()
        {
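            // Arrange: a batch job whose source workbook itself contains duplicate rows.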
            var job = GetJob(UploadJobType.Batch, _jobGuid);
            job.Filename = GetTestFilePath("batch-indicator-upload-duplicate-rows.xlsx");
            job = JobRepository.SaveJob(job);

            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new BatchWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var excelFileReader = new ExcelFileReader(job.Filename);
            worker.ProcessJob(job, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);
            // Cleanup
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }

        public void EndToEnd_1_ProcessJobWithNewData()
        {
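            // Arrange: a simple upload job whose workbook contains only new data.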
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            job.Filename = GetTestFilePath("upload-simple-new-data.xlsx");
            job = JobRepository.SaveJob(job);

            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var excelFileReader = new ExcelFileReader(job.Filename);
            worker.ProcessJob(job, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);
            // Clean up
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }

        public void EndToEnd_3_ProcessBatchJobWithDuplicateRowInPholio()
        {
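            // Uploads the same batch file twice: the second run must pause for user
            // confirmation before overwriting rows that already exist in PHOLIO.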
            // First job
            var job = GetJob(UploadJobType.Batch, _jobGuid);
            job.Filename = GetTestFilePath("batch-indicator-upload-new-data.xlsx");
            job = JobRepository.SaveJob(job);

            // Process new job without any duplication
            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new BatchWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var excelFileReader = new ExcelFileReader(job.Filename);
            worker.ProcessJob(job, validator, processor, excelFileReader);

            // Duplicate job
            var duplicateJob = GetJob(UploadJobType.Batch, _jobGuid);
            duplicateJob.Guid = Guid.NewGuid();
            duplicateJob.Filename = GetTestFilePath("batch-indicator-upload-new-data.xlsx");
            duplicateJob = JobRepository.SaveJob(duplicateJob);

            // Process job with duplication
            excelFileReader = new ExcelFileReader(duplicateJob.Filename);
            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, duplicateJob.Status);

            // Give confirmation to override 
            duplicateJob.Status = UploadJobStatus.ConfirmationGiven;
            JobRepository.UpdateJob(duplicateJob);

            excelFileReader = new ExcelFileReader(duplicateJob.Filename);
            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);
            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, duplicateJob.Status);

            // Cleanup
            CoreDataRepository.DeleteCoreData(job.Guid);
            CoreDataRepository.DeleteCoreData(duplicateJob.Guid);

            JobRepository.DeleteJob(duplicateJob.Guid);
            ErrorRepository.DeleteLog(duplicateJob.Guid);
        }

        public void StartJob(UploadJob job)
        {
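            // Both job types share the same validator and Excel reader; only the
            // worker/processor pair differs per branch.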
            var validator = new WorksheetNameValidator();
            var actualFilePath = FilePathHelper.GetActualFilePath(job);
            var excelFileReader = new ExcelFileReader(actualFilePath);

            SetUsername(job);

            if (job.JobType == UploadJobType.Simple)
            {
                _logger.Info("Processing Simple upload for {0}, job id {1}", job.Username, job.Guid);
                var worker = new SimpleJobWorker();
                var processor = new SimpleWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
                worker.ProcessJob(job, validator, processor, excelFileReader);
            }
            else
            {
                _logger.Info("Processing Batch upload for {0}, job id {1}", job.Username, job.Guid);
                var worker = new BatchJobWorker();
                var processor = new BatchWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
                worker.ProcessJob(job, validator, processor, excelFileReader);
            }
        }

        public void EndToEnd_3_ProcessJobWithDuplicateRowInPholio()
        {
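            // Flag that this test creates a second job GUID, presumably so the shared
            // fixture cleanup can remove that job's data as well.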
            hasSecondJobGuid = true;

            // First job
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            job.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
            job = JobRepository.SaveJob(job);

            // Process new job without any duplication
            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var excelFileReader = new ExcelFileReader(job.Filename);
            worker.ProcessJob(job, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);

            // Second job
            _secondJobGuid = Guid.NewGuid();
            var duplicateJob = GetJob(UploadJobType.Simple, _secondJobGuid);

            duplicateJob.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
            duplicateJob = JobRepository.SaveJob(duplicateJob);

            // Wait for excel read connection to be disposed
            Thread.Sleep(1000);

            // Process job with duplication
            excelFileReader = new ExcelFileReader(duplicateJob.Filename);
            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, duplicateJob.Status);

            // Wait for excel read connection to be disposed
            Thread.Sleep(1000);

            duplicateJob.Status = UploadJobStatus.ConfirmationGiven;

            JobRepository.UpdateJob(duplicateJob);

            // Re-open the workbook: the previous reader's connection has been disposed
            excelFileReader = new ExcelFileReader(duplicateJob.Filename);
            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);
            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, duplicateJob.Status);
        }

        public void ProcessJobWithDuplicateRows()
        {
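            // The Excel reader is mocked, so this test needs no physical workbook on disk.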
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            job = JobRepository.SaveJob(job);
            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var correctWorksheet = new List<string> { WorksheetNames.SimpleIndicator, WorksheetNames.SimplePholio };
            var indicatorDetailsTable = GetIndicatorDetailsTable();
            var pholioTable = GetPholioTable();

            var fileReader = new Mock<IExcelFileReader>();
            fileReader.Setup(x => x.GetWorksheets()).Returns(correctWorksheet);
            fileReader.Setup(x => x.GetIndicatorDetails()).Returns(indicatorDetailsTable);
            fileReader.Setup(x => x.GetPholioData()).Returns(pholioTable);
            // Add new data
            worker.ProcessJob(job, validator, processor, fileReader.Object);
            // Add duplicate data
            worker.ProcessJob(job, validator, processor, fileReader.Object);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, job.Status);

            // Cleanup
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }

        public void ProcessJobWithWrongWorksheet()
        {
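            // Worksheet names "Fus" and "FPM" do not match the expected simple-upload
            // sheets, so validation should fail before any data is processed.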
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            // Save to db
            job = JobRepository.SaveJob(job);

            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);

            var wrongWorksheets = new List<string> { "Fus", "FPM" };
            var fileReader = new Mock<IExcelFileReader>();
            fileReader.Setup(f => f.GetWorksheets()).Returns(wrongWorksheets);

            worker.ProcessJob(job, validator, processor, fileReader.Object);

            Assert.AreEqual(UploadJobStatus.FailedValidation, job.Status);

            // Clean up
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }

        public void ProcessBatchJobWithDuplicateRows()
        {
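            // As above, the file reader is mocked; processing the same in-memory row
            // twice should leave the job awaiting confirmation.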
            var job = GetJob(UploadJobType.Batch, _jobGuid);
            job = JobRepository.SaveJob(job);

            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new BatchWorksheetDataProcessor(CoreDataRepository, LoggingRepository);

            var correctWorksheet = new List<string> { WorksheetNames.BatchPholio };
            // Add new data
            var batchDataTable = GetEmptyDataTable();
            batchDataTable.Rows.Add(91491, 2030, 1, -1, -1, 44, 1, "E92000001", -1, 35.3, 34.9, 35.7, 56704, -1, 0, -1, -1);
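            // One indicator row: indicator ID 91491, year 2030, area code E92000001
            // (England); column order follows the schema from GetEmptyDataTable.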

            var fileReader = new Mock<IExcelFileReader>();
            fileReader.Setup(f => f.GetWorksheets()).Returns(correctWorksheet);
            fileReader.Setup(f => f.GetBatchData()).Returns(batchDataTable);

            worker.ProcessJob(job, validator, processor, fileReader.Object);

            // Try adding duplicate data
            worker.ProcessJob(job, validator, processor, fileReader.Object);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, job.Status);

            // Cleanup
            CoreDataRepository.DeleteCoreData(job.Guid);
        }