public void EndToEnd_1_ProcessJobWithNewData()
        {
            // Arrange: persist a Simple upload job pointing at a workbook of new data.
            var uploadJob = GetJob(UploadJobType.Simple, _jobGuid);
            uploadJob.Filename = GetTestFilePath("upload-simple-new-data.xlsx");
            uploadJob = JobRepository.SaveJob(uploadJob);

            // Act: push the job through the full worker pipeline.
            var jobWorker = GetWorker();
            var nameValidator = new WorksheetNameValidator();
            var dataProcessor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var reader = new ExcelFileReader(uploadJob.Filename);
            jobWorker.ProcessJob(uploadJob, nameValidator, dataProcessor, reader);

            // Assert: upload succeeded, then remove the rows this job inserted.
            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, uploadJob.Status);
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }
        public void StartJob(UploadJob job)
        {
            var validator = new WorksheetNameValidator();
            var actualFilePath = FilePathHelper.GetActualFilePath(job);
            var excelFileReader = new ExcelFileReader(actualFilePath);

            if (job.JobType == UploadJobType.Simple)
            {
                SetUsername(job);
                _logger.Info("Processing at Simple upload for {0} and jobid# is {1}", job.Username, job.Guid);
                var worker = new SimpleJobWorker();
                var processor = new SimpleWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
                worker.ProcessJob(job, validator, processor, excelFileReader);
            }
            else
            {
                SetUsername(job);
                _logger.Info("Processing at Batch upload for {0} and jobid# is {1}", job.Username, job.Guid);
                var worker = new BatchJobWorker();
                var processor = new BatchWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
                worker.ProcessJob(job, validator, processor, excelFileReader);
            }
        }
        public void EndToEnd_3_ProcessJobWithDuplicateRowInPholio()
        {
            hasSecondJobGuid = true;

            // First job
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            job.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
            job = JobRepository.SaveJob(job);


            // Process new job without any duplication
            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var excelFileReader = new ExcelFileReader(job.Filename);
            worker.ProcessJob(job, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, job.Status);




            // Second job           
            _secondJobGuid = Guid.NewGuid();
            var duplicateJob = GetJob(UploadJobType.Simple, _secondJobGuid);

            duplicateJob.Filename = GetTestFilePath("upload-simple-duplicate-row-in-pholio.xlsx");
            duplicateJob = JobRepository.SaveJob(duplicateJob);

            // Wait for excel read connection to be disposed
            Thread.Sleep(1000);

            // Process job with duplication
            excelFileReader = new ExcelFileReader(duplicateJob.Filename);
            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, duplicateJob.Status);

            // Wait for excel read connection to be disposed
            Thread.Sleep(1000);

            duplicateJob.Status = UploadJobStatus.ConfirmationGiven;

            JobRepository.UpdateJob(duplicateJob);

            worker.ProcessJob(duplicateJob, validator, processor, excelFileReader);
            Assert.AreEqual(UploadJobStatus.SuccessfulUpload, duplicateJob.Status);

        }
        public void ProcessJobWithDuplicateRows()
        {
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            job = JobRepository.SaveJob(job);
            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);
            var correctWorksheet = new List<string> { WorksheetNames.SimpleIndicator, WorksheetNames.SimplePholio };
            var indicatorDetailsTable = GetIndicatorDetailsTable();
            var pholioTable = GetPholioTable();

            var fileReader = new Mock<IExcelFileReader>();
            fileReader.Setup(x => x.GetWorksheets()).Returns(correctWorksheet);
            fileReader.Setup(x => x.GetIndicatorDetails()).Returns(indicatorDetailsTable);
            fileReader.Setup(x => x.GetPholioData()).Returns(pholioTable);
            // Add new data
            worker.ProcessJob(job, validator, processor, fileReader.Object);
            // Add duplicate data
            worker.ProcessJob(job, validator, processor, fileReader.Object);

            Assert.AreEqual(UploadJobStatus.ConfirmationAwaited, job.Status);

            // Cleanup
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }
        public void ProcessJobWithWrongWorksheet()
        {
            var job = GetJob(UploadJobType.Simple, _jobGuid);
            // Save to db
            job = JobRepository.SaveJob(job);

            var worker = GetWorker();
            var validator = new WorksheetNameValidator();
            var processor = new SimpleWorksheetDataProcessor(CoreDataRepository, LoggingRepository);

            var wrongWorksheets = new List<string> { "Fus", "FPM" };
            var fileReader = new Mock<IExcelFileReader>();
            fileReader.Setup(f => f.GetWorksheets()).Returns(wrongWorksheets);

            worker.ProcessJob(job, validator, processor, fileReader.Object);

            Assert.AreEqual(UploadJobStatus.FailedValidation, job.Status);

            // Clean up
            CoreDataRepository.DeleteCoreData(_jobGuid);
        }