/// <summary>
/// Entry point for processing a single upload job. Routes the job down one of two paths:
/// a job the user has already confirmed (duplicates overridden) or a brand-new job that
/// must pass the full validation pipeline before upload.
/// </summary>
/// <param name="job">The upload job to process; its <c>Status</c> selects the path.</param>
/// <param name="nameValidator">Validates worksheet names for new jobs.</param>
/// <param name="processor">Performs validation, duplicate archiving and the core-data upload.</param>
/// <param name="excelFileReader">Reads worksheets and batch data from the uploaded file.</param>
public void ProcessJob(UploadJob job, IWorksheetNameValidator nameValidator, IBatchWorksheetDataProcessor processor, IExcelFileReader excelFileReader)
{
    try
    {
        _jobRepository = new UploadJobRepository();
        _jobErrorRepository = new UploadJobErrorRepository();
        var batchUpload = ToBatchUpload(job);
        jobStatus = new StatusHelper(_jobRepository, _logger);
        _logger.Info("Job# {0} current status is {1} ", job.Guid, job.Status);

        if (job.Status == UploadJobStatus.ConfirmationGiven)
        {
            // User has confirmed they want to override duplications.
            ProcessConfirmedJob(job, processor, excelFileReader, batchUpload);
        }
        else
        {
            // New job: run the full validation pipeline before uploading.
            ProcessNewJob(job, nameValidator, processor, excelFileReader, batchUpload);
        }
    }
    catch (Exception ex)
    {
        // Log first so the root cause is never lost, even if the status update fails.
        _logger.Error(ex);

        // jobStatus is only assigned part-way through the try block above; if the
        // failure happened before that point it is still null, and calling it
        // unconditionally would throw a NullReferenceException that hides the
        // original exception.
        jobStatus?.UnexpectedError(job);
    }
}

/// <summary>
/// Processes a job whose duplications the user has explicitly confirmed:
/// re-validates to rediscover the duplicate database rows, strips in-file
/// duplicates, archives the database duplicates, then uploads.
/// </summary>
private void ProcessConfirmedJob(UploadJob job, IBatchWorksheetDataProcessor processor, IExcelFileReader excelFileReader, BatchUpload batchUpload)
{
    jobStatus.InProgress(job);

    // Read indicators into a data table
    var batchDataTable = GetBatchData(excelFileReader);

    // Save the total number of rows in file
    WorkerHelper.UpdateNumberOfRowsInFile(job, batchDataTable, _jobRepository, true);

    // Perform validation once again to get the list of duplicate rows in database
    processor.Validate(batchDataTable, batchUpload);

    // Remove duplications in file
    CheckDuplicateRowsInWorksheet(job, batchUpload, ref batchDataTable);

    // Archive rows that would otherwise collide in the database
    processor.ArchiveDuplicates(batchUpload.DuplicateRowInDatabaseErrors, job);

    // Upload data to core data set
    UploadDataToCoreDataSet(job, processor, batchDataTable);
}

/// <summary>
/// Processes a brand-new job through the full pipeline: worksheet-name check,
/// data validation, permission check, in-file duplicate check, validation-failure
/// check and database duplicate check. Returns early (leaving the job in the
/// status set by the failing check) as soon as any stage fails.
/// </summary>
private void ProcessNewJob(UploadJob job, IWorksheetNameValidator nameValidator, IBatchWorksheetDataProcessor processor, IExcelFileReader excelFileReader, BatchUpload batchUpload)
{
    jobStatus.InProgress(job);

    // Get worksheets from file
    var worksheets = excelFileReader.GetWorksheets();

    UpdateJobProgress(job, ProgressStage.ValidatingWorksheets);

    // Check worksheet names are correct
    var worksheetsOk = CheckWorksheets(job, nameValidator, worksheets);
    if (!worksheetsOk) return;

    var batchDataTable = GetBatchData(excelFileReader);

    // Save the total number of rows in file
    WorkerHelper.UpdateNumberOfRowsInFile(job, batchDataTable, _jobRepository, true);

    UpdateJobProgress(job, ProgressStage.ValidatingData);
    processor.Validate(batchDataTable, batchUpload);

    var indicatorIdsInBatch = processor.GetIndicatorIdsInBatch();

    UpdateJobProgress(job, ProgressStage.CheckingPermission);

    // Check user permission for indicators
    var permissionsOk = CheckPermission(job, indicatorIdsInBatch);
    if (!permissionsOk) return;

    UpdateJobProgress(job, ProgressStage.CheckingDuplicationInFile);

    // Check for duplications in file
    CheckDuplicateRowsInWorksheet(job, batchUpload, ref batchDataTable);

    // Check validation errors
    var validationOk = CheckValidationFailures(job, batchUpload);
    if (!validationOk) return;

    UpdateJobProgress(job, ProgressStage.CheckingDuplicationInDb);

    // Check for duplicate database rows; stop and await user confirmation if any found
    var haveDuplicates = CheckDuplicateRowsInDatabase(job, batchUpload);
    if (haveDuplicates) return;

    UploadDataToCoreDataSet(job, processor, batchDataTable);
}
/// <summary>
/// Writes the validated batch data to the core data set and marks the job
/// as successfully completed with the number of rows uploaded.
/// </summary>
/// <param name="job">The job being processed; updated with progress and final status.</param>
/// <param name="processor">Performs the actual database upload.</param>
/// <param name="batchDataTable">The validated rows to upload.</param>
private void UploadDataToCoreDataSet(UploadJob job, IBatchWorksheetDataProcessor processor, DataTable batchDataTable)
{
    // Report the stage before doing the work: previously this progress update ran
    // after SuccessfulUpload, so the job's progress regressed from "successful"
    // back to "writing to DB" and the stage was never visible during the write.
    UpdateJobProgress(job, ProgressStage.WrittingToDb);

    // Upload to DB
    var rowsUploaded = processor.UploadData(batchDataTable, job).DataToUpload.Count;

    // All good, job completed
    jobStatus.SuccessfulUpload(job, rowsUploaded);
}