/// <summary>
/// Counts the data rows in the uploaded worksheet and persists the total on the job.
/// For a simple upload the count stops at the first row the simple row parser
/// reports as empty; for a batch upload every row in the table is counted.
/// </summary>
/// <param name="job">The upload job to update.</param>
/// <param name="dataTable">Worksheet data read from the uploaded file.</param>
/// <param name="repository">Repository used to persist the updated job.</param>
/// <param name="isSimpleUpload">True when the job is a simple (single-indicator) upload.</param>
public static void UpdateNumberOfRowsInFile(UploadJob job, DataTable dataTable, UploadJobRepository repository, bool isSimpleUpload)
{
    int totalRows;
    if (isSimpleUpload)
    {
        // Simple uploads may have trailing empty rows: stop at the first row without data
        totalRows = 0;
        foreach (DataRow dataRow in dataTable.Rows)
        {
            if (new UploadSimpleRowParser(dataRow).DoesRowContainData == false)
            {
                break;
            }
            totalRows++;
        }
    }
    else
    {
        totalRows = dataTable.Rows.Count;
    }

    job.TotalRows = totalRows;
    repository.UpdateJob(job);
}
/// <summary>
/// Verifies that every indicator in the list exists and that the job's user has
/// permission to upload data for it. Logs a permission error and returns false
/// on the first failing check; returns true when all checks pass.
/// </summary>
/// <param name="indicators">Indicator ids referenced by the upload.</param>
/// <param name="job">The upload job being validated.</param>
/// <param name="errorRepository">Repository used to log any permission errors.</param>
/// <returns>True when all indicators exist and are permitted for the user.</returns>
public bool Check(List<int> indicators, UploadJob job, UploadJobErrorRepository errorRepository)
{
    // Populates noneExistingIndicators and indicatorsWithoutPermission
    CheckIndicatorPermissionForCurrentUser(indicators, job.UserId);

    var isSimpleJob = job.JobType == UploadJobType.Simple;

    // Fail if any referenced indicator does not exist
    if (noneExistingIndicators.Count > 0)
    {
        errorRepository.Log(isSimpleJob
            ? ErrorBuilder.GetSimplePermissionError(job.Guid, noneExistingIndicators, false)
            : ErrorBuilder.GetBatchPermissionError(job.Guid, noneExistingIndicators, false));
        return false;
    }

    // Fail if the user lacks permission for any indicator
    if (indicatorsWithoutPermission.Count > 0)
    {
        errorRepository.Log(isSimpleJob
            ? ErrorBuilder.GetSimplePermissionError(job.Guid, indicatorsWithoutPermission, true)
            : ErrorBuilder.GetBatchPermissionError(job.Guid, indicatorsWithoutPermission, true));
        return false;
    }

    return true;
}
/// <summary>
/// Marks the job as in progress, resets the committed-row counter,
/// persists the change and logs the status transition.
/// </summary>
/// <param name="job">The job whose status is being changed.</param>
public void InProgress(UploadJob job)
{
    job.Status = UploadJobStatus.InProgress;
    job.TotalRowsCommitted = 0;

    _jobRepository.UpdateJob(job);

    _logger.Info("Job# {0} status changed to {1} ", job.Guid, job.Status);
}
/// <summary>
/// Marks the job as a successful upload, records how many rows were
/// committed, persists the change and logs completion.
/// </summary>
/// <param name="job">The job whose status is being changed.</param>
/// <param name="rowsUploaded">Number of rows committed to the database.</param>
public void SuccessfulUpload(UploadJob job, int rowsUploaded)
{
    job.Status = UploadJobStatus.SuccessfulUpload;
    job.TotalRowsCommitted = rowsUploaded;

    _jobRepository.UpdateJob(job);

    _logger.Info("Job# {0} successfully completed", job.Guid);
}
/// <summary>
/// Builds a test-fixture upload job of the given type with fixed
/// filename/user values and the supplied guid.
/// </summary>
/// <param name="jobType">Simple or batch job type.</param>
/// <param name="jobGuid">Guid to assign to the new job.</param>
/// <returns>A new, not-yet-started upload job.</returns>
protected UploadJob GetJob(UploadJobType jobType, Guid jobGuid)
{
    return new UploadJob
    {
        Guid = jobGuid,
        Status = UploadJobStatus.NotStart,
        DateCreated = DateTime.Now,
        JobType = jobType,
        Filename = @"fake.xls",
        UserId = 11,
        Username = @"phe\doris.hain"
    };
}
/// <summary>
/// Persists changes to an existing upload job inside an NHibernate-style
/// transaction: update, flush, refresh the entity from the database, commit.
/// </summary>
/// <param name="uploadJob">The job entity to update.</param>
// NOTE(review): on failure the transaction is not explicitly rolled back here —
// confirm that HandleException rolls back / disposes the open transaction.
// NOTE(review): exceptions are routed to HandleException rather than rethrown,
// so callers see this method as succeeding even when the update failed — verify
// that is the intended contract.
public void UpdateJob(UploadJob uploadJob) { try { transaction = CurrentSession.BeginTransaction(); CurrentSession.Update(uploadJob); CurrentSession.Flush(); CurrentSession.Refresh(uploadJob); transaction.Commit(); } catch (Exception exception) { HandleException(exception); } }
/// <summary>
/// Uploads simple (single-indicator) worksheet data: inserts each parsed data
/// row into the core data set, writes an upload audit record, and returns a
/// SimpleUpload describing what was uploaded.
/// </summary>
/// <param name="indicatorDetails">The indicator-details worksheet as a data table.</param>
/// <param name="pholioData">The pholio-data worksheet as a data table.</param>
/// <param name="job">The upload job providing guid, username and filename.</param>
/// <returns>A SimpleUpload populated with the uploaded rows and audit id.</returns>
// TODO: find out why we are calling ValidateIndicatorDetails again here as it is already called in Validate()
public SimpleUpload UploadData(DataTable indicatorDetails, DataTable pholioData, UploadJob job) { var simpleUpload = new SimpleUpload(); var rowCount = 0; var allowedData = new AllowedData(_profilesReader); ValidateIndicatorDetails(indicatorDetails, simpleUpload, allowedData); var dataToUpload = new List<UploadDataModel>(); for (int i = 0; i < pholioData.Rows.Count; i++) { DataRow row = pholioData.Rows[i]; var rowParser = new UploadSimpleRowParser(row); if (rowParser.DoesRowContainData == false) { // There isn't an area code or value so assume the end of the data break; } rowCount++; // Unparsed values fall back to defaults before insertion into core data var upload = rowParser.GetUploadDataModelWithUnparsedValuesSetToDefaults(simpleUpload); _coreDataRepository.InsertCoreData(upload.ToCoreDataSet(), job.Guid); dataToUpload.Add(upload); } simpleUpload.DataToUpload = dataToUpload; // Audit the upload before returning; the audit id becomes the upload id
int uploadId = _loggingRepository.InsertUploadAudit(job.Guid, job.Username, rowCount, job.Filename, WorksheetNames.SimplePholio); simpleUpload.ShortFileName = Path.GetFileName(job.Filename); simpleUpload.TotalDataRows = rowCount; simpleUpload.UploadBatchId = job.Guid; simpleUpload.Id = uploadId; return simpleUpload; }
/// <summary>
/// Persists a new upload job inside an NHibernate-style transaction:
/// save, flush, refresh the entity from the database, commit.
/// </summary>
/// <param name="job">The job entity to save.</param>
/// <returns>The same job instance, refreshed from the database on success.</returns>
// NOTE(review): on failure the transaction is not explicitly rolled back here —
// confirm that HandleException rolls back / disposes the open transaction.
// NOTE(review): when an exception is swallowed by HandleException the job is
// returned un-refreshed, so callers cannot distinguish success from failure.
public UploadJob SaveJob(UploadJob job) { try { transaction = CurrentSession.BeginTransaction(); CurrentSession.Save(job); CurrentSession.Flush(); CurrentSession.Refresh(job); transaction.Commit(); } catch (Exception exception) { HandleException(exception); } return job; }
/// <summary>
/// Entry point for processing an upload job: resolves the uploaded file on
/// disk, then dispatches to the simple or batch worker pipeline according
/// to the job type.
/// </summary>
/// <param name="job">The upload job to process.</param>
public void StartJob(UploadJob job)
{
    var validator = new WorksheetNameValidator();
    var actualFilePath = FilePathHelper.GetActualFilePath(job);
    var excelFileReader = new ExcelFileReader(actualFilePath);

    // Resolve the username from the user id once, before branching
    // (previously duplicated identically in both branches).
    SetUsername(job);

    if (job.JobType == UploadJobType.Simple)
    {
        _logger.Info("Processing at Simple upload for {0} and jobid# is {1}", job.Username, job.Guid);
        var worker = new SimpleJobWorker();
        var processor = new SimpleWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
        worker.ProcessJob(job, validator, processor, excelFileReader);
    }
    else
    {
        _logger.Info("Processing at Batch upload for {0} and jobid# is {1}", job.Username, job.Guid);
        var worker = new BatchJobWorker();
        var processor = new BatchWorksheetDataProcessor(_coreDataRepository, _loggingRepository);
        worker.ProcessJob(job, validator, processor, excelFileReader);
    }
}
/// <summary>
/// Records the current pipeline stage on the job and persists it so
/// callers can observe upload progress.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="stage">The pipeline stage just reached.</param>
private void UpdateJobProgress(UploadJob job, ProgressStage stage)
{
    job.ProgressStage = stage;
    _jobRepository.UpdateJob(job);
}
/// <summary>
/// Validates that the uploaded workbook contains the worksheets required
/// for a batch upload. On failure the job is marked as failed validation
/// and a worksheet-name error is logged.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="nameValidator">Validator for the expected worksheet names.</param>
/// <param name="worksheets">Worksheet names found in the workbook.</param>
/// <returns>True when the required worksheets are present.</returns>
private bool CheckWorksheets(UploadJob job, IWorksheetNameValidator nameValidator, List<string> worksheets)
{
    var worksheetsAreValid = nameValidator.ValidateBatch(worksheets);
    if (worksheetsAreValid)
    {
        return true;
    }

    // Required worksheets missing: fail the job and record the error
    jobStatus.FailedValidation(job);
    _jobErrorRepository.Log(ErrorBuilder.GetWorkSheetNameValidationError(job));

    _logger.Info("Job# {0} doesn't have required worksheets", job.Guid);
    _logger.Info("Job# {0} status changed to {1} ", job.Guid, job.Status);
    return false;
}
/// <summary>
/// Checks that the job's user has permission for every indicator in the
/// batch. On failure the job is marked as failed validation.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="indicatorIdsInBatch">Indicator ids referenced by the batch.</param>
/// <returns>True when the user has permission for all indicators.</returns>
private bool CheckPermission(UploadJob job, List<int> indicatorIdsInBatch)
{
    var permissionCheck = new IndicatorPermission();
    if (permissionCheck.Check(indicatorIdsInBatch, job, _jobErrorRepository))
    {
        return true;
    }

    jobStatus.FailedValidation(job);
    _logger.Info("Job# {0}, User doesn't have permission for indicator", job.Guid);
    return false;
}
/// <summary>
/// Writes the validated simple-upload data to the core data set and marks
/// the job as successfully completed with the number of rows uploaded.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="processor">Processor that performs the database upload.</param>
/// <param name="indicatorDetails">Indicator-details worksheet data.</param>
/// <param name="pholioData">Pholio-data worksheet data.</param>
private void UploadDataToCoreDataSet(UploadJob job, ISimpleWorksheetDataProcessor processor, DataTable indicatorDetails, DataTable pholioData)
{
    // Upload to DB
    var uploadResult = processor.UploadData(indicatorDetails, pholioData, job);

    // All good - job done
    jobStatus.SuccessfulUpload(job, uploadResult.DataToUpload.Count);
}
/// <summary>
/// Builds a test-fixture simple upload job with fixed filename/user values
/// and the supplied guid.
/// </summary>
/// <param name="guid">Guid to assign to the new job.</param>
/// <returns>A new, not-yet-started simple upload job.</returns>
private UploadJob CreateJob(Guid guid)
{
    return new UploadJob
    {
        Guid = guid,
        Username = @"phe\farrukh",
        Filename = "Fake.xls",
        DateCreated = DateTime.Now,
        JobType = UploadJobType.Simple,
        TotalRows = 0,
        Status = UploadJobStatus.NotStart
    };
}
/// <summary>
/// Runs the batch-upload pipeline for a job. When the job already has
/// ConfirmationGiven status (the user chose to override duplicate database
/// rows), re-validates, archives the duplicates and uploads. Otherwise runs
/// the full pipeline: worksheet-name check, validation, permission check,
/// in-file and in-database duplicate checks, then upload.
/// Any unexpected exception marks the job as UnexpectedError.
/// </summary>
/// <param name="job">The upload job to process.</param>
/// <param name="nameValidator">Validator for the required worksheet names.</param>
/// <param name="processor">Processor performing validation and the database upload.</param>
/// <param name="excelFileReader">Reader for the uploaded workbook.</param>
public void ProcessJob(UploadJob job, IWorksheetNameValidator nameValidator, IBatchWorksheetDataProcessor processor, IExcelFileReader excelFileReader)
{
    try
    {
        _jobRepository = new UploadJobRepository();
        _jobErrorRepository = new UploadJobErrorRepository();
        var batchUpload = ToBatchUpload(job);
        jobStatus = new StatusHelper(_jobRepository, _logger);
        _logger.Info("Job# {0} current status is {1} ", job.Guid, job.Status);

        // If user wants to override duplications
        if (job.Status == UploadJobStatus.ConfirmationGiven)
        {
            jobStatus.InProgress(job);

            // Read indicators in datatable
            var batchDataTable = GetBatchData(excelFileReader);

            // Save the total number of rows in file.
            // FIX: this is a batch upload, so isSimpleUpload must be false.
            // Passing true made the count use the simple row parser and stop
            // at the first row it considered empty.
            WorkerHelper.UpdateNumberOfRowsInFile(job, batchDataTable, _jobRepository, false);

            // Perform validation once again to get the list
            // of duplicate rows in database
            processor.Validate(batchDataTable, batchUpload);

            // Remove duplications in file
            CheckDuplicateRowsInWorksheet(job, batchUpload, ref batchDataTable);

            // Archive rows
            processor.ArchiveDuplicates(batchUpload.DuplicateRowInDatabaseErrors, job);

            // Upload data to core data set
            UploadDataToCoreDataSet(job, processor, batchDataTable);
        }
        else // If we have a new job
        {
            jobStatus.InProgress(job);

            // Get worksheets from file
            var worksheets = excelFileReader.GetWorksheets();

            UpdateJobProgress(job, ProgressStage.ValidatingWorksheets);

            // Check worksheet names are correct
            var worksheetsOk = CheckWorksheets(job, nameValidator, worksheets);
            if (!worksheetsOk) return;

            var batchDataTable = GetBatchData(excelFileReader);

            // Save the total number of rows in file.
            // FIX: batch upload, so isSimpleUpload = false (see above).
            WorkerHelper.UpdateNumberOfRowsInFile(job, batchDataTable, _jobRepository, false);

            UpdateJobProgress(job, ProgressStage.ValidatingData);
            processor.Validate(batchDataTable, batchUpload);
            var indicatorIdsInBatch = processor.GetIndicatorIdsInBatch();

            UpdateJobProgress(job, ProgressStage.CheckingPermission);

            // Check user permission for indicators
            var permissionsOk = CheckPermission(job, indicatorIdsInBatch);
            if (!permissionsOk) return;

            UpdateJobProgress(job, ProgressStage.CheckingDuplicationInFile);

            // Check for duplications in file
            CheckDuplicateRowsInWorksheet(job, batchUpload, ref batchDataTable);

            // Check validation errors
            var validationOk = CheckValidationFailures(job, batchUpload);
            if (!validationOk) return;

            UpdateJobProgress(job, ProgressStage.CheckingDuplicationInDb);

            // Check for duplications database rows; duplicates require
            // user confirmation before the upload can proceed
            var haveDuplicates = CheckDuplicateRowsInDatabase(job, batchUpload);
            if (haveDuplicates) return;

            UploadDataToCoreDataSet(job, processor, batchDataTable);
        }
    }
    catch (Exception ex)
    {
        jobStatus.UnexpectedError(job);
        _logger.Error(ex);
    }
}
/// <summary>
/// Marks the job as having failed validation, persists the change and
/// logs the status transition.
/// </summary>
/// <param name="job">The job whose status is being changed.</param>
public void FailedValidation(UploadJob job)
{
    job.Status = UploadJobStatus.FailedValidation;

    _jobRepository.UpdateJob(job);

    _logger.Info("Job# {0} status changed to {1} ", job.Guid, job.Status);
}
/// <summary>
/// Marks the job as awaiting user confirmation (e.g. to override duplicate
/// database rows), persists the change and logs the status transition.
/// </summary>
/// <param name="job">The job whose status is being changed.</param>
public void ConfirmationAwaited(UploadJob job)
{
    job.Status = UploadJobStatus.ConfirmationAwaited;

    _jobRepository.UpdateJob(job);

    _logger.Info("Job# {0} status changed to {1} ", job.Guid, job.Status);
}
/// <summary>
/// Marks the job as terminated by an unexpected error, persists the change
/// and logs the status transition.
/// </summary>
/// <param name="job">The job whose status is being changed.</param>
public void UnexpectedError(UploadJob job)
{
    job.Status = UploadJobStatus.UnexpectedError;

    _jobRepository.UpdateJob(job);

    _logger.Info("Job# {0} status changed to {1} ", job.Guid, job.Status);
}
/// <summary>
/// Resolves the on-disk path of the uploaded file: files are stored in the
/// configured upload folder under the job guid with the original extension.
/// </summary>
/// <param name="job">The job whose uploaded file is wanted.</param>
/// <returns>The full path to the uploaded file on disk.</returns>
public static string GetActualFilePath(UploadJob job)
{
    var extension = Path.GetExtension(job.Filename);
    return Path.Combine(AppConfig.GetUploadFolder(), job.Guid + extension);
}
/// <summary>
/// Looks up the FPM user for the job's user id and stores the resolved
/// username on the job.
/// </summary>
/// <param name="job">The job to update.</param>
private void SetUsername(UploadJob job)
{
    job.Username = _profilesReader.GetUserByUserId(job.UserId).UserName;
}
/// <summary>
/// Runs the simple-upload pipeline for a job. When the job already has
/// ConfirmationGiven status (the user chose to override duplicate database
/// rows), re-validates, archives the duplicates and uploads. Otherwise runs
/// the full pipeline: worksheet-name check, validation, permission check,
/// in-file and in-database duplicate checks, then upload.
/// Any unexpected exception marks the job as UnexpectedError.
/// </summary>
/// <param name="job">The upload job to process.</param>
/// <param name="nameValidator">Validator for the required worksheet names.</param>
/// <param name="processor">Processor performing validation and the database upload.</param>
/// <param name="excelFileReader">Reader for the uploaded workbook.</param>
public void ProcessJob(UploadJob job, IWorksheetNameValidator nameValidator, ISimpleWorksheetDataProcessor processor, IExcelFileReader excelFileReader)
{
    try
    {
        _jobRepository = new UploadJobRepository();
        _jobErrorRepository = new UploadJobErrorRepository();

        // Create SimpleUpload object from job
        var simpleUpload = ToSimpleUpload(job);

        jobStatus = new StatusHelper(_jobRepository, _logger);
        _logger.Info("Job# {0} current status is {1} ", job.Guid, job.Status);

        // User has already confirmed overriding duplicate database rows
        if (job.Status == UploadJobStatus.ConfirmationGiven)
        {
            jobStatus.InProgress(job);

            // Get indicator details worksheet as data table
            var indicatorDetails = excelFileReader.GetIndicatorDetails();

            // Get pholio data worksheet as data table
            var pholioData = excelFileReader.GetPholioData();

            // Save the total number of rows in file
            WorkerHelper.UpdateNumberOfRowsInFile(job, pholioData, _jobRepository, true);

            // Validate the Data
            processor.Validate(indicatorDetails, pholioData, simpleUpload);

            // Remove duplicate rows
            CheckDuplicateRowsInWorksheet(job, simpleUpload, ref pholioData);

            // Archive rows
            processor.ArchiveDuplicates(simpleUpload.DuplicateRowInDatabaseErrors, job);

            // Upload data to core data set
            UploadDataToCoreDataSet(job, processor, indicatorDetails, pholioData);
        }
        else
        {
            // Update the job status to in progress
            jobStatus.InProgress(job);

            // Get worksheets from file
            var worksheets = excelFileReader.GetWorksheets();

            // Check worksheet names are correct
            var worksheetsOk = CheckWorksheets(job, worksheets, nameValidator);
            if (!worksheetsOk) return;

            // Get indicator details worksheet as data table
            var indicatorDetails = excelFileReader.GetIndicatorDetails();

            // Get pholio data worksheet as data table
            var pholioData = excelFileReader.GetPholioData();

            // Save the total number of rows in file
            WorkerHelper.UpdateNumberOfRowsInFile(job, pholioData, _jobRepository, true);

            // Validate the Data
            processor.Validate(indicatorDetails, pholioData, simpleUpload);

            // Check user permission for indicator (simple uploads have exactly one)
            var indicatorIds = new List<int> { simpleUpload.IndicatorId };
            var permissionsOk = CheckPermission(indicatorIds, job);
            if (!permissionsOk) return;

            // Check for duplications in file, if there will be any duplicate rows
            // we will remove them.
            CheckDuplicateRowsInWorksheet(job, simpleUpload, ref pholioData);

            // Check validation errors
            var validationOk = CheckValidationFailures(job, simpleUpload);
            if (!validationOk) return;

            // Check for duplications database rows; duplicates require
            // user confirmation before the upload can proceed
            var haveDuplicates = CheckDuplicateRowsInDatabase(job, simpleUpload);
            if (haveDuplicates) return;

            // Upload to DB
            UploadDataToCoreDataSet(job, processor, indicatorDetails, pholioData);
        }
    }
    catch (Exception ex)
    {
        jobStatus.UnexpectedError(job);
        _logger.Error(ex);
    }
}
/// <summary>
/// Writes the validated batch data to the core data set and marks the job
/// as successfully completed with the number of rows uploaded.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="processor">Processor that performs the database upload.</param>
/// <param name="batchDataTable">The validated batch data to upload.</param>
private void UploadDataToCoreDataSet(UploadJob job, IBatchWorksheetDataProcessor processor, DataTable batchDataTable)
{
    // FIX: record the writing-to-DB stage BEFORE uploading. Previously this
    // ran after SuccessfulUpload, which persisted a "writing to DB" progress
    // stage on a job already marked as successfully completed.
    UpdateJobProgress(job, ProgressStage.WrittingToDb);

    // Upload to DB
    var rowsUploaded = processor.UploadData(batchDataTable, job).DataToUpload.Count;

    // All good - job completed
    jobStatus.SuccessfulUpload(job, rowsUploaded);
}
/// <summary>
/// Creates an empty BatchUpload for the given job, carrying over the
/// filename and initialising every collection so later pipeline stages
/// can append without null checks.
/// </summary>
/// <param name="job">The job the batch upload belongs to.</param>
/// <returns>A new BatchUpload with empty collections.</returns>
private BatchUpload ToBatchUpload(UploadJob job)
{
    return new BatchUpload
    {
        ShortFileName = job.Filename,
        FileName = job.Filename,
        DataToUpload = new List<UploadDataModel>(),
        DuplicateRowInDatabaseErrors = new List<DuplicateRowInDatabaseError>(),
        DuplicateRowInSpreadsheetErrors = new List<DuplicateRowInSpreadsheetError>(),
        ExcelDataSheets = new List<UploadExcelSheet>(),
        UploadValidationFailures = new List<UploadValidationFailure>()
    };
}
/// <summary>
/// Checks whether validation found rows that already exist in the database.
/// If so, puts the job into the ConfirmationAwaited state and logs a
/// duplicate-row error so the user can decide whether to override.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="batchUpload">Validation results for the batch.</param>
/// <returns>True when duplicate database rows were found (upload must pause).</returns>
private bool CheckDuplicateRowsInDatabase(UploadJob job, BatchUpload batchUpload)
{
    var duplicates = batchUpload.DuplicateRowInDatabaseErrors;
    if (duplicates.Count <= 0)
    {
        return false;
    }

    // Pause the upload until the user confirms overriding the duplicates
    jobStatus.ConfirmationAwaited(job);
    _jobErrorRepository.Log(ErrorBuilder.GetDuplicateRowInDatabaseError(job.Guid, duplicates));

    _logger.Info("Job# {0}, There are duplicate rows in database", job.Guid);
    return true;
}
/// <summary>
/// Archives the core-data rows that the upload is about to overwrite,
/// keyed by the job guid.
/// </summary>
/// <param name="duplicateRows">Database rows duplicated by the upload.</param>
/// <param name="job">The job the archive entries belong to.</param>
public void ArchiveDuplicates(IEnumerable<DuplicateRowInDatabaseError> duplicateRows, UploadJob job)
{
    _coreDataRepository.InsertCoreDataArchive(duplicateRows, job.Guid);
}
/// <summary>
/// Checks whether validation produced any data-conversion failures. If so,
/// marks the job as failed validation and logs a conversion error.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="batchUpload">Validation results for the batch.</param>
/// <returns>True when there were no validation failures.</returns>
private bool CheckValidationFailures(UploadJob job, BatchUpload batchUpload)
{
    var failures = batchUpload.UploadValidationFailures;
    if (failures.Count <= 0)
    {
        return true;
    }

    jobStatus.FailedValidation(job);
    _jobErrorRepository.Log(ErrorBuilder.GetConversionError(job.Guid, failures));

    _logger.Info("Job# {0}, Data type conversion errors occurred ", job.Guid);
    return false;
}
/// <summary>
/// Persists a newly created upload job via the job repository.
/// </summary>
/// <param name="newJob">The job to save.</param>
private void AddNewJob(UploadJob newJob)
{
    _uploadJobRepository.SaveJob(newJob);
}
/// <summary>
/// If validation found duplicate rows within the batch worksheet, replaces
/// the data table with a de-duplicated copy and logs what happened.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="batchUpload">Validation results for the batch.</param>
/// <param name="batchDataTable">Worksheet data; replaced when duplicates are removed.</param>
private void CheckDuplicateRowsInWorksheet(UploadJob job, BatchUpload batchUpload, ref DataTable batchDataTable)
{
    if (batchUpload.DuplicateRowInSpreadsheetErrors.Count > 0)
    {
        var dataWithoutDuplicates = new FileDuplicationHandler().RemoveDuplicatesInBatch(batchDataTable);
        batchDataTable = dataWithoutDuplicates;
        _logger.Info("Job# {0}, There are duplicate rows in spreadsheet", job.Guid);
        // FIX: corrected "Dupllicate" typo in the log message
        _logger.Info("Job# {0}, Duplicate rows removed", job.Guid);
    }
}
/// <summary>
/// Builds the user-facing error recorded when the uploaded workbook is
/// missing the worksheets required for the job's type.
/// </summary>
/// <param name="job">The job whose workbook failed worksheet-name validation.</param>
/// <returns>A worksheet-validation error for the job.</returns>
public static UploadJobError GetWorkSheetNameValidationError(UploadJob job)
{
    // The required worksheet names differ between simple and batch uploads
    var requiredWorksheets = job.JobType == UploadJobType.Simple
        ? @" ""Pholio"" worksheet."
        : @" ""IndicatorDetails"" and ""PholioData"" worksheets.";

    var errorText = "The uploaded spreadsheet must contain" + requiredWorksheets;

    return CreateError(job.Guid, UploadJobErrorType.WorkSheetValidationError, errorText, null);
}
/// <summary>
/// If validation found duplicate rows within the pholio worksheet, replaces
/// the data table with a de-duplicated copy and logs what happened.
/// </summary>
/// <param name="job">The job being processed.</param>
/// <param name="simpleUpload">Validation results for the simple upload.</param>
/// <param name="pholioData">Worksheet data; replaced when duplicates are removed.</param>
private void CheckDuplicateRowsInWorksheet(UploadJob job, SimpleUpload simpleUpload, ref DataTable pholioData)
{
    if (simpleUpload.DuplicateRowInSpreadsheetErrors.Count > 0)
    {
        var dataWithoutDuplicates = new FileDuplicationHandler().RemoveDuplicatesInSimple(pholioData);
        pholioData = dataWithoutDuplicates;
        _logger.Info("Job# {0}, There are duplicate rows in spreadsheet", job.Guid);
        // FIX: corrected "Dupllicate" typo in the log message
        _logger.Info("Job# {0}, Duplicate rows removed", job.Guid);
    }
}