/// <summary>
/// Resolves the admin (migration) object for the requested import object type.
/// On failure the exception is converted into a job-level violation and null is returned.
/// </summary>
/// <param name="objectType">Name of the import object type to resolve.</param>
/// <param name="violations">Collector that receives a job-level error when resolution fails.</param>
/// <returns>The resolved admin object, or null when resolution threw.</returns>
private async Task<IAdminObject> GetMigrationObject(String objectType, List<ImportJobError> violations)
{
    try
    {
        return await Utility.GetImportObjectSelectionAsync(objectType);
    }
    catch (Exception ex)
    {
        // Surface the failure to the caller through the violations list rather than rethrowing.
        violations.Add(new ImportJobError()
        {
            Message = ex.Message,
            Type = Constant.ImportUtilityJob.ErrorType.JobLevel,
            Details = ex.ToString(),
            LineNumber = null
        });
        return null;
    }
}
/// <summary>
/// Creates a comma-delimited parser over the downloaded load file.
/// On failure the exception is converted into a file-level violation and a null parser is returned.
/// </summary>
/// <param name="downloadedFile">Open stream over the downloaded load file.</param>
/// <param name="violations">Collector that receives a file-level error when construction fails.</param>
/// <returns>A completed task holding the parser, or null when construction threw.</returns>
private Task<IParser> CreateParserAsync(FileStream downloadedFile, List<ImportJobError> violations)
{
    // Fix: the original wrapped this cheap, synchronous constructor call in
    // Task.Run, which pointlessly consumed a thread-pool thread. Constructing
    // inline and returning Task.FromResult preserves the async signature
    // (callers still await a Task<IParser>) without the extra hop.
    IParser retVal = null;
    try
    {
        retVal = new DelimitedFileParser(downloadedFile, Constant.CommaSeparator);
    }
    catch (Exception ex)
    {
        // Report the failure through the violations list; the caller decides how to proceed.
        violations.Add(new ImportJobError()
        {
            Message = ex.Message,
            Details = ex.ToString(),
            Type = Constant.ImportUtilityJob.ErrorType.FileLevel,
            LineNumber = null
        });
    }
    return Task.FromResult(retVal);
}
/// <summary>
/// Processes a single worker-queue record: deserializes its metadata back into an
/// admin object, validates it, and (for VALIDATE_SUBMIT jobs that validated cleanly)
/// imports it. All failures are recorded as data-level errors tied to the record's
/// import row and appended to <paramref name="errors"/>.
/// </summary>
public async Task ProcessRecordsAsync(IHelper helper, IDBContext eddsDbContext, IRsapiRepositoryGroup repositoryGroup, ISqlQueryHelper sqlQueryHelper, ImportWorkerQueueRecord record, List<ImportJobError> errors)
{
    var recordErrors = new List<ImportJobError>();

    // Rehydrate the admin object the manager agent serialized into the queue record.
    IAdminObject adminObject = null;
    try
    {
        adminObject = await _serializationHelper.DeserializeToAdminObjectAsync(record.MetaData);
    }
    catch (Exception ex)
    {
        recordErrors.Add(new ImportJobError()
        {
            Message = Constant.ErrorMessages.GeneralDeSerializationError,
            Details = ex.ToString(),
            Type = Constant.ImportUtilityJob.ErrorType.DataLevel,
            LineNumber = record.ImportRowID
        });
    }

    if (adminObject != null && recordErrors.Count == 0)
    {
        // Validate first; only submit when validation produced no messages and
        // the job actually requested submission.
        var messages = new List<String>(await adminObject.ValidateAsync(_apiOptions, repositoryGroup, _artifactQueryHelper, eddsDbContext, sqlQueryHelper));
        if (record.JobType == Constant.ImportUtilityJob.JobType.VALIDATE_SUBMIT && messages.Count == 0)
        {
            messages.AddRange(await adminObject.ImportAsync(_apiOptions, repositoryGroup, _artifactQueryHelper, helper, eddsDbContext, sqlQueryHelper));
        }

        // Each validation/import message becomes a data-level error for this row.
        foreach (var message in messages)
        {
            recordErrors.Add(new ImportJobError()
            {
                Message = message,
                Type = Constant.ImportUtilityJob.ErrorType.DataLevel,
                LineNumber = record.ImportRowID
            });
        }
    }

    errors.AddRange(recordErrors);
}
/// <summary>
/// Manager-side pipeline for one import job: claims the next manager-queue record,
/// downloads and parses the job's load file, validates it, then either records the
/// violations (job -> ERROR) or fans the file's rows out to the worker queue
/// (job -> COMPLETED_MANAGER, queue -> WAITING_FOR_WORKERS_TO_FINISH).
/// Any unexpected exception marks the job RETRY (when a record was claimed) and rethrows.
/// </summary>
/// <param name="delimitedListOfResourceGroupIds">Comma-delimited resource group IDs this agent serves; scopes the queue query.</param>
private async Task PopulateWorkerQueueRecordsAsync(String delimitedListOfResourceGroupIds)
{
    // Declared outside the try so the finally block can dispose them,
    // and so the outer catch can tell whether a record was actually claimed.
    IParser parser = null;
    FileStream downloadedFile = null;
    ImportManagerQueueRecord managerQueueRecord = null;
    RDO importJob;
    // Helpers below accumulate file/job-level problems here instead of throwing.
    var violations = new List<ImportJobError>();
    try
    {
        DataTable next = await RetrieveNextAsync(delimitedListOfResourceGroupIds);
        if (TableIsNotEmpty(next))
        {
            managerQueueRecord = new ImportManagerQueueRecord(next.Rows[0]);
            SetJobProperties(managerQueueRecord);
            // Mark the job as claimed before doing any real work.
            await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.IN_PROGRESS_MANAGER);
            RaiseAndLogDebugMessage($"Initializing Job {managerQueueRecord.JobId}. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            importJob = await GetImportJob(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
            // The load file lives as a file field on the job RDO.
            var jobFileField = importJob[Constant.Guids.Field.ImportUtilityJob.ImportFile];
            RaiseAndLogDebugMessage($"Downloading File {jobFileField.ValueAsFixedLengthText}. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            await DownloadFile(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, jobFileField.ArtifactID, LocalTempFilePath);
            RaiseAndLogDebugMessage($"Opening File {LocalTempFilePath}. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            downloadedFile = await OpenFileAsync(LocalTempFilePath);
            RaiseAndLogDebugMessage($"Creating Delimited File Parser. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            // Parser/object creation failures are reported via 'violations', not thrown.
            parser = await CreateParserAsync(downloadedFile, violations);
            RaiseAndLogDebugMessage($"Creating Import Object. 
[Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            var migrationObject = await GetMigrationObject(managerQueueRecord.ObjectType, violations);
            // Structural validation of the load file: required columns present,
            // no unexpected columns, and at least some data rows.
            RaiseAndLogDebugMessage($"Validating load file columns. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            await ValidateColumnsAsync(migrationObject, parser, violations);
            await ValidateFileDoesNotContainExtraColumnsAsync(migrationObject, parser, managerQueueRecord.ObjectType, violations);
            RaiseAndLogDebugMessage($"Validating load file minimum data requirement. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
            await ValidateFileHasMetaData(parser, violations);
            if (violations.Any())
            {
                // File failed up-front validation: persist the violations, fail the
                // job, clear its queue entries and notify by email.
                RaiseAndLogDebugMessage($"Violations found, recording them in the Job's error log. [Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
                await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, violations, managerQueueRecord.ObjectType);
                await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.ERROR);
                await ClearQueueRecords(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
                await SendEmail(managerQueueRecord, Constant.Status.Job.ERROR);
            }
            else
            {
                try
                {
                    RaiseAndLogDebugMessage($"Populating Worker Queue Table. 
[Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]");
                    // Fan the file's rows out to the worker queue; row-level
                    // problems found during population land in 'violations'.
                    var numberProcessed = await PopulateWorkerQueueAsync(managerQueueRecord, parser, migrationObject, violations);
                    await UpdateJobStatisticsAsync(managerQueueRecord, numberProcessed);
                    await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, violations, managerQueueRecord.ObjectType);
                    // Hand the job over to the worker agents.
                    await SqlQueryHelper.UpdateQueueStatusAsync(AgentHelper.GetDBContext(-1), managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Tables.ImportManagerQueue, Constant.Sql.ColumnsNames.ImportManagerQueue.QueueStatus, Constant.Status.Queue.WAITING_FOR_WORKERS_TO_FINISH);
                    await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.COMPLETED_MANAGER);
                }
                catch (Exception ex)
                {
                    // Population failed mid-flight: record a file-level error, fail
                    // the job, clear its queue entries and notify by email.
                    var error = new ImportJobError() { Message = ex.Message, Type = Constant.ImportUtilityJob.ErrorType.FileLevel, Details = ex.ToString(), LineNumber = null };
                    await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, new[] { error }, managerQueueRecord.ObjectType);
                    await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.ERROR);
                    await ClearQueueRecords(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
                    await SendEmail(managerQueueRecord, Constant.Status.Job.ERROR);
                }
            }
        }
        else
        {
            RaiseAndLogDebugMessage("No records in the queue for this resource pool.");
        }
    }
    catch (Exception ex)
    {
        // Unexpected failure: if a record was claimed, log a job-level error and
        // mark the job RETRY so another manager pass can pick it up; then rethrow
        // so the agent framework sees the failure.
        if (managerQueueRecord != null)
        {
            var error = new ImportJobError() { Message = ex.Message, Type = Constant.ImportUtilityJob.ErrorType.JobLevel, LineNumber = null, Details = ex.ToString() };
            await 
_artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, new[] { error }, managerQueueRecord.ObjectType);
            await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.RETRY);
        }
        throw;
    }
    finally
    {
        // Always release the parser and the downloaded file handle.
        if (parser != null)
        {
            parser.Dispose();
        }
        if (downloadedFile != null)
        {
            downloadedFile.Close();
        }
    }
}
/// <summary>
/// Worker agent entry point. During the off-hours window it resets work left
/// unfinished on this agent thread, pulls a batch from the import worker queue
/// scoped to this agent's resource pools, processes each row, then records any
/// accumulated errors and deletes the batch from the queue.
/// </summary>
public override async Task ExecuteAsync()
{
    // This agent only runs during the configured off-hours window.
    if (!await IsOffHoursAsync(ProcessedOnDateTime))
    {
        RaiseAndLogDebugMessage($"Current time is not between {OffHoursStartTime} and {OffHoursEndTime}. Agent execution skipped.");
        return;
    }

    // Recover jobs that stopped unexpectedly on this agent thread.
    RaiseAndLogDebugMessage($"Resetting records which failed. [Table = {QueueTable}]");
    await ResetUnfinishedJobsAsync(AgentHelper.GetDBContext(-1));

    // Work is scoped to the resource pools this agent server belongs to.
    RaiseAndLogDebugMessage($"Retrieving next record(s) in the queue. [Table = {QueueTable}]");
    var resourceGroupIdCsv = GetCommaDelimitedListOfResourceIds(AgentResourceGroupIds);
    if (resourceGroupIdCsv == String.Empty)
    {
        RaiseAndLogDebugMessage("This agent server is not part of any resource pools. Agent execution skipped.");
        return;
    }

    DataTable queueBatch = await RetrieveBatchAsync(resourceGroupIdCsv);
    if (!TableIsNotEmpty(queueBatch))
    {
        RaiseAndLogDebugMessage("No records in the queue for this resource pool.");
        return;
    }

    ImportWorkerQueueRecord currentRecord = null;
    var batchErrors = new List<ImportJobError>();

    foreach (DataRow queueRow in queueBatch.Rows)
    {
        try
        {
            currentRecord = new ImportWorkerQueueRecord(queueRow);
            SetJobProperties(currentRecord);
            RaiseAndLogDebugMessage($"Retrieved record(s) in the queue. [Table = {QueueTable}, ID = {TableRowId}, Workspace Artifact ID = {WorkspaceArtifactId}]");

            // First pickup after the manager finished: move the job into IN_PROGRESS_WORKER.
            var jobStatus = await GetImportJobStatus(currentRecord.WorkspaceArtifactID, currentRecord.JobID);
            if (jobStatus == Constant.Status.Job.COMPLETED_MANAGER)
            {
                await UpdateJobStatus(currentRecord.WorkspaceArtifactID, currentRecord.JobID, Constant.Status.Job.IN_PROGRESS_WORKER);
            }

            RaiseAndLogDebugMessage($"Processing record(s). [Table = {QueueTable}, ID = {TableRowId}, Workspace Artifact ID = {WorkspaceArtifactId}]");
            await ProcessRecordsAsync(AgentHelper, AgentHelper.GetDBContext(-1), _repositoryGroup, SqlQueryHelper, currentRecord, batchErrors);
            RaiseAndLogDebugMessage($"Processed record(s). 
[Table = {QueueTable}, ID = {TableRowId}, Workspace Artifact ID = {WorkspaceArtifactId}]");
        }
        catch (Exception ex)
        {
            // A failure before any record was parsed cannot be attributed to a row; rethrow it.
            if (currentRecord == null)
            {
                throw;
            }
            batchErrors.Add(new ImportJobError()
            {
                Message = String.Format(Constant.ErrorMessages.ImportWorkerLineNumberError, currentRecord.ImportRowID, ex.Message),
                Type = Constant.ImportUtilityJob.ErrorType.DataLevel,
                LineNumber = currentRecord.ImportRowID
            });
        }
    }

    // Nothing was successfully parsed out of the batch: nothing to record or delete.
    if (currentRecord == null)
    {
        return;
    }

    if (batchErrors.Any())
    {
        RaiseAndLogDebugMessage($"Recording Error(s). [Table = {QueueTable}, ID = {TableRowId}, Workspace Artifact ID = {WorkspaceArtifactId}]");
        await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, currentRecord.WorkspaceArtifactID, currentRecord.JobID, _repositoryGroup.RdoRepository, batchErrors, currentRecord.ObjectType);
    }

    RaiseAndLogDebugMessage($"Deleting Batch. [Table = {QueueTable}, ID = {TableRowId}, Workspace Artifact ID = {WorkspaceArtifactId}]");
    await SqlQueryHelper.DeleteRecordFromQueueAsync(AgentHelper.GetDBContext(-1), AgentId, currentRecord.WorkspaceArtifactID, currentRecord.JobID, Constant.Tables.ImportWorkerQueue);
}