/// <summary>
/// Bulk-inserts the serialized rows in <paramref name="queueRecordToPopulate"/> into the
/// import worker queue table. Failures are recorded as data-level violations instead of
/// being rethrown, so the manager can continue with the remaining batches.
/// </summary>
/// <param name="managerQueueRecord">Queue record supplying the job/workspace metadata for each row.</param>
/// <param name="queueRecordToPopulate">Map of load-file line number to serialized object JSON.</param>
/// <param name="violations">Collector for any errors encountered while writing.</param>
/// <param name="timeOfInsert">UTC timestamp stamped onto every inserted row.</param>
private async Task WriteQueueRecordsAsync(ImportManagerQueueRecord managerQueueRecord, Dictionary<long, String> queueRecordToPopulate, List<ImportJobError> violations, DateTime timeOfInsert)
{
	try
	{
		var dt = new DataTable();
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.JobID, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.WorkspaceArtifactID, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.ObjectType, typeof(String));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.JobType, typeof(String));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.MetaData, typeof(String));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.ImportRowID, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.QueueStatus, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.Priority, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.ResourceGroupID, typeof(Int32));
		dt.Columns.Add(Constant.Sql.ColumnsNames.ImportWorkerQueue.TimeStampUTC, typeof(DateTime));

		foreach (var record in queueRecordToPopulate)
		{
			// record.Key is the load-file line number, record.Value the serialized object.
			dt.Rows.Add(managerQueueRecord.JobId, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.ObjectType, managerQueueRecord.JobType, record.Value, record.Key, Constant.Status.Queue.NOT_STARTED, managerQueueRecord.Priority, managerQueueRecord.ResourceGroupId, timeOfInsert);
		}

		await SqlQueryHelper.BulkInsertIntoTableAsync(AgentHelper.GetDBContext(-1), dt, Constant.Tables.ImportWorkerQueue, Constant.BatchSizes.ImportManagerIntoWorkerQueue);
	}
	catch (Exception ex)
	{
		// Fix: Min()/Max() throw InvalidOperationException on an empty key collection,
		// which would mask the original bulk-insert exception. Fall back to 0 so the
		// real error is always reported as a violation.
		var beginErrorLine = queueRecordToPopulate.Keys.DefaultIfEmpty(0L).Min();
		var endErrorLine = queueRecordToPopulate.Keys.DefaultIfEmpty(0L).Max();
		violations.Add(new ImportJobError()
		{
			Message = String.Format(Constant.ErrorMessages.ImportQueueManagerPopulatingImportWorkerQueueError, beginErrorLine, endErrorLine),
			LineNumber = null,
			Type = Constant.ImportUtilityJob.ErrorType.DataLevel,
			Details = ex.ToString()
		});
	}
}
/// <summary>
/// Writes the expected-import count onto the job RDO. Only validate-and-submit jobs
/// track this statistic; all other job types are a no-op.
/// </summary>
private async Task UpdateJobStatisticsAsync(ImportManagerQueueRecord managerQueueRecord, Int32 expectedNumberOfImports)
{
	// Guard: nothing to record unless this is a validate-and-submit job.
	if (managerQueueRecord.JobType != Constant.ImportUtilityJob.JobType.VALIDATE_SUBMIT)
	{
		return;
	}

	await _artifactQueryHelper.UpdateRdoTextFieldValueAsync(
		_apiOptions,
		managerQueueRecord.WorkspaceArtifactId,
		_rsapiRepositoryGroup.RdoRepository,
		managerQueueRecord.JobId,
		Constant.Guids.ObjectType.ImportUtilityJob,
		Constant.Guids.Field.ImportUtilityJob.Expected,
		expectedNumberOfImports);
}
/// <summary>
/// Dispatches the queue record to the handler for its job type (validate / import /
/// revert). An unrecognized job type is a programming error and throws.
/// </summary>
/// <exception cref="MarkupUtilityException">Thrown when the job type is not recognized.</exception>
public async Task ProcessRecordsAsync(ImportManagerQueueRecord importManagerQueueRecord)
{
	RaiseMessage($"Processing record(s). [Table = {QueueTable}, ID = {RecordId}, Workspace Artifact ID = {WorkspaceArtifactId}]");

	var jobType = importManagerQueueRecord.JobType;
	if (jobType == Constant.ImportJobType.VALIDATE)
	{
		await ValidateImportJobAsync(importManagerQueueRecord);
	}
	else if (jobType == Constant.ImportJobType.IMPORT)
	{
		await ImportJobAsync(importManagerQueueRecord);
	}
	else if (jobType == Constant.ImportJobType.REVERT)
	{
		await RevertJobAsync();
	}
	else
	{
		throw new MarkupUtilityException("Invalid Import Job Type");
	}

	RaiseMessage($"Processed record(s). [Table = {QueueTable}, ID = {RecordId}, Workspace Artifact ID = {WorkspaceArtifactId}]");
}
/// <summary>
/// Reads the load file through <paramref name="parser"/>, serializes each row via the
/// migration object, and flushes rows to the import worker queue table in batches.
/// Row-level failures are recorded as data-level violations; parser failures are fatal.
/// </summary>
/// <returns>The number of data lines processed (the header row is not counted).</returns>
/// <exception cref="AdminMigrationUtilityException">
/// Thrown when the parser itself fails — that indicates a load-file problem the user
/// must fix before retrying the job.
/// </exception>
private async Task<Int32> PopulateWorkerQueueAsync(ImportManagerQueueRecord managerQueueRecord, IParser parser, IAdminObject migrationObject, List<ImportJobError> violations)
{
	var loadFileColumns = parser.ParseColumns();
	var workerQueue = new Dictionary<long, String>();
	Int32 lineNumber = LongToInt(parser.LineNumber);
	var timeOfInsert = DateTime.UtcNow;
	try
	{
		while (parser.Read())
		{
			try
			{
				var adminObjectDictionary = new Dictionary<String, String>();
				foreach (var column in loadFileColumns)
				{
					var value = parser[column] == null ? String.Empty : parser[column].ToString();
					adminObjectDictionary.Add(column, value);
				}
				await migrationObject.MapObjectColumnsAsync(adminObjectDictionary);
				var serializedJson = await _serializationHelper.SerializeAdminObjectAsync(migrationObject);
				workerQueue.Add(lineNumber, serializedJson);
			}
			catch (Exception ex)
			{
				// A single bad row should not abort the job; record it and keep going.
				violations.Add(new ImportJobError()
				{
					Message = String.Format(Constant.ErrorMessages.ImportQueueManagerLineNumberError, lineNumber, ex.Message),
					LineNumber = lineNumber >= Int32.MaxValue ? null : (Int32?)lineNumber,
					Type = Constant.ImportUtilityJob.ErrorType.DataLevel,
					Details = ex.ToString()
				});
			}

			// Fix: the original flushed whenever Count % batchSize == 0, which is also
			// true when Count == 0 (e.g. the very first row failed to serialize above),
			// causing a pointless empty bulk insert on every subsequent failed row.
			if (workerQueue.Count > 0 && (workerQueue.Count % Constant.BatchSizes.ImportManagerIntoWorkerQueue) == 0)
			{
				await WriteQueueRecordsAsync(managerQueueRecord, workerQueue, violations, timeOfInsert);
				workerQueue.Clear();
			}

			// The parser returns the current line from the Read above and advances the
			// cursor to the next line. Logs should report the line the data values came
			// from, so lineNumber must be updated last.
			// parser.LineNumber is -1 when the parser reaches the end of the file and we
			// don't want to report that value.
			if (parser.LineNumber != -1)
			{
				lineNumber = LongToInt(parser.LineNumber);
			}
		}

		// Flush the final partial batch, if any.
		if (workerQueue.Any())
		{
			await WriteQueueRecordsAsync(managerQueueRecord, workerQueue, violations, timeOfInsert);
		}

		// Minus 1 because the header row should not be counted.
		return (lineNumber - 1) < 0 ? 0 : lineNumber - 1;
	}
	catch (Exception ex)
	{
		// Thrown instead of being recorded in the violation list because parser errors
		// indicate load-file issues; the user should update the file before retrying.
		throw new AdminMigrationUtilityException(String.Format(Constant.ErrorMessages.UnableToParseLineNumberError, lineNumber), ex);
	}
}
/// <summary>
/// Agent entry point: resets unfinished jobs, pulls the next queue record for this
/// agent's resource pool, processes it, and removes it from the queue. Any exception is
/// recorded on the import job and logged rather than propagated.
/// </summary>
public override async Task ExecuteAsync()
{
	// Reset count properties before each run.
	_importFileRedactionCount = 0;
	_expectedRedactionCount = 0;

	try
	{
		// Check for jobs which stopped unexpectedly on this agent thread.
		RaiseMessage($"Resetting records which failed. [Table = {QueueTable}]");
		await ResetUnfishedJobsAsync(AgentHelper.GetDBContext(-1));

		// Retrieve the next record to work on.
		RaiseMessage($"Retrieving next record(s) in the queue. [Table = {QueueTable}]");
		var resourceIdList = GetCommaDelimitedListOfResourceIds(AgentResourceGroupIds);
		if (resourceIdList == string.Empty)
		{
			RaiseMessage(Constant.AgentRaiseMessages.AGENT_SERVER_NOT_PART_OF_ANY_RESOURCE_POOL);
			return;
		}

		var nextRecordTable = await RetrieveNextAsync(resourceIdList);
		if (!TableIsNotEmpty(nextRecordTable))
		{
			RaiseMessage(Constant.AgentRaiseMessages.NO_RECORDS_IN_QUEUE_FOR_THIS_RESOURCE_POOL);
			return;
		}

		var queueRecord = new ImportManagerQueueRecord(nextRecordTable.Rows[0]);

		// Expose the workspace and record ids so the agent can report them if an
		// exception occurs later.
		WorkspaceArtifactId = queueRecord.WorkspaceArtifactId;
		RecordId = queueRecord.Id;
		RaiseMessage($"Retrieved record(s) in the queue. [Table = {QueueTable}, ID = {RecordId}, Workspace Artifact ID = {WorkspaceArtifactId}]");

		_importJobArtifactId = queueRecord.ImportJobArtifactId;

		// Process the record(s), then delete the import job from the queue.
		await ProcessRecordsAsync(queueRecord);
		await FinishAsync();
	}
	catch (Exception ex)
	{
		var innerMostExceptionMessage = await ConstructDetailsExceptionMessageAsync(ex);

		// Mark the import job as completed-with-errors, store the inner-most exception
		// message in the details field, and log the error.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.ERROR, innerMostExceptionMessage);
		await LogErrorAsync(ex);
	}
}
/// <summary>
/// Derives imported / not-imported totals from the job's expected count, the collected
/// errors, and the number of worker rows, then writes them back to the job RDO.
/// </summary>
private async Task UpdateJobStatisticsAsync(ImportManagerQueueRecord managerQueueRecord, IEnumerable<ImportJobError> jobErrors, Int32 numberOfWorkerRecords)
{
	var importJobRdo = await GetImportJob(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);

	// Expected count was recorded on the RDO when the job was submitted; default to 0 if unset.
	var expectedNumberOfImports = importJobRdo[Constant.Guids.Field.ImportUtilityJob.Expected].ValueAsWholeNumber.GetValueOrDefault();

	var importedCount = Utility.CalculateImportJobImports(jobErrors, expectedNumberOfImports, numberOfWorkerRecords);
	var notImportedCount = Utility.CalculateImportJobObjectsThatWereNotImported(jobErrors, expectedNumberOfImports, numberOfWorkerRecords);

	await _artifactQueryHelper.UpdateImportJobStatistics(
		_rsapiRepositoryGroup.RdoRepository,
		_apiOptions,
		managerQueueRecord.WorkspaceArtifactId,
		managerQueueRecord.JobId,
		importedCount,
		notImportedCount);
}
/// <summary>
/// Retrieves the import job RDO referenced by the queue record, running as the current user.
/// </summary>
private async Task<MarkupUtilityImportJob> RetrieveImportJobAsync(ImportManagerQueueRecord importManagerQueueRecord)
{
	// Fix: _errorContext already includes its own surrounding brackets (see how it is
	// built in ValidateImportJobAsync / ImportJobAsync), so the original's trailing ']'
	// logged a doubled "]]".
	RaiseMessage($"Retrieving import job. {_errorContext}");
	return await ArtifactQueries.RetrieveImportJobAsync(
		AgentHelper.GetServicesManager(),
		ExecutionIdentity.CurrentUser,
		WorkspaceArtifactId,
		importManagerQueueRecord.ImportJobArtifactId);
}
/// <summary>
/// Validates an import job: downloads the job file to a temp location, checks its
/// contents, and moves the job status VALIDATING -> VALIDATED (or VALIDATION_FAILED on
/// error). The temp file and directory are always cleaned up.
/// </summary>
private async Task ValidateImportJobAsync(ImportManagerQueueRecord importManagerQueueRecord)
{
	_errorContext = $"[WorkspaceArtifactId = {WorkspaceArtifactId}, ImportJobArtifactId = {importManagerQueueRecord.ImportJobArtifactId}]";
	RaiseMessage($"Validating import job. {_errorContext}");

	// Temporary location the job's file is downloaded to; removed in the finally block.
	var tempFileName = $"MarkupUtilitiesImportFile_{Guid.NewGuid()}.csv";
	var temporaryDirectory = GetTemporaryDirectory();
	var tempFileLocation = Path.Combine(temporaryDirectory, tempFileName);

	try
	{
		_markupUtilityImportJob = await RetrieveImportJobAsync(importManagerQueueRecord);

		// Mark the job as validating and clear any previous details.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.VALIDATING, string.Empty);

		// Read and validate the contents of the import job file.
		var fileContentsStream = await ReadImportJobFileContentsAsync(tempFileLocation);
		await ValidateImportJobFileContentsAsync(fileContentsStream);

		// Validation succeeded — mark the job as validated.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.VALIDATED, string.Empty);
	}
	catch (Exception ex)
	{
		RaiseMessage($"An exception occured when validating import job. {_errorContext}. [Error Message = {ex.Message}]");
		var innerMostExceptionMessage = await ConstructDetailsExceptionMessageAsync(ex);

		// Mark the job as failed and record the inner-most exception message.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.VALIDATION_FAILED, innerMostExceptionMessage);
		await LogErrorAsync(ex);
	}
	finally
	{
		// Delete the temp file, then its directory.
		if (File.Exists(tempFileLocation))
		{
			File.Delete(tempFileLocation);
		}
		if (Directory.Exists(temporaryDirectory))
		{
			Directory.Delete(temporaryDirectory);
		}
	}

	RaiseMessage($"Validated import job. {_errorContext}");
}
/// <summary>
/// Sends a status-notification email for the given job, using the recipient list stored
/// on the job RDO. Does nothing when no recipients are configured.
/// </summary>
private async Task SendEmail(ImportManagerQueueRecord job, String status)
{
	var importJob = await GetImportJob(job.WorkspaceArtifactId, job.JobId);
	var recipients = importJob[Constant.Guids.Field.ImportUtilityJob.EmailAddresses].ValueAsFixedLengthText;
	var jobName = importJob[Constant.Guids.Field.ImportUtilityJob.Name].ValueAsFixedLengthText;

	String subject = String.Format(Constant.EmailSubject, "import");
	String body = String.Format(Constant.EmailBody, "Import", jobName, job.WorkspaceName, job.WorkspaceArtifactId, status);

	// Only send when the job actually has notification addresses configured.
	if (!String.IsNullOrWhiteSpace(recipients))
	{
		await Emailer.EmailUtility.SendEmail(
			AgentHelper.GetDBContext(-1),
			recipients,
			subject,
			body,
			new SmtpClientSettings(AgentHelper.GetDBContext(-1), SqlQueryHelper));
	}
}
/// <summary>
/// Verifies that ImportManagerQueueRecord maps the columns of a queue-table row onto
/// its properties.
/// </summary>
public void Constructor_ReceivesTable_Initializes()
{
	// Arrange
	var sourceRow = GetTable().Rows[0];

	// Act
	var record = new ImportManagerQueueRecord(sourceRow);

	// Assert
	Assert.AreEqual(2345678, record.WorkspaceArtifactId);
	Assert.AreEqual(1, record.RecordId);
	Assert.AreEqual(3456789, record.JobId);
	Assert.AreEqual(2, record.Priority);
}
/// <summary>
/// Copies the workspace and row ids off the queue record onto the agent-level
/// properties so later reporting can reference them.
/// </summary>
private void SetJobProperties(ImportManagerQueueRecord importManagerQueueRecord)
{
	WorkspaceArtifactId = importManagerQueueRecord.WorkspaceArtifactId;
	TableRowId = importManagerQueueRecord.RecordId;
}
/// <summary>
/// Pulls the next import job for this resource pool, downloads and parses its load
/// file, validates it, and populates the import worker queue. Up-front validation
/// failures error the job; population failures are file-level errors; anything that
/// escapes is recorded as a job-level error and the job is marked for retry.
/// </summary>
private async Task PopulateWorkerQueueRecordsAsync(String delimitedListOfResourceGroupIds)
{
	IParser parser = null;
	FileStream downloadedFile = null;
	ImportManagerQueueRecord managerQueueRecord = null;
	var violations = new List<ImportJobError>();

	try
	{
		DataTable next = await RetrieveNextAsync(delimitedListOfResourceGroupIds);
		if (!TableIsNotEmpty(next))
		{
			RaiseAndLogDebugMessage("No records in the queue for this resource pool.");
			return;
		}

		managerQueueRecord = new ImportManagerQueueRecord(next.Rows[0]);
		SetJobProperties(managerQueueRecord);
		await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.IN_PROGRESS_MANAGER);

		// Shared suffix for every debug message about this queue record.
		var logContext = $"[Table = {QueueTable}, ID = {managerQueueRecord.RecordId}, Workspace Artifact ID = {managerQueueRecord.WorkspaceArtifactId}]";

		RaiseAndLogDebugMessage($"Initializing Job {managerQueueRecord.JobId}. {logContext}");
		RDO importJob = await GetImportJob(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
		var jobFileField = importJob[Constant.Guids.Field.ImportUtilityJob.ImportFile];

		RaiseAndLogDebugMessage($"Downloading File {jobFileField.ValueAsFixedLengthText}. {logContext}");
		await DownloadFile(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, jobFileField.ArtifactID, LocalTempFilePath);

		RaiseAndLogDebugMessage($"Opening File {LocalTempFilePath}. {logContext}");
		downloadedFile = await OpenFileAsync(LocalTempFilePath);

		RaiseAndLogDebugMessage($"Creating Delimited File Parser. {logContext}");
		parser = await CreateParserAsync(downloadedFile, violations);

		RaiseAndLogDebugMessage($"Creating Import Object. {logContext}");
		var migrationObject = await GetMigrationObject(managerQueueRecord.ObjectType, violations);

		RaiseAndLogDebugMessage($"Validating load file columns. {logContext}");
		await ValidateColumnsAsync(migrationObject, parser, violations);
		await ValidateFileDoesNotContainExtraColumnsAsync(migrationObject, parser, managerQueueRecord.ObjectType, violations);

		RaiseAndLogDebugMessage($"Validating load file minimum data requirement. {logContext}");
		await ValidateFileHasMetaData(parser, violations);

		if (violations.Any())
		{
			// File failed up-front validation: record the violations, error the job,
			// clear its queue rows, and notify the submitter.
			RaiseAndLogDebugMessage($"Violations found, recording them in the Job's error log. {logContext}");
			await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, violations, managerQueueRecord.ObjectType);
			await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.ERROR);
			await ClearQueueRecords(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
			await SendEmail(managerQueueRecord, Constant.Status.Job.ERROR);
			return;
		}

		try
		{
			RaiseAndLogDebugMessage($"Populating Worker Queue Table. {logContext}");
			var numberProcessed = await PopulateWorkerQueueAsync(managerQueueRecord, parser, migrationObject, violations);
			await UpdateJobStatisticsAsync(managerQueueRecord, numberProcessed);

			// Record any row-level violations collected while populating, hand the job
			// off to the workers, and mark the manager phase complete.
			await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, violations, managerQueueRecord.ObjectType);
			await SqlQueryHelper.UpdateQueueStatusAsync(AgentHelper.GetDBContext(-1), managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Tables.ImportManagerQueue, Constant.Sql.ColumnsNames.ImportManagerQueue.QueueStatus, Constant.Status.Queue.WAITING_FOR_WORKERS_TO_FINISH);
			await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.COMPLETED_MANAGER);
		}
		catch (Exception ex)
		{
			// Population failed: file-level error, error the job, clear its queue rows,
			// and notify the submitter.
			var fileLevelError = new ImportJobError() { Message = ex.Message, Type = Constant.ImportUtilityJob.ErrorType.FileLevel, Details = ex.ToString(), LineNumber = null };
			await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, new[] { fileLevelError }, managerQueueRecord.ObjectType);
			await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.ERROR);
			await ClearQueueRecords(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId);
			await SendEmail(managerQueueRecord, Constant.Status.Job.ERROR);
		}
	}
	catch (Exception ex)
	{
		// Anything that escapes to here is a job-level failure; record it (if we got far
		// enough to know which job) and mark the job for retry, then rethrow.
		if (managerQueueRecord != null)
		{
			var jobLevelError = new ImportJobError() { Message = ex.Message, Type = Constant.ImportUtilityJob.ErrorType.JobLevel, LineNumber = null, Details = ex.ToString() };
			await _artifactQueryHelper.CreateImportJobErrorRecordsAsync(_apiOptions, managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, _rsapiRepositoryGroup.RdoRepository, new[] { jobLevelError }, managerQueueRecord.ObjectType);
			await UpdateJobStatusAsync(managerQueueRecord.WorkspaceArtifactId, managerQueueRecord.JobId, Constant.Status.Job.RETRY);
		}
		throw;
	}
	finally
	{
		if (parser != null)
		{
			parser.Dispose();
		}
		if (downloadedFile != null)
		{
			downloadedFile.Close();
		}
	}
}
/// <summary>
/// Runs the manager phase of an import job: stages the load file through a holding
/// table into the import worker queue, moving the job status IN_PROGRESS_MANAGER ->
/// COMPLETED_MANAGER (or ERROR on failure). The holding table and temp file are always
/// cleaned up.
/// </summary>
private async Task ImportJobAsync(ImportManagerQueueRecord importManagerQueueRecord)
{
	_errorContext = $"[WorkspaceArtifactId = {WorkspaceArtifactId}, ImportJobArtifactId = {importManagerQueueRecord.ImportJobArtifactId}]";
	RaiseMessage($"Processing import job. {_errorContext}");

	// Temporary location the job's file is downloaded to; removed in the finally block.
	var tempFileName = $"MarkupUtilitiesImportFile_{Guid.NewGuid()}.csv";
	var temporaryDirectory = GetTemporaryDirectory();
	var tempFileLocation = Path.Combine(temporaryDirectory, tempFileName);

	try
	{
		await CreateImportManagerHoldingTableAsync();

		_markupUtilityImportJob = await RetrieveImportJobAsync(importManagerQueueRecord);

		// Mark the manager phase as in progress and clear any previous details.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.IN_PROGRESS_MANAGER, string.Empty);

		// Read and parse the job file, then move the parsed rows from the holding table
		// into the import worker queue.
		var fileContentsStream = await ReadImportJobFileContentsAsync(tempFileLocation);
		await ParseImportJobFileContentsAsync(fileContentsStream);
		await BulkCopyDataFromImportManagerHoldingTableIntoImportWorkerQueueTableAsync();

		// Manager phase complete.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.COMPLETED_MANAGER, string.Empty);
	}
	catch (Exception ex)
	{
		RaiseMessage($"An exception occured when processing import job. {_errorContext}. [Error Message = {ex.Message}]");
		var innerMostExceptionMessage = await ConstructDetailsExceptionMessageAsync(ex);

		// Mark the job as errored and record the inner-most exception message.
		await UpdateImportJobStatusAndDetailsFieldAsync(Constant.Status.Job.ERROR, innerMostExceptionMessage);
		await LogErrorAsync(ex);
	}
	finally
	{
		// Drop the holding table, then delete the temp file and its directory.
		await DropImportManagerHoldingTableAsync();
		if (File.Exists(tempFileLocation))
		{
			File.Delete(tempFileLocation);
		}
		if (Directory.Exists(temporaryDirectory))
		{
			Directory.Delete(temporaryDirectory);
		}
	}

	RaiseMessage($"Processed import job. {_errorContext}");
}