public async Task ImportFiltersAndLocations(DataColumnCollection cols, DataRowCollection rows,
    SubjectMeta subjectMeta, StatisticsDbContext context, Guid releaseId, string dataFileName)
{
    // Clearing the caches is required here as the seeder shares the cache with all subjects
    _importerFilterService.ClearCache();
    _importerLocationService.ClearCache();

    var headers = CsvUtil.GetColumnValues(cols);
    var rowCount = 1;
    var totalRows = rows.Count;

    foreach (DataRow row in rows)
    {
        // Every STAGE_2_ROW_CHECK rows, check whether the import has been cancelled and report progress
        if (rowCount % STAGE_2_ROW_CHECK == 0)
        {
            var currentStatus = await _importStatusService.GetImportStatus(releaseId, dataFileName);

            if (currentStatus.IsFinishedOrAborting())
            {
                _logger.LogInformation(
                    $"Import for {dataFileName} has finished or is being aborted, " +
                    "so finishing importing Filters and Locations early");
                return;
            }

            await _importStatusService.UpdateStatus(releaseId, dataFileName, IStatus.STAGE_2,
                (double) rowCount / totalRows * 100);
        }

        CreateFiltersAndLocationsFromCsv(context, CsvUtil.GetRowValues(row), headers, subjectMeta.Filters);
        rowCount++;
    }
}
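// A minimal usage sketch, not taken from the real pipeline: it shows one way the cols/rows
// arguments above could be produced by loading a data CSV into a System.Data.DataTable, whose
// Columns and Rows collections are then passed to ImportFiltersAndLocations. The class and
// method names below, and the naive Split(',') parsing with no quoted-field handling, are
// illustrative assumptions only; the real project may load and parse its files quite differently.
using System.Data;
using System.IO;

public static class CsvDataTableSketch
{
    public static DataTable LoadCsvIntoDataTable(string path)
    {
        var table = new DataTable();
        using var reader = new StreamReader(path);

        // The first line is assumed to be the header row
        var headerLine = reader.ReadLine() ?? throw new InvalidDataException($"Empty CSV file: {path}");
        foreach (var header in headerLine.Split(','))
        {
            table.Columns.Add(header.Trim());
        }

        // Each remaining line becomes a data row
        string line;
        while ((line = reader.ReadLine()) != null)
        {
            table.Rows.Add(line.Split(','));
        }

        return table;
    }
}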
public SubjectMeta Get(DataColumnCollection cols, DataRowCollection rows, Subject subject,
    StatisticsDbContext context)
{
    // Build the subject's filters and indicators from the metadata rows
    var metaRows = GetMetaRows(CsvUtil.GetColumnValues(cols), rows);
    var filters = GetFilters(metaRows, subject, context).ToList();
    var indicators = GetIndicators(metaRows, subject, context).ToList();

    return new SubjectMeta
    {
        Filters = filters,
        Indicators = indicators
    };
}
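// A hedged orchestration sketch, assuming the methods in this section live on a single injected
// "importer" instance of a hypothetical ImporterService class (the real project may split them
// across services and add error handling). It shows how the SubjectMeta returned by Get above
// supplies the Filters consumed by ImportFiltersAndLocations. LoadCsvIntoDataTable is the
// hypothetical helper from the previous sketch, and the separate meta file is also an assumption.
using System;
using System.Threading.Tasks;

public static class SubjectImportSketch
{
    public static async Task ImportFiltersAndLocationsForSubject(
        ImporterService importer,        // hypothetical class name for the methods shown here
        Subject subject,
        StatisticsDbContext context,
        Guid releaseId,
        string dataFileName,
        string metaFileName)
    {
        var dataTable = CsvDataTableSketch.LoadCsvIntoDataTable(dataFileName);
        var metaTable = CsvDataTableSketch.LoadCsvIntoDataTable(metaFileName);

        // Build the subject's filters and indicators from the metadata file
        var subjectMeta = importer.Get(metaTable.Columns, metaTable.Rows, subject, context);

        // Stage 2: create the filters and locations referenced by the data file
        await importer.ImportFiltersAndLocations(dataTable.Columns, dataTable.Rows, subjectMeta,
            context, releaseId, dataFileName);
    }
}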
public async Task ImportObservations(DataColumnCollection cols, DataRowCollection rows, Subject subject,
    SubjectMeta subjectMeta, int batchNo, int rowsPerBatch, StatisticsDbContext context)
{
    // Clear the shared filter and location caches before processing this batch
    _importerFilterService.ClearCache();
    _importerLocationService.ClearCache();

    var observations = GetObservations(
        context,
        rows,
        CsvUtil.GetColumnValues(cols),
        subject,
        subjectMeta,
        batchNo,
        rowsPerBatch).ToList();

    await InsertObservations(context, observations);
}
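// A hedged batching sketch: the ImportObservations signature above takes a batch number and a
// rows-per-batch size, which suggests callers import observations in fixed-size batches. The
// 1-based batch numbering, the in-memory partitioning below and the ImporterService name are
// assumptions for illustration; the real pipeline may instead split the file into batch files
// up front.
using System;
using System.Data;
using System.Linq;
using System.Threading.Tasks;

public static class ObservationBatchingSketch
{
    public static async Task ImportObservationsInBatches(
        ImporterService importer,        // hypothetical class name for the methods shown here
        DataTable dataTable,
        Subject subject,
        SubjectMeta subjectMeta,
        StatisticsDbContext context,
        int rowsPerBatch = 1000)         // illustrative batch size
    {
        var totalRows = dataTable.Rows.Count;
        var batchCount = (int) Math.Ceiling((double) totalRows / rowsPerBatch);

        for (var batchNo = 1; batchNo <= batchCount; batchNo++)
        {
            // Copy this batch's rows into a schema-only clone so the importer sees a single batch
            var batchTable = dataTable.Clone();
            var batchRows = dataTable.AsEnumerable()
                .Skip((batchNo - 1) * rowsPerBatch)
                .Take(rowsPerBatch);

            foreach (var row in batchRows)
            {
                batchTable.ImportRow(row);
            }

            await importer.ImportObservations(batchTable.Columns, batchTable.Rows, subject,
                subjectMeta, batchNo, rowsPerBatch, context);
        }
    }
}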