private IEnumerable<Observation> GetObservations(
    StatisticsDbContext context,
    DataRowCollection rows,
    List<string> headers,
    Subject subject,
    SubjectMeta subjectMeta,
    int batchNo,
    int rowsPerBatch)
{
    // Builds one Observation per CSV data row, skipping any observation
    // whose geographic level appears in the ignore list.
    var result = new List<Observation>();
    var rowIndex = 0;

    foreach (DataRow row in rows)
    {
        // Position of this row within the whole file, accounting for rows
        // consumed by earlier batches; the +2 presumably offsets 1-based
        // line numbering plus the header line — TODO confirm.
        var csvRowNumber = (batchNo - 1) * rowsPerBatch + rowIndex + 2;
        rowIndex++;

        var observation = ObservationFromCsv(
            context,
            CsvUtil.GetRowValues(row).ToArray(),
            headers,
            subject,
            subjectMeta,
            csvRowNumber);

        if (!IgnoredGeographicLevels.Contains(observation.GeographicLevel))
        {
            result.Add(observation);
        }
    }

    return result;
}
public async Task ImportFiltersAndLocations(
    DataColumnCollection cols,
    DataRowCollection rows,
    SubjectMeta subjectMeta,
    StatisticsDbContext context,
    Guid releaseId,
    string dataFileName)
{
    // Clearing the caches is required here as the seeder shares the cache
    // with all subjects.
    _importerFilterService.ClearCache();
    _importerLocationService.ClearCache();

    var headers = CsvUtil.GetColumnValues(cols);
    var totalRows = rows.Count;
    var processedRows = 1;

    foreach (DataRow row in rows)
    {
        // Every STAGE_2_ROW_CHECK rows: bail out early if the import has
        // been finished/aborted elsewhere, otherwise report progress.
        if (processedRows % STAGE_2_ROW_CHECK == 0)
        {
            var currentStatus = await _importStatusService.GetImportStatus(releaseId, dataFileName);

            if (currentStatus.IsFinishedOrAborting())
            {
                _logger.LogInformation(
                    $"Import for {dataFileName} has finished or is being aborted, " +
                    $"so finishing importing Filters and Locations early");
                return;
            }

            await _importStatusService.UpdateStatus(
                releaseId,
                dataFileName,
                IStatus.STAGE_2,
                (double) processedRows / totalRows * 100);
        }

        CreateFiltersAndLocationsFromCsv(context, CsvUtil.GetRowValues(row), headers, subjectMeta.Filters);
        processedRows++;
    }
}
public static int GetYear(IReadOnlyList<string> line, List<string> headers)
{
    // Extracts the 4-digit year from the row's "time_period" column.
    var timePeriod = CsvUtil.Value(line, headers, "time_period");

    // Validate the value up front. The previous implementation called
    // Substring(0, 4) and int.Parse unconditionally, so a short value threw
    // ArgumentOutOfRangeException and a non-numeric one threw
    // FormatException instead of the intended InvalidTimePeriodException;
    // it also reported null rather than the offending value.
    if (timePeriod == null || timePeriod.Length < 4 ||
        !int.TryParse(timePeriod.Substring(0, 4), out var year))
    {
        throw new InvalidTimePeriodException(timePeriod);
    }

    return year;
}
public SubjectMeta Get(DataColumnCollection cols, DataRowCollection rows, Subject subject, StatisticsDbContext context)
{
    // Reads the meta rows out of the CSV and translates them into the
    // subject's filters and indicators.
    var metaRows = GetMetaRows(CsvUtil.GetColumnValues(cols), rows);

    return new SubjectMeta
    {
        Filters = GetFilters(metaRows, subject, context).ToList(),
        Indicators = GetIndicators(metaRows, subject, context).ToList()
    };
}
public static TimeIdentifier GetTimeIdentifier(IReadOnlyList<string> line, List<string> headers)
{
    // Resolves the row's "time_identifier" column to a TimeIdentifier enum
    // member by matching against each member's enum label.
    var timeIdentifier = CsvUtil.Value(line, headers, "time_identifier");

    // Guard against a missing value: the previous implementation called
    // ToLower() unconditionally and would have thrown NullReferenceException
    // here. The ToLower() call itself was also redundant — the comparison
    // below is already case-insensitive — and culture-sensitive (CA1304,
    // e.g. the Turkish-I problem), so it has been removed.
    if (timeIdentifier != null)
    {
        foreach (var value in Enum.GetValues(typeof(TimeIdentifier)).Cast<TimeIdentifier>())
        {
            if (value.GetEnumLabel().Equals(timeIdentifier, StringComparison.InvariantCultureIgnoreCase))
            {
                return value;
            }
        }
    }

    throw new InvalidTimeIdentifierException(timeIdentifier);
}
public static MetaRow GetMetaRow(List<string> cols, DataRow row)
{
    // Maps a raw meta CSV row onto a strongly-typed MetaRow, using the
    // MetaColumns enum names to select the relevant values from the row.
    return CsvUtil.BuildType(
        CsvUtil.GetRowValues(row),
        cols,
        Enum.GetNames(typeof(MetaColumns)),
        values =>
        {
            // DecimalPlaces is optional: absent means null, not zero.
            var decimalPlaces = values[7] == null ? (int?) null : int.Parse(values[7]);

            return new MetaRow
            {
                ColumnName = values[0],
                ColumnType = (ColumnType) Enum.Parse(typeof(ColumnType), values[1]),
                Label = values[2],
                FilterGroupingColumn = values[3],
                FilterHint = values[4],
                IndicatorGrouping = values[5],
                IndicatorUnit = EnumUtil.GetFromString<Unit>(values[6] ?? ""),
                DecimalPlaces = decimalPlaces
            };
        });
}
public async Task ImportObservations(
    DataColumnCollection cols,
    DataRowCollection rows,
    Subject subject,
    SubjectMeta subjectMeta,
    int batchNo,
    int rowsPerBatch,
    StatisticsDbContext context)
{
    // The filter and location caches are shared across subjects, so reset
    // them before processing this batch.
    _importerFilterService.ClearCache();
    _importerLocationService.ClearCache();

    var headers = CsvUtil.GetColumnValues(cols);
    var observations = GetObservations(context, rows, headers, subject, subjectMeta, batchNo, rowsPerBatch)
        .ToList();

    await InsertObservations(context, observations);
}
public static GeographicLevel GetGeographicLevel(IReadOnlyList<string> line, List<string> headers)
{
    // Reads the row's "geographic_level" column and converts it to the enum.
    var rawLevel = CsvUtil.Value(line, headers, "geographic_level");
    return GetGeographicLevelFromString(rawLevel);
}