/// <summary>
/// Queries the configured external database for time-series points and notes.
/// </summary>
/// <returns>The points converted from the query's result rows, plus any notes loaded from the same connection.</returns>
/// <exception cref="ExpectedException">Thrown when no database type has been configured.</exception>
public (List<TimeSeriesPoint> Points, List<TimeSeriesNote> Notes) LoadPoints()
{
    if (!Context.DbType.HasValue)
        throw new ExpectedException($"/{nameof(Context.DbType)} must be set");

    ValidateContext();

    var pointsQuery = ResolveQuery(Context.DbQuery, nameof(Context.DbQuery));

    Log.Info($"Querying {Context.DbType} database for points ...");

    using (var dbClient = DbClientFactory.CreateOpened(Context.DbType.Value, Context.DbConnectionString))
    {
        var resultTable = dbClient.ExecuteTable(pointsQuery);

        ValidateTable(resultTable);

        // Rows that don't convert to a point (ConvertRowToPoint returns null) are skipped.
        var loadedPoints = resultTable
            .Rows.Cast<DataRow>()
            .Select(ConvertRowToPoint)
            .Where(point => point != null)
            .ToList();

        // Notes are loaded over the same open connection.
        var loadedNotes = LoadNotes(dbClient);

        Log.Info($"Loaded {PointSummarizer.Summarize(loadedPoints)} and {"note".ToQuantity(loadedNotes.Count)} from the database source.");

        return (loadedPoints, loadedNotes);
    }
}
/// <summary>
/// Loads corrected points from the source time-series via the Publish API,
/// resolving grade and qualifier metadata per point, and (unless notes are
/// ignored) appends the series' notes to <c>Notes</c>.
/// </summary>
/// <param name="client">Connected client used for the Publish API requests.</param>
/// <returns>The loaded points.</returns>
private List<TimeSeriesPoint> LoadPointsFromNg(IAquariusClient client)
{
    var timeSeriesInfo = client.GetTimeSeriesInfo(Context.SourceTimeSeries.Identifier);

    Log.Info($"Loading points from '{timeSeriesInfo.Identifier}' ...");

    var correctedData = client.Publish.Get(new TimeSeriesDataCorrectedServiceRequest
    {
        TimeSeriesUniqueId = timeSeriesInfo.UniqueId,
        QueryFrom = Context.SourceQueryFrom?.ToDateTimeOffset(),
        QueryTo = Context.SourceQueryTo?.ToDateTimeOffset()
    });

    // Time-keyed lookups so each point can resolve its effective grade/qualifiers.
    var gradesLookup = new MetadataLookup<PublishGrade>(correctedData.Grades);
    var qualifiersLookup = new MetadataLookup<PublishQualifier>(correctedData.Qualifiers);

    if (!Context.IgnoreNotes)
    {
        Notes.AddRange(LoadAllNotes(client, timeSeriesInfo, correctedData.Notes));
    }

    var loadedPoints = correctedData
        .Points
        .Select(p => new TimeSeriesPoint
        {
            Time = Instant.FromDateTimeOffset(p.Timestamp.DateTimeOffset),
            Value = p.Value.Numeric,
            GradeCode = gradesLookup.GetFirstMetadata(p.Timestamp.DateTimeOffset, g => int.Parse(g.GradeCode)),
            Qualifiers = qualifiersLookup.GetManyMetadata(p.Timestamp.DateTimeOffset, q => q.Identifier).ToList()
        })
        .ToList();

    // Use the most recent gap tolerance; fall back to the "no gaps" sentinel when none is set.
    var toleranceInMinutes = correctedData.GapTolerances.Last().ToleranceInMinutes;

    var gapTolerance = toleranceInMinutes.HasValue
        ? Duration.FromMinutes((long)toleranceInMinutes.Value)
        : DurationExtensions.MaxGapDuration;

    // Most recent interpolation type wins (case-insensitive parse).
    var interpolationType = (InterpolationType)Enum.Parse(
        typeof(InterpolationType),
        correctedData.InterpolationTypes.Last().Type,
        true);

    SetTimeSeriesCreationProperties(
        timeSeriesInfo,
        correctedData.Methods.LastOrDefault()?.MethodCode,
        gapTolerance,
        interpolationType);

    Log.Info($"Loaded {PointSummarizer.Summarize(loadedPoints)} and {"note".ToQuantity(Notes.Count)} from {timeSeriesInfo.Identifier}");

    return loadedPoints;
}
/// <summary>
/// Loads corrected points from an AQUARIUS 3.X system via its Publish API,
/// reconstructing the time-series creation properties from several 3.X
/// responses, and (unless notes are ignored) converting corrections into notes.
/// </summary>
/// <param name="client">Connected client used for the 3.X Publish API requests.</param>
/// <returns>The loaded points.</returns>
/// <exception cref="ExpectedException">Thrown when the source time-series can't be found in its location.</exception>
private List<TimeSeriesPoint> LoadPointsFrom3X(IAquariusClient client)
{
    // NOTE(review): the lookup matches on TargetIdentifier while the error message
    // reports Identifier — presumably intentional, but worth confirming.
    var timeSeriesDescription = client
        .Publish
        .Get(new Get3xTimeSeriesDescription
        {
            LocationIdentifier = Context.SourceTimeSeries.LocationIdentifier,
            Parameter = Context.SourceTimeSeries.Parameter
        })
        .TimeSeriesDescriptions
        .SingleOrDefault(ts => ts.Identifier == Context.SourceTimeSeries.TargetIdentifier);

    if (timeSeriesDescription == null)
        throw new ExpectedException($"Can't find '{Context.SourceTimeSeries.Identifier}' time-series in location '{Context.SourceTimeSeries.LocationIdentifier}'.");

    Log.Info($"Loading points from '{timeSeriesDescription.Identifier}' ...");

    var correctedData = client.Publish.Get(new Get3xCorrectedData
    {
        TimeSeriesIdentifier = Context.SourceTimeSeries.Identifier,
        QueryFrom = Context.SourceQueryFrom?.ToDateTimeOffset(),
        QueryTo = Context.SourceQueryTo?.ToDateTimeOffset()
    });

    var loadedPoints = correctedData
        .Points
        .Select(p => new TimeSeriesPoint
        {
            Time = Instant.FromDateTimeOffset(p.Timestamp),
            Value = p.Value,
            GradeCode = p.Grade
        })
        .ToList();

    // 3.X Publish API's TimeSeriesDescription is missing some info, so grab those pieces from elsewhere

    // The time-range start will always be in the offset of the time-series, even when no points exist
    var utcOffset = Offset.FromHoursAndMinutes(
        correctedData.TimeRange.StartTime.Offset.Hours,
        correctedData.TimeRange.StartTime.Offset.Minutes);

    // We can infer the interpolationType from the last point (if one exists)
    var interpolationType = Context.InterpolationType
        ?? (correctedData.Points.Any()
            ? (InterpolationType?)correctedData.Points.Last().Interpolation
            : null);

    var timeSeries = new TimeSeries
    {
        Identifier = Context.SourceTimeSeries.Identifier,
        Parameter = timeSeriesDescription.Parameter,
        Label = timeSeriesDescription.Label,
        Unit = timeSeriesDescription.Unit,
        Publish = timeSeriesDescription.Publish,
        Description = timeSeriesDescription.Description,
        Comment = timeSeriesDescription.Comment,
        TimeSeriesType = KnownTimeSeriesTypes[timeSeriesDescription.TimeSeriesType],
        UtcOffset = utcOffset,
        ComputationIdentifier = timeSeriesDescription.ComputationIdentifier,
        ComputationPeriodIdentifier = timeSeriesDescription.ComputationPeriodIdentifier,
        SubLocationIdentifier = timeSeriesDescription.SubLocationIdentifier,
        LocationIdentifier = timeSeriesDescription.LocationIdentifier,
        ExtendedAttributeValues = timeSeriesDescription
            .ExtendedAttributes
            .Select(ea => new ExtendedAttributeValue
            {
                ColumnIdentifier = $"{ea.Name.ToUpperInvariant()}@TIMESERIES_EXTENSION",
                Value = ea.Value?.ToString()
            })
            .ToList()
    };

    SetTimeSeriesCreationProperties(timeSeries, interpolationType: interpolationType);

    if (!Context.IgnoreNotes)
    {
        var corrections = client.Publish.Get(new Get3xCorrectionList
        {
            TimeSeriesIdentifier = Context.SourceTimeSeries.Identifier,
            QueryFrom = Context.SourceQueryFrom?.ToDateTimeOffset(),
            QueryTo = Context.SourceQueryTo?.ToDateTimeOffset()
        }).Corrections;

        var utcTimespan = utcOffset.ToTimeSpan();

        Notes.AddRange(corrections.Select(c => Convert3XCorrection(utcTimespan, c)));
    }

    Log.Info($"Loaded {PointSummarizer.Summarize(loadedPoints)} and {"note".ToQuantity(Notes.Count)} from {Context.SourceTimeSeries.Identifier}");

    return loadedPoints;
}
/// <summary>
/// Loads points from a URL, Excel workbook, or CSV file, then optionally
/// removes duplicate-timestamp points and/or realigns the first point to
/// the configured start time.
/// </summary>
/// <param name="path">An absolute URL or a local file path.</param>
/// <returns>The loaded (and possibly de-duplicated/realigned) points.</returns>
/// <exception cref="ExpectedException">Thrown when <paramref name="path"/> is not a URL and the file does not exist.</exception>
private List<TimeSeriesPoint> LoadPoints(string path)
{
    var isUri = Uri.TryCreate(path, UriKind.Absolute, out var uri);

    if (!isUri && !File.Exists(path))
        throw new ExpectedException($"File '{path}' does not exist.");

    var points = isUri
        ? LoadUrlPoints(uri)
        : LoadExcelPoints(path) ?? LoadCsvPoints(path);

    // Gap points carry no usable timestamp ordering, so skip both transformations when any exist.
    var anyGapPoints = points.Any(p => p.Type == PointType.Gap);

    if (Context.CsvRemoveDuplicatePoints && !anyGapPoints)
    {
        points = points
            .OrderBy(p => p.Time)
            .ToList();

        // Single pass over the sorted points, keeping the first point at each
        // timestamp. (Replaces an O(n^2) RemoveAt-in-loop with an O(n) rebuild.)
        var keptPoints = new List<TimeSeriesPoint>(points.Count);
        var duplicatePointCount = 0;

        foreach (var point in points)
        {
            if (keptPoints.Count > 0 && keptPoints[keptPoints.Count - 1].Time == point.Time)
            {
                ++duplicatePointCount;
                Log.Warn($"Discarding duplicate CSV point at {point.Time} with value {point.Value}");
                continue;
            }

            keptPoints.Add(point);
        }

        points = keptPoints;

        if (duplicatePointCount > 0)
        {
            Log.Warn($"Removed {duplicatePointCount} duplicate CSV points.");
        }
    }

    if (Context.CsvRealign && !anyGapPoints)
    {
        points = points
            .OrderBy(p => p.Time)
            .ToList();

        if (points.Any())
        {
            // Shift every point so the earliest point lands exactly on Context.StartTime.
            // ReSharper disable once PossibleInvalidOperationException
            var delta = points.First().Time.Value - Context.StartTime;

            foreach (var point in points)
            {
                // ReSharper disable once PossibleInvalidOperationException
                point.Time = point.Time.Value.Minus(delta);
            }
        }
    }

    Log.Info($"Loaded {PointSummarizer.Summarize(points)} from '{path}'.");

    return points;
}