/// <summary>
/// Asserts that <paramref name="dataRange"/> contains exactly one aggregated point.
/// Data is stored as a flat [time, value] pair list, so one point means two doubles.
/// </summary>
/// <param name="reason">Message for the exception thrown when the check fails.</param>
public void ValidateExactlyOnePoint(AggregatedDataRange dataRange, string reason)
{
    var doubleCount = dataRange.Data.Count;
    if (doubleCount == 2)
    {
        return;
    }
    throw new Exception(reason);
}
/// <summary>
/// Asserts that <paramref name="dataRange"/> was aggregated at the expected interval.
/// </summary>
/// <param name="reason">Message for the exception thrown when the check fails.</param>
public void ValidateDataRangesIsOfAggregation(AggregatedDataRange dataRange, int aggregationSeconds, string reason)
{
    var matches = dataRange.AggregationSeconds == aggregationSeconds;
    if (!matches)
    {
        throw new Exception(reason);
    }
}
/// <summary>Joining a null range into a null existing range should produce the null expectation.</summary>
public void JoinNullBoth()
{
    AggregatedDataRange nothingToInsert = null;
    AggregatedDataRange nothingExisting = null;
    TestJoinTwoDataRanges(nothingToInsert, nothingExisting, null);
}
/// <summary>
/// Generates random aggregated test data covering <paramref name="timeRange"/>. The range is
/// quantised to <paramref name="quantisedInterval"/>, generated chunk-by-chunk, then filtered
/// back down to the originally requested range.
/// </summary>
/// <param name="name">Series name passed through to the chunk generator.</param>
/// <param name="timeRange">Requested time range.</param>
/// <param name="minValue">Lower bound for generated values.</param>
/// <param name="maxValue">Upper bound for generated values.</param>
/// <param name="maxVariance">Maximum point-to-point variance passed to the chunk generator.</param>
/// <param name="aggregationSeconds">Seconds per aggregated point.</param>
/// <param name="quantisedInterval">Chunk size in seconds; must be a whole multiple of <paramref name="aggregationSeconds"/>.</param>
/// <exception cref="Exception">When the quantised interval is not a multiple of the aggregation interval.</exception>
public AggregatedDataRange GenerateData(string name, TimeRange timeRange, double minValue, double maxValue, double maxVariance, int aggregationSeconds, int quantisedInterval)
{
    // Integer modulo replaces the previous decimal-division "multiple % 1 != 0" check;
    // it is equivalent for whole/non-whole multiples and avoids the decimal round-trip.
    if (quantisedInterval % aggregationSeconds != 0)
    {
        throw new Exception("Quantised interval must be a multiple of the aggregation interval");
    }
    var quantisedTimeRange = timeRange.Quantise(quantisedInterval);
    var quantisedSpan = (int)(quantisedTimeRange.Max - quantisedTimeRange.Min);
    var chunks = quantisedSpan / quantisedInterval;
    var points = quantisedSpan / aggregationSeconds;
    var pointsPerChunk = quantisedInterval / aggregationSeconds;
    // Flat [time, value] pair layout: two doubles per point.
    var data = new double[points * 2];
    for (int i = 0; i < chunks; i++)
    {
        var startPoint = i * pointsPerChunk;
        var endPoint = (i + 1) * pointsPerChunk;
        var startIndex = startPoint * 2;
        var endIndex = endPoint * 2;
        var startTime = quantisedTimeRange.Min + startPoint * aggregationSeconds;
        var endTime = quantisedTimeRange.Min + endPoint * aggregationSeconds;
        GenerateChunk(name, startTime, endTime, startIndex, endIndex, data, minValue, maxValue, maxVariance, aggregationSeconds);
    }
    var all = new AggregatedDataRange(quantisedTimeRange, data.ToList(), aggregationSeconds);
    // Trim the quantised superset back down to exactly the requested range.
    return _filterer.FilterDataRange(all, timeRange);
}
/// <summary>Verifies that the time ranges of two (data-less) ranges report as touching.</summary>
public void TestTouches(int minOne, int maxOne, int minTwo, int maxTwo)
{
    var first = new AggregatedDataRange(minOne, maxOne, new List<double>(), 50);
    var second = new AggregatedDataRange(minTwo, maxTwo, new List<double>(), 50);
    var touches = first.TimeRange.Touches(second.TimeRange);
    touches.Should().BeTrue();
}
/// <summary>
/// Joins <paramref name="insert"/> into the existing <paramref name="old"/> ranges and
/// asserts the result is exactly the single <paramref name="expected"/> range.
/// </summary>
private void TestJoinTwoDataRanges(AggregatedDataRange insert, List<AggregatedDataRange> old, AggregatedDataRange expected)
{
    var joined = _rangeJoiner.JoinDataRangeToDataRanges(old, insert).ToList();
    var expectedRanges = new List<AggregatedDataRange> { expected };
    joined.Should().BeEquivalentTo(expectedRanges);
}
/// <summary>Inserting a range with no data over existing data leaves the existing data unchanged.</summary>
public void JoinNullInsertData()
{
    var emptyInsert = new AggregatedDataRange(200, 300, new List<double>(), 50);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    TestJoinTwoDataRanges(emptyInsert, existing, existing);
}
/// <summary>Inserting data over an existing empty range yields just the inserted data.</summary>
public void JoinNullInsertOld()
{
    var newData = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var emptyExisting = new AggregatedDataRange(200, 300, new List<double>(), 50);
    TestJoinTwoDataRanges(newData, emptyExisting, newData);
}
/// <summary>A range entirely before the existing one remains a separate range after joining.</summary>
public void JoinBefore()
{
    var earlier = new AggregatedDataRange(100, 200, new List<double> { 150, 16, 200, 21 }, 50);
    var later = new AggregatedDataRange(300, 400, new List<double> { 350, 35, 400, 40 }, 50);
    TestJoinTwoDataRanges(earlier, later, earlier, later);
}
/// <summary>
/// Joins <paramref name="insert"/> into a set containing only <paramref name="old"/> and
/// asserts the result is exactly the two expected ranges.
/// </summary>
private void TestJoinTwoDataRanges(AggregatedDataRange insert, AggregatedDataRange old, AggregatedDataRange expectedOne, AggregatedDataRange expectedTwo)
{
    var existing = new List<AggregatedDataRange> { old };
    var joined = _rangeJoiner.JoinDataRangeToDataRanges(existing, insert).ToList();
    var expectedRanges = new List<AggregatedDataRange> { expectedOne, expectedTwo };
    joined.Should().BeEquivalentTo(expectedRanges);
}
/// <summary>A range entirely after the existing one remains a separate range after joining.</summary>
public void JoinAfter()
{
    var later = new AggregatedDataRange(400, 500, new List<double> { 450, 46, 500, 51 }, 50);
    var earlier = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    TestJoinTwoDataRanges(later, earlier, later, earlier);
}
/// <summary>Subtracting a time range that covers the whole data range removes it entirely.</summary>
public void SubtractTimeFromSingleRangeTouchingStartOverlappingEnd()
{
    var subtraction = new TimeRange(200, 400);
    var source = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var remaining = _rangeFilterer.SubtractTimeRangeFromRange(source, subtraction);
    remaining.Should().BeEquivalentTo(new List<AggregatedDataRange>());
}
/// <summary>
/// Returns data for <paramref name="sourceName"/> aggregated to
/// <paramref name="aggregationSeconds"/> over the requested UTC window
/// (quantised to the aggregation interval). An empty range is returned when no raw data exists.
/// </summary>
public async Task<AggregatedDataRange> GetAggregatedDataRange(string sourceName, int aggregationSeconds, [FromQuery] DateTime minTimeUtc, [FromQuery] DateTime maxTimeUtc)
{
    var timeRange = new TimeRange(minTimeUtc, maxTimeUtc).Quantise(aggregationSeconds);
    var raw = await GetRawRange(sourceName, timeRange);
    if (raw.Data.Count == 0)
    {
        return new AggregatedDataRange(timeRange, new List<double>(), aggregationSeconds);
    }
    // NOTE(review): the raw source is assumed to be 5-second data — confirm against ingestion.
    var range = new AggregatedDataRange(timeRange, raw.Data, 5, true);
    var aggregated = _aggregator.Aggregate(new[] { range }, timeRange, new[] { aggregationSeconds });
    // Bug fix: the results were previously indexed with the hard-coded key 300 instead of the
    // requested interval, returning the wrong (or no) series for any other aggregation.
    return aggregated[aggregationSeconds].FirstOrDefault();
}
/// <summary>An insert touching the start and ending inside overwrites only the overlapped point.</summary>
public void JoinTouchingStartInside()
{
    var incoming = new AggregatedDataRange(200, 250, new List<double> { 250, 26 }, 50);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var merged = new AggregatedDataRange(200, 300, new List<double> { 250, 26, 300, 30 }, 50);
    TestJoinTwoDataRanges(incoming, existing, merged);
}
/// <summary>An insert overlapping the start extends the range and overwrites the overlapped point.</summary>
public void JoinOverlappingStart()
{
    var incoming = new AggregatedDataRange(100, 250, new List<double> { 150, 16, 200, 21, 250, 26 }, 50);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var merged = new AggregatedDataRange(100, 300, new List<double> { 150, 16, 200, 21, 250, 26, 300, 30 }, 50);
    TestJoinTwoDataRanges(incoming, existing, merged);
}
/// <summary>An insert that fully covers the existing range replaces it entirely.</summary>
public void JoinCoveringOver()
{
    var incoming = new AggregatedDataRange(100, 400, new List<double> { 150, 16, 200, 21, 250, 26, 300, 31, 350, 36, 400, 41 }, 50);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var merged = new AggregatedDataRange(100, 400, new List<double> { 150, 16, 200, 21, 250, 26, 300, 31, 350, 36, 400, 41 }, 50);
    TestJoinTwoDataRanges(incoming, existing, merged);
}
/// <summary>Joining a range into an empty set yields just that range.</summary>
public void AddToEmpty()
{
    var newRange = new AggregatedDataRange(100, 200, new List<double> { 150, 16, 200, 21 }, 50);
    var existing = new List<AggregatedDataRange>();
    var result = _rangeJoiner.JoinDataRangesToDataRanges(existing, new List<AggregatedDataRange> { newRange });
    result.Should().BeEquivalentTo(new List<AggregatedDataRange> { newRange });
}
/// <summary>An insert overlapping the end extends the range and overwrites the overlapped point.</summary>
public void JoinOverlappingEnd()
{
    var incoming = new AggregatedDataRange(250, 350, new List<double> { 300, 31, 350, 36 }, 50);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var merged = new AggregatedDataRange(200, 350, new List<double> { 250, 25, 300, 31, 350, 36 }, 50);
    TestJoinTwoDataRanges(incoming, existing, merged);
}
/// <summary>An insert fully inside the existing range overwrites only the covered point.</summary>
public void JoinInside()
{
    var incoming = new AggregatedDataRange(240, 260, new List<double> { 260, 27 }, 20);
    var existing = new AggregatedDataRange(200, 300, new List<double> { 220, 22, 240, 24, 260, 26, 280, 28, 300, 30 }, 20);
    var merged = new AggregatedDataRange(200, 300, new List<double> { 220, 22, 240, 24, 260, 27, 280, 28, 300, 30 }, 20);
    TestJoinTwoDataRanges(incoming, existing, merged);
}
/// <summary>Subtracting a time range entirely before the data range leaves the data untouched.</summary>
public void SubtractTimeFromSingleRangeBefore()
{
    var subtraction = new TimeRange(10, 100);
    var source = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var remaining = _rangeFilterer.SubtractTimeRangeFromRange(source, subtraction);
    var unchanged = new List<AggregatedDataRange>
    {
        new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50)
    };
    remaining.Should().BeEquivalentTo(unchanged);
}
/// <summary>Aggregating a window only partially backed by data still emits a bucket averaging the available points.</summary>
public void TestNotFull()
{
    var source = new AggregatedDataRange(300, 400, new List<double> { 350, 35, 400, 40 }, 50);
    var result = _aggregator.Aggregate(new List<AggregatedDataRange> { source }, new TimeRange(200, 400), 200);
    var expectedBucket = new AggregatedDataRange(200, 400, new List<double> { 400, 37.5 }, 200);
    result.Should().BeEquivalentTo(new List<AggregatedDataRange> { expectedBucket });
}
/// <summary>
/// Routes an aggregated range to the attribute series grain mapped to this source series,
/// timing the operation via the trip counter. Errors are logged (event 105) and rethrown.
/// </summary>
public async Task AddData(AggregatedDataRange aggregatedRange)
{
    try
    {
        using (_tripCounterFactory.Create("SourceSeriesGrain.AddData"))
        {
            _logger.LogDebug("Preparing to add data");
            var mapping = await _mapProvider.GetSeriesModelMapping(_dataSource, _sourceName);
            var seriesId = SeriesIdHelper.ToAttributeSeriesId(mapping.Entity, mapping.Attribute);
            IAttributeSeriesGrain series = GrainClient.GrainFactory.GetGrain<IAttributeSeriesGrain>(seriesId);
            await series.ReceiveData(aggregatedRange);
        }
    }
    catch (Exception exception)
    {
        _logger.LogError(new EventId(105), exception, "Error when adding aggregated data");
        throw;
    }
}
/// <summary>Merging a new range that ends exactly where an existing one starts yields one continuous range.</summary>
public void MergeEndToEnd()
{
    var existing = new AggregatedDataRange(500, 600, new List<double> { 550, 55, 600, 60 }, 50);
    var incoming = new AggregatedDataRange(400, 500, new List<double> { 450, 45, 500, 50 }, 50);
    var result = _aggregatedMerger.MergeRangeWithRanges(new List<AggregatedDataRange> { existing }, incoming);
    var merged = new AggregatedDataRange(400, 600, new List<double> { 450, 45, 500, 50, 550, 55, 600, 60 }, 50);
    result.Should().BeEquivalentTo(new List<AggregatedDataRange> { merged });
}
/// <summary>
/// Entry point for adding an aggregated range for a source series. The actual grain write is
/// currently disabled — the commented-out block below held the real work — so the timed
/// section deliberately completes immediately. Errors are logged (event 108) and rethrown.
/// NOTE(review): confirm whether the disabled grain call is meant to be restored or removed.
/// </summary>
public async Task AddData(Guid dataSource, string sourceName, AggregatedDataRange aggregatedRange) { try { await _tripCounterFactory.Run("IngestionProcessor.AddData", () => { return(Task.CompletedTask); /*ISourceSeriesGrain series = GrainClient.GrainFactory.GetGrain<ISourceSeriesGrain>(SeriesIdHelper.ToSourceSeriesId(dataSource, sourceName)); * _logger.LogDebug("Saving slice"); * Stopwatch stopwatch = Stopwatch.StartNew(); * await series.AddData(aggregatedRange); * _logger.LogDebug($"Saved slice number {Interlocked.Increment(ref _count)} in {stopwatch.ElapsedMilliseconds} ms");*/ }); } catch (Exception exception) { _logger.LogError(new EventId(108), exception, "Error when adding aggregated ranges"); throw; } }
/// <summary>
/// Lazily decodes the flat [time, value] pair list of <paramref name="source"/> into
/// <see cref="Datum"/> items. A trailing time without a matching value is ignored
/// (same as the original behaviour for odd-length data).
/// </summary>
public static IEnumerable<Datum> GetData(this AggregatedDataRange source)
{
    using (var dataEnumerator = source.Data.GetEnumerator())
    {
        while (dataEnumerator.MoveNext())
        {
            var time = dataEnumerator.Current;
            if (!dataEnumerator.MoveNext())
            {
                // Odd number of doubles: the final time has no value — drop it.
                yield break;
            }
            // Bug fix: allocate a fresh Datum per pair. The original mutated and re-yielded a
            // single shared instance, so materialising the sequence (e.g. ToList) left every
            // element equal to the last pair when Datum is a reference type. For a struct
            // Datum this change is behaviourally identical.
            yield return new Datum { Time = time, Value = dataEnumerator.Current };
        }
    }
}
/// <summary>A range bridging the gap between two existing ranges merges all three into one.</summary>
public void JoinBetweenTwoRanges()
{
    var left = new AggregatedDataRange(200, 300, new List<double> { 250, 25, 300, 30 }, 50);
    var bridge = new AggregatedDataRange(300, 400, new List<double> { 350, 35, 400, 40 }, 50);
    var right = new AggregatedDataRange(400, 500, new List<double> { 450, 45, 500, 50 }, 50);
    var merged = new AggregatedDataRange(200, 500, new List<double> { 250, 25, 300, 30, 350, 35, 400, 40, 450, 45, 500, 50 }, 50);
    var result = _rangeJoiner.JoinDataRangeToDataRanges(new List<AggregatedDataRange> { left, right }, bridge).ToList();
    result.Should().BeEquivalentTo(new List<AggregatedDataRange> { merged });
}
/// <summary>
/// Accepts realtime aggregated data for a source, resolves its entity/attribute mapping,
/// and forwards the range to the processor.
/// NOTE(review): <paramref name="aggregationSeconds"/> is part of the route but unused here —
/// presumably the interval is carried inside <paramref name="dataRange"/>; confirm.
/// </summary>
public async Task AddRealtimeAggregatedData(Guid dataSource, string sourceName, int aggregationSeconds, [FromBody] AggregatedDataRange dataRange) { var(entity, attribute) = await _model.ResolveEntityAndAttribute(dataSource, sourceName); await _processor.ReceiveRealtimeAggregatedData(entity, attribute, dataRange); }
/// <summary>
/// Accepts historical aggregated data for a source, logs a summary (point count is
/// Data.Count / 2 because data is a flat [time, value] pair list), resolves the
/// entity/attribute mapping, and forwards the range to the processor.
/// NOTE(review): <paramref name="aggregationSeconds"/> is only used in the log message — confirm intended.
/// </summary>
public async Task AddHistoricalAggregatedData(Guid dataSource, string sourceName, int aggregationSeconds, [FromBody] AggregatedDataRange dataRange) { _logger.LogDebug($"Received historical aggregated data for {sourceName} {aggregationSeconds} from {dataRange?.TimeRange?.Min.ToDateTime():s} to {dataRange?.TimeRange?.Max.ToDateTime():s} with {dataRange?.Data?.Count / 2} points"); var(entity, attribute) = await _model.ResolveEntityAndAttribute(dataSource, sourceName); await _processor.ReceiveHistoricalData(entity, attribute, dataRange); }
/// <summary>Forwards realtime aggregates for <paramref name="sourceName"/> to the connected client.</summary>
public async Task ReceiveRealtimeAggregates(string sourceName, AggregatedDataRange dataRange) =>
    await _client.SendRealtimeAggregatedData(sourceName, dataRange);
/// <summary>
/// Persists a range's points into the sorted set keyed by series and aggregation interval,
/// scored by time with "time:value" members.
/// </summary>
private async Task SaveRange(Guid series, AggregatedDataRange dataRange)
{
    var key = GetDataKey(series, dataRange.AggregationSeconds);
    // Bug fix: GetData() is an extension on AggregatedDataRange (it decodes the flat
    // [time, value] pair list), so call it on the range itself rather than on Data.
    var entries = dataRange.GetData().Select(x => new SortedSetEntry($"{x.Time}:{x.Value}", x.Time)).ToArray();
    await _db.SortedSetAddAsync(key, entries);
}