/// <summary>
/// Decides whether the data in this retention window is still required as input
/// for the next (coarser) rollup policy in the collection's policy chain.
/// </summary>
/// <param name="context">Storage operation context for the current database.</param>
/// <param name="config">The collection's time-series configuration (policy chain).</param>
/// <param name="policy">The policy whose window is being evaluated.</param>
/// <param name="item">Key slice that encodes the document id and series name.</param>
/// <param name="to">Upper bound of the range under consideration.</param>
/// <returns><c>true</c> when a following policy still needs this data; otherwise <c>false</c>.</returns>
private static bool RequiredForNextPolicy(DocumentsOperationContext context, TimeSeriesCollectionConfiguration config, TimeSeriesPolicy policy, Slice item, DateTime to)
{
    var nextPolicy = config.GetNextPolicy(policy);
    if (ReferenceEquals(nextPolicy, TimeSeriesPolicy.AfterAllPolices))
        return false; // last policy in the chain — nothing downstream consumes this data

    var storage = context.DocumentDatabase.DocumentsStorage.TimeSeriesStorage;
    TimeSeriesRollups.SplitKey(item, out var documentId, out var seriesName);

    // the raw series name is everything before the rollup separator
    var rawName = seriesName.Split(TimeSeriesConfiguration.TimeSeriesRollupSeparator)[0];

    var currentStats = storage.Stats.GetStats(context, documentId, policy.GetTimeSeriesName(rawName));
    var nextStats = storage.Stats.GetStats(context, documentId, nextPolicy.GetTimeSeriesName(rawName));

    // end of the last frame the next policy has already aggregated (inclusive)
    var nextPolicyEnd = nextStats.End.Add(nextPolicy.AggregationTime).AddMilliseconds(-1);

    if (nextPolicyEnd > currentStats.End)
        return false; // the next policy is already ahead of what we hold

    return nextPolicyEnd < to; // still required while the next policy lags behind 'to'
}
public async Task TimeSeriesLinqQuery_CanUseSimpleCallExpressionInName()
{
    using (var store = GetDocumentStore())
    {
        // Arrange: a single hourly rollup policy for the 'Users' collection.
        var rawSeriesName = "HeartRate";
        var retention = TimeValue.FromHours(48);
        var hourlyPolicy = new TimeSeriesPolicy("ByHour", TimeValue.FromHours(1), TimeValue.FromHours(24));

        var configuration = new TimeSeriesConfiguration
        {
            Collections = new Dictionary<string, TimeSeriesCollectionConfiguration>
            {
                ["Users"] = new TimeSeriesCollectionConfiguration
                {
                    RawPolicy = new RawTimeSeriesPolicy(TimeValue.FromHours(96)),
                    Policies = new List<TimeSeriesPolicy> { hourlyPolicy }
                }
            }
        };
        await store.Maintenance.SendAsync(new ConfigureTimeSeriesOperation(configuration));

        // Write 100 hourly samples starting far enough back for the policy to kick in.
        var baseline = DateTime.UtcNow.Add(-retention * 2);
        using (var session = store.OpenSession())
        {
            session.Store(new User(), DocId);
            var appender = session.TimeSeriesFor(DocId, rawSeriesName);
            for (var hour = 0; hour < 100; hour++)
            {
                appender.Append(baseline.AddHours(hour), 29 * hour, "watches/fitbit");
            }

            session.SaveChanges();
        }

        var database = await Databases.GetDocumentDatabaseInstanceFor(store);
        await TimeSeries.WaitForPolicyRunnerAsync(database);

        // Act + Assert: query the rollup series by a call expression in its name.
        using (var session = store.OpenSession())
        {
            // todo aviv : remove the single quotes from name when RavenDB-15792 is fixed
            var query = session.Query<User>()
                .Where(u => u.Id == DocId)
                .Select(u => RavenQuery.TimeSeries(u, $"'{hourlyPolicy.GetTimeSeriesName(rawSeriesName)}'")
                    .ToList());

            var result = query.First();

            Assert.Equal(24, result.Count);
        }
    }
}
/// <summary>
/// Performs one pending rollup: aggregates values of the source series (<c>item.Name</c>)
/// into the target rollup series for <paramref name="policy"/>, then removes the processed
/// entry from the rollups <paramref name="table"/>.
/// </summary>
/// <param name="context">Storage operation context for the current database.</param>
/// <param name="table">Rollups table holding pending rollup work items.</param>
/// <param name="item">The pending work item (doc id, series name, next-rollup time, change vector, table key).</param>
/// <param name="policy">The policy whose aggregation frame is being materialized.</param>
/// <param name="config">The collection's time-series configuration (used for max retention clamping).</param>
private void RollupOne(DocumentsOperationContext context, Table table, RollupState item, TimeSeriesPolicy policy, TimeSeriesCollectionConfiguration config)
{
    var tss = context.DocumentDatabase.DocumentsStorage.TimeSeriesStorage;

    // The raw series name is the part before the rollup separator; the
    // target ("into") series name is derived from it and the policy name.
    var rawTimeSeries = item.Name.Split(TimeSeriesConfiguration.TimeSeriesRollupSeparator)[0];
    var intoTimeSeries = policy.GetTimeSeriesName(rawTimeSeries);
    var rollupStart = item.NextRollup.Add(-policy.AggregationTime);

    if (config.MaxRetention < TimeValue.MaxValue)
    {
        // Don't aggregate data that has already fallen out of the collection's
        // maximum retention — clamp the start of the rollup window forward.
        var next = new DateTime(NextRollup(_now.Add(-config.MaxRetention), policy)).Add(-policy.AggregationTime);
        var rollupStartTicks = Math.Max(rollupStart.Ticks, next.Ticks);
        rollupStart = new DateTime(rollupStartTicks);
    }

    var intoReader = tss.GetReader(context, item.DocId, intoTimeSeries, rollupStart, DateTime.MaxValue);
    var previouslyAggregated = intoReader.AllValues().Any();
    if (previouslyAggregated)
    {
        // The target series already has data in this window; if its change vector
        // subsumes the work item's, a prior run already performed this rollup.
        var changeVector = intoReader.GetCurrentSegmentChangeVector();
        if (ChangeVectorUtils.GetConflictStatus(item.ChangeVector, changeVector) == ConflictStatus.AlreadyMerged)
        {
            // this rollup is already done
            table.DeleteByKey(item.Key);
            return;
        }
    }

    // Only the node that is first in the topology performs the aggregation itself
    // (NOTE(review): inferred from the flag name — confirm against the runner setup).
    if (_isFirstInTopology == false)
    {
        return;
    }

    // Aggregate only fully-closed frames: up to (but excluding) the frame containing _now.
    var rollupEnd = new DateTime(NextRollup(_now, policy)).Add(-policy.AggregationTime).AddMilliseconds(-1);
    var reader = tss.GetReader(context, item.DocId, item.Name, rollupStart, rollupEnd);

    if (previouslyAggregated)
    {
        var hasPriorValues = tss.GetReader(context, item.DocId, item.Name, DateTime.MinValue, rollupStart).AllValues().Any();
        if (hasPriorValues == false)
        {
            table.DeleteByKey(item.Key);
            var first = tss.GetReader(context, item.DocId, item.Name, rollupStart, DateTime.MaxValue).First();
            if (first == default)
            {
                return;
            }

            if (first.Timestamp > item.NextRollup)
            {
                // if the 'source' time-series doesn't have any values it is retained,
                // so we need to aggregate only from the next time frame
                using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                {
                    tss.Rollups.MarkForPolicy(context, slicer, policy, first.Timestamp);
                }

                return;
            }
        }
    }

    // rollup from the raw data will generate a 6-value rollup of (first, last, min, max, sum, count);
    // other rollups will aggregate each of those values by the type
    var mode = item.Name.Contains(TimeSeriesConfiguration.TimeSeriesRollupSeparator) ? AggregationMode.FromAggregated : AggregationMode.FromRaw;

    var rangeSpec = new RangeGroup();
    switch (policy.AggregationTime.Unit)
    {
        case TimeValueUnit.Second:
            // second-based frames are a fixed length, expressed in ticks
            rangeSpec.Ticks = TimeSpan.FromSeconds(policy.AggregationTime.Value).Ticks;
            rangeSpec.TicksAlignment = RangeGroup.Alignment.Second;
            break;
        case TimeValueUnit.Month:
            // month-based frames vary in length, kept as a month count
            rangeSpec.Months = policy.AggregationTime.Value;
            break;
        default:
            throw new ArgumentOutOfRangeException(nameof(policy.AggregationTime.Unit), $"Not supported time value unit '{policy.AggregationTime.Unit}'");
    }

    rangeSpec.InitializeRange(rollupStart);

    var values = GetAggregatedValues(reader, rangeSpec, mode);

    if (previouslyAggregated)
    {
        // if we need to re-aggregate we need to delete everything we have from that point on
        var removeRequest = new TimeSeriesStorage.DeletionRangeRequest
        {
            Collection = item.Collection,
            DocumentId = item.DocId,
            Name = intoTimeSeries,
            From = rollupStart,
            To = DateTime.MaxValue,
        };
        tss.DeleteTimestampRange(context, removeRequest);
    }

    // A changed database change vector after the append means something was actually written.
    var before = context.LastDatabaseChangeVector;
    var after = tss.AppendTimestamp(context, item.DocId, item.Collection, intoTimeSeries, values, verifyName: false);
    if (before != after)
    {
        RolledUp++;
    }

    table.DeleteByKey(item.Key);

    var stats = tss.Stats.GetStats(context, item.DocId, item.Name);
    if (stats.End > rollupEnd)
    {
        // we know that we have values after the current rollup and we need to mark them
        var nextRollup = rollupEnd.AddMilliseconds(1);
        intoReader = tss.GetReader(context, item.DocId, item.Name, nextRollup, DateTime.MaxValue);
        if (intoReader.Init() == false)
        {
            Debug.Assert(false, "We have values but no segment?");
            return;
        }

        using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
        {
            tss.Rollups.MarkForPolicy(context, slicer, policy, intoReader.First().Timestamp);
        }
    }
}
// NOTE(review): this example method appears to be corrupted by credential/secret
// redaction. The span between the DocumentStore Urls array and the `");` below is
// missing: the declarations of `session`, `rawTS`, `dailyRollup` and `dailyRollupTS`,
// plus the opening `#region`, are gone, and the braces no longer balance. The tokens
// are kept verbatim — restore this method from the original documentation sample.
public void Examples()
{
    var store = new DocumentStore
    {
        Urls = new[]
        {
            // NOTE(review): redaction fused the server URL literal with the tail of a
            // later `session.TimeSeriesFor(...)` call — do not treat this as one string.
            "http://*****:*****@DailyRollupForOneYear");

    //Create local instance of the rollup time-series - second method:
    //using the rollup policy itself and the raw time-series' name
    var rollupTimeSeries2 = session.TimeSeriesFor("sales/1", dailyRollup.GetTimeSeriesName("rawSales"));

    //Retrieve all the data from both time-series
    var rawData = rawTS.Get(DateTime.MinValue, DateTime.MaxValue).ToList();
    var rollupData = dailyRollupTS.Get(DateTime.MinValue, DateTime.MaxValue).ToList();

    #endregion
}
}