public void MarkForPolicy(DocumentsOperationContext context, TimeSeriesSliceHolder slicerHolder, DateTime timestamp, ulong status)
        {
            if (ShouldMarkForPolicy(context, slicerHolder, timestamp, status, out TimeSeriesPolicy nextPolicy) == false)
                return;

            _database.DocumentsStorage.TimeSeriesStorage.Rollups.MarkForPolicy(context, slicerHolder, nextPolicy, timestamp);
        }
Example #2
        public void UpdateCountOfExistingStats(DocumentsOperationContext context, TimeSeriesSliceHolder slicer, CollectionName collection, long count)
        {
            if (count == 0)
            {
                return;
            }

            var table = GetOrCreateTable(context.Transaction.InnerTransaction, collection);

            using (ReadStats(context, table, slicer, out var oldCount, out var start, out var end, out var name))
            {
                if (oldCount == 0)
                {
                    return;
                }

                using (table.Allocate(out var tvb))
                {
                    tvb.Add(slicer.StatsKey);
                    tvb.Add(GetPolicy(slicer));
                    tvb.Add(Bits.SwapBytes(start.Ticks));
                    tvb.Add(end);
                    tvb.Add(oldCount + count);
                    tvb.Add(name);

                    table.Set(tvb);
                }
            }
        }
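Both here and in the rollup table below, the start timestamp is written through Bits.SwapBytes. Storing the ticks big-endian makes a plain bytewise comparison of the fixed-size value agree with chronological order, which is presumably why the value is swapped before it is written. A minimal stand-alone sketch of that idea, using BinaryPrimitives in place of Sparrow's Bits.SwapBytes (an assumption about the intent, not RavenDB code):

using System;
using System.Buffers.Binary;

static class BigEndianTicksSketch
{
    // Encode DateTime ticks big-endian so lexicographic byte order matches chronological order.
    static byte[] ToOrderedKey(DateTime timestamp)
    {
        var key = new byte[sizeof(long)];
        BinaryPrimitives.WriteInt64BigEndian(key, timestamp.Ticks);
        return key;
    }

    static void Main()
    {
        ReadOnlySpan<byte> earlier = ToOrderedKey(new DateTime(2023, 1, 1));
        ReadOnlySpan<byte> later = ToOrderedKey(new DateTime(2024, 1, 1));
        Console.WriteLine(earlier.SequenceCompareTo(later) < 0); // True: byte order tracks time order
    }
}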
Example #3
        public string GetTimeSeriesNameOriginalCasing(DocumentsOperationContext context, string docId, string name)
        {
            using (var slicer = new TimeSeriesSliceHolder(context, docId, name))
            {
                return GetTimeSeriesNameOriginalCasing(context, slicer.StatsKey);
            }
        }
Example #4
        public (long Count, DateTime Start, DateTime End) GetStats(DocumentsOperationContext context, string docId, string name)
        {
            using (var slicer = new TimeSeriesSliceHolder(context, docId, name))
            {
                return GetStats(context, slicer);
            }
        }
Example #5
        public void UpdateCountOfExistingStats(DocumentsOperationContext context, string docId, string name, CollectionName collection, long count)
        {
            if (count == 0)
            {
                return;
            }

            using (var slicer = new TimeSeriesSliceHolder(context, docId, name))
            {
                UpdateCountOfExistingStats(context, slicer, collection, count);
            }
        }
        public void MarkSegmentForPolicy(
            DocumentsOperationContext context, 
            TimeSeriesSliceHolder slicerHolder, 
            DateTime timestamp, 
            string changeVector,
            int numberOfEntries)
        {
            var status = numberOfEntries == 0 ? TimeSeriesValuesSegment.Dead : TimeSeriesValuesSegment.Live;
            if (ShouldMarkForPolicy(context, slicerHolder, timestamp, status, out TimeSeriesPolicy nextPolicy) == false) 
                return;

            _database.DocumentsStorage.TimeSeriesStorage.Rollups.MarkSegmentForPolicy(context, slicerHolder, nextPolicy, timestamp, changeVector);
        }
        public void MarkForPolicy(DocumentsOperationContext context, TimeSeriesSliceHolder slicerHolder, DateTime timestamp, ulong status)
        {
            if (Configuration.Collections.TryGetValue(slicerHolder.Collection, out var config) == false)
            {
                return;
            }

            if (config.Disabled)
            {
                return;
            }

            var currentIndex = config.GetPolicyIndexByTimeSeries(slicerHolder.Name);

            if (currentIndex == -1) // policy not found
            {
                return;
            }

            var nextPolicy = config.GetNextPolicy(currentIndex);

            if (nextPolicy == null)
            {
                return;
            }

            if (ReferenceEquals(nextPolicy, TimeSeriesPolicy.AfterAllPolices))
            {
                return; // this is the last policy
            }
            if (status == TimeSeriesValuesSegment.Dead)
            {
                var currentPolicy = config.GetPolicy(currentIndex);
                if (currentPolicy.RetentionTime < TimeValue.MaxValue)
                {
                    var now         = context.DocumentDatabase.Time.GetUtcNow();
                    var startRollup = new DateTime(TimeSeriesRollups.NextRollup(timestamp, nextPolicy)).Add(-currentPolicy.RetentionTime);
                    if (startRollup.Add(currentPolicy.RetentionTime) < now)
                    {
                        return; // ignore this value since it is outside our retention frame
                    }
                }
            }

            _database.DocumentsStorage.TimeSeriesStorage.Rollups.MarkForPolicy(context, slicerHolder, nextPolicy, timestamp);
        }
        public unsafe void MarkForPolicy(DocumentsOperationContext context, TimeSeriesSliceHolder slicerHolder, TimeSeriesPolicy nextPolicy, DateTime timestamp)
        {
            var nextRollup = NextRollup(timestamp, nextPolicy);

            // mark for rollup
            RollupSchema.Create(context.Transaction.InnerTransaction, TimeSeriesRollupTable, 16);
            var table = context.Transaction.InnerTransaction.OpenTable(RollupSchema, TimeSeriesRollupTable);

            using (table.Allocate(out var tvb))
                using (Slice.From(context.Allocator, nextPolicy.Name, ByteStringType.Immutable, out var policyToApply))
                {
                    if (table.ReadByKey(slicerHolder.StatsKey, out var tvr))
                    {
                        // check if we need to update this
                        var existingRollup = Bits.SwapBytes(*(long *)tvr.Read((int)RollupColumns.NextRollup, out _));
                        if (existingRollup <= nextRollup)
                        {
                            return; // we have an earlier date to roll up from
                        }
                    }

                    if (_logger.IsInfoEnabled)
                    {
                        _logger.Info(
                            $"Marking {slicerHolder.Name} in document {slicerHolder.DocId} for policy {nextPolicy.Name} to rollup at {new DateTime(nextRollup)} (ticks:{nextRollup})");
                    }

                    var etag         = context.DocumentDatabase.DocumentsStorage.GenerateNextEtag();
                    var changeVector = context.DocumentDatabase.DocumentsStorage.GetNewChangeVector(context, etag);
                    using (Slice.From(context.Allocator, changeVector, ByteStringType.Immutable, out var changeVectorSlice))
                    {
                        tvb.Add(slicerHolder.StatsKey);
                        tvb.Add(slicerHolder.CollectionSlice);
                        tvb.Add(Bits.SwapBytes(nextRollup));
                        tvb.Add(policyToApply);
                        tvb.Add(etag);
                        tvb.Add(changeVectorSlice);

                        table.Set(tvb);
                    }
                }
        }
        public void MarkSegmentForPolicy(DocumentsOperationContext context, TimeSeriesSliceHolder slicerHolder, DateTime timestamp,
                                         string changeVector,
                                         int numberOfEntries)
        {
            if (Configuration.Collections.TryGetValue(slicerHolder.Collection, out var config) == false)
            {
                return;
            }

            var currentIndex = config.GetPolicyIndexByTimeSeries(slicerHolder.Name);

            if (currentIndex == -1) // policy not found
            {
                return;
            }

            var nextPolicy = config.GetNextPolicy(currentIndex);

            if (nextPolicy == null)
            {
                return;
            }

            if (ReferenceEquals(nextPolicy, TimeSeriesPolicy.AfterAllPolices))
            {
                return; // this is the last policy
            }
            if (numberOfEntries == 0)
            {
                var currentPolicy = config.GetPolicy(currentIndex);
                var now           = context.DocumentDatabase.Time.GetUtcNow();
                var nextRollup    = new DateTime(TimeSeriesRollups.NextRollup(timestamp, nextPolicy));
                var startRollup   = nextRollup.Add(-currentPolicy.RetentionTime);
                if (now - startRollup > currentPolicy.RetentionTime)
                {
                    return; // ignore this segment since it is outside our retention frame
                }
            }

            _database.DocumentsStorage.TimeSeriesStorage.Rollups.MarkSegmentForPolicy(context, slicerHolder, nextPolicy, timestamp, changeVector);
        }
            private static void MarkForNextPolicyAfterRollup(DocumentsOperationContext context, Table table, RollupState item, TimeSeriesPolicy policy, TimeSeriesStorage tss,
                                                             DateTime rollupEnd)
            {
                table.DeleteByKey(item.Key);
                (long Count, DateTime Start, DateTime End) stats = tss.Stats.GetStats(context, item.DocId, item.Name);

                if (stats.End > rollupEnd)
                {
                    // we know that we have values after the current rollup and we need to mark them
                    var nextRollup = rollupEnd.AddMilliseconds(1);
                    TimeSeriesReader intoReader = tss.GetReader(context, item.DocId, item.Name, nextRollup, DateTime.MaxValue);
                    if (intoReader.Init() == false)
                    {
                        Debug.Assert(false, "We have values but no segment?");
                        return;
                    }

                    using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                    {
                        tss.Rollups.MarkForPolicy(context, slicer, policy, intoReader.First().Timestamp);
                    }
                }
            }
        private bool ShouldMarkForPolicy(
            DocumentsOperationContext context, 
            TimeSeriesSliceHolder slicerHolder, 
            DateTime timestamp, 
            ulong status,
            out TimeSeriesPolicy nextPolicy)
        {
            nextPolicy = default;
            if (Configuration.Collections.TryGetValue(slicerHolder.Collection, out var config) == false)
                return false;

            var currentIndex = config.GetPolicyIndexByTimeSeries(slicerHolder.Name);
            if (currentIndex == -1) // policy not found
                return false;

            nextPolicy = config.GetNextPolicy(currentIndex);
            if (nextPolicy == null)
                return false;

            if (ReferenceEquals(nextPolicy, TimeSeriesPolicy.AfterAllPolices))
                return false;

            if (status == TimeSeriesValuesSegment.Dead)
            {
                var currentPolicy = config.GetPolicy(currentIndex);
                if (currentPolicy.RetentionTime < TimeValue.MaxValue)
                {
                    var now = context.DocumentDatabase.Time.GetUtcNow();
                    var startRollup = new DateTime(TimeSeriesRollups.NextRollup(timestamp, nextPolicy)).Add(-currentPolicy.RetentionTime);
                    if (startRollup.Add(currentPolicy.RetentionTime) < now)
                        return false; // ignore this since it is outside our retention frame
                }
            }

            return true;
        }
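ShouldMarkForPolicy folds the earlier guard clauses into one predicate, and the end of the policy chain is detected with ReferenceEquals against the TimeSeriesPolicy.AfterAllPolices sentinel rather than a null or name check. A small stand-alone sketch of that sentinel pattern, with hypothetical Policy/PolicyChain types that only illustrate the shape:

using System;
using System.Collections.Generic;

sealed class Policy
{
    // Well-known sentinel instance marking the end of the rollup chain.
    public static readonly Policy AfterAllPolicies = new Policy("AfterAllPolicies");
    public string Name { get; }
    public Policy(string name) => Name = name;
}

sealed class PolicyChain
{
    private readonly List<Policy> _policies;
    public PolicyChain(List<Policy> policies) => _policies = policies;

    // Returns the next policy in the chain, or the sentinel when there is none.
    public Policy GetNextPolicy(int currentIndex) =>
        currentIndex + 1 < _policies.Count ? _policies[currentIndex + 1] : Policy.AfterAllPolicies;
}

static class SentinelDemo
{
    static void Main()
    {
        var chain = new PolicyChain(new List<Policy> { new Policy("ByHour"), new Policy("ByDay") });
        var next = chain.GetNextPolicy(1);
        // Identity comparison, mirroring ReferenceEquals(nextPolicy, TimeSeriesPolicy.AfterAllPolices).
        Console.WriteLine(ReferenceEquals(next, Policy.AfterAllPolicies)); // True: last policy reached
    }
}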
Example #12
        public (long Count, DateTime Start, DateTime End) GetStats(DocumentsOperationContext context, TimeSeriesSliceHolder slicer)
        {
            return GetStats(context, slicer.StatsKey);
        }
Example #13
        public long UpdateStats(DocumentsOperationContext context, TimeSeriesSliceHolder slicer, CollectionName collection, TimeSeriesValuesSegment segment, DateTime baseline, int modifiedEntries)
        {
            long     previousCount;
            DateTime start, end;

            context.DocumentDatabase.Metrics.TimeSeries.PutsPerSec.MarkSingleThreaded(modifiedEntries);
            context.DocumentDatabase.Metrics.TimeSeries.BytesPutsPerSec.MarkSingleThreaded(segment.NumberOfBytes);

            var table = GetOrCreateTable(context.Transaction.InnerTransaction, collection);

            using (ReadStats(context, table, slicer, out previousCount, out start, out end, out var name))
            {
                var liveEntries = segment.NumberOfLiveEntries;
                if (liveEntries > 0)
                {
                    HandleLiveSegment();
                }

                if (liveEntries == 0)
                {
                    if (TryHandleDeadSegment() == false)
                    {
                        // this ts was completely deleted
                        start = end = default;
                    }
                }

                var count = previousCount + liveEntries;

                using (table.Allocate(out var tvb))
                {
                    tvb.Add(slicer.StatsKey);
                    tvb.Add(GetPolicy(slicer));
                    tvb.Add(Bits.SwapBytes(start.Ticks));
                    tvb.Add(end);
                    tvb.Add(count);
                    tvb.Add(name);

                    table.Set(tvb);
                }

                return count;
            }

            void HandleLiveSegment()
            {
                if (segment.NumberOfEntries == 1 && start > end)
                {
                    // new series
                    start = end = baseline;
                    return;
                }

                var lastTimestamp = GetLastLiveTimestamp(context, segment, baseline);

                if (lastTimestamp > end)
                {
                    end = lastTimestamp; // found later end
                }
                else
                {
                    var reader = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, start, DateTime.MaxValue);
                    var last   = reader.Last();

                    var lastValueInCurrentSegment = reader.ReadBaselineAsDateTime() == baseline;
                    end = lastValueInCurrentSegment ? lastTimestamp : last.Timestamp;
                }

                var first = segment.YieldAllValues(context, baseline, includeDead: false).First().Timestamp;

                if (first < start)
                {
                    start = first; // found earlier start
                }
                if (baseline <= start && first >= start)
                {
                    // start was removed
                    start = first;
                }
            }

            bool TryHandleDeadSegment()
            {
                if (previousCount == 0)
                {
                    return false; // if current and previous are zero it means that this time-series was completely deleted
                }
                var readerOfFirstValue = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, DateTime.MinValue, DateTime.MaxValue);

                readerOfFirstValue.First();
                var firstValueInCurrentSegment = readerOfFirstValue.ReadBaselineAsDateTime() == baseline;

                var last = segment.GetLastTimestamp(baseline);

                if (baseline <= start && last >= start || firstValueInCurrentSegment)
                {
                    // start was removed, need to find the next start

                    // this segment isn't relevant, so let's get the next one
                    var next   = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, start, DateTime.MaxValue).NextSegmentBaseline();
                    var reader = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, next, DateTime.MaxValue);

                    var first = reader.First();
                    if (first == default)
                    {
                        return false;
                    }

                    start = first.Timestamp;
                }

                var readerOfLastValue = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, start, DateTime.MaxValue);

                readerOfLastValue.Last();

                var lastValueInCurrentSegment = readerOfLastValue.ReadBaselineAsDateTime() == baseline;

                if (baseline <= end && end <= last || lastValueInCurrentSegment)
                {
                    var lastEntry = _timeSeriesStorage.GetReader(context, slicer.DocId, slicer.Name, start, baseline.AddMilliseconds(-1)).Last();
                    if (lastEntry == default)
                    {
                        return false;
                    }

                    end = lastEntry.Timestamp;
                }

                return true;
            }
        }
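UpdateStats leans on the local functions HandleLiveSegment and TryHandleDeadSegment to adjust the captured start and end variables in place rather than returning them. A minimal stand-alone reminder of how that capture behaves (illustrative only, not RavenDB code):

using System;

static class LocalFunctionCaptureSketch
{
    static void Main()
    {
        // Like HandleLiveSegment/TryHandleDeadSegment, the local function below captures
        // the enclosing locals and updates them in place.
        var start = DateTime.MaxValue;
        var end = DateTime.MinValue;

        Observe(new DateTime(2024, 1, 2));
        Observe(new DateTime(2024, 1, 1));

        Console.WriteLine($"{start:yyyy-MM-dd}..{end:yyyy-MM-dd}"); // 2024-01-01..2024-01-02

        void Observe(DateTime timestamp)
        {
            if (timestamp < start) start = timestamp;
            if (timestamp > end) end = timestamp;
        }
    }
}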
Example #14
        private static IDisposable ReadStats(DocumentsOperationContext context, Table table, TimeSeriesSliceHolder slicer, out long count, out DateTime start, out DateTime end, out Slice name)
        {
            count = 0;
            start = DateTime.MaxValue;
            end   = DateTime.MinValue;
            name  = slicer.NameSlice;

            if (table.ReadByKey(slicer.StatsKey, out var tvr) == false)
            {
                return null;
            }

            count = DocumentsStorage.TableValueToLong((int)StatsColumns.Count, ref tvr);
            start = new DateTime(Bits.SwapBytes(DocumentsStorage.TableValueToLong((int)StatsColumns.Start, ref tvr)));
            end   = DocumentsStorage.TableValueToDateTime((int)StatsColumns.End, ref tvr);

            if (count == 0 && start == default && end == default)
            {
                // these stats were deleted and re-created, so we need to treat this as a new series
                start = DateTime.MaxValue;
                end   = DateTime.MinValue;
                return null;
            }

            return DocumentsStorage.TableValueToSlice(context, (int)StatsColumns.Name, ref tvr, out name);
        }
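ReadStats returns null when no stats row exists (or when the row represents a deleted series), yet every caller above still wraps the result in a using statement. That works because C# allows the resource in a using statement to be null and simply skips Dispose. A short stand-alone illustration with a hypothetical Scope type:

using System;

static class NullDisposableSketch
{
    sealed class Scope : IDisposable
    {
        public void Dispose() => Console.WriteLine("released");
    }

    // Mirrors ReadStats: hands back a disposable scope only when something was found.
    static IDisposable MaybeScope(bool found) => found ? new Scope() : null;

    static void Main()
    {
        using (MaybeScope(found: false))
        {
            // Dispose is skipped for a null resource; the defaults set before the lookup stay in effect.
            Console.WriteLine("no stats row");
        }
    }
}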
Example #15
            protected override long ExecuteCmd(DocumentsOperationContext context)
            {
                var tss = context.DocumentDatabase.DocumentsStorage.TimeSeriesStorage;

                RollupSchema.Create(context.Transaction.InnerTransaction, TimeSeriesRollupTable, 16);
                var table = context.Transaction.InnerTransaction.OpenTable(RollupSchema, TimeSeriesRollupTable);

                foreach (var item in _states)
                {
                    if (_configuration == null)
                    {
                        return RolledUp;
                    }

                    if (_configuration.Collections.TryGetValue(item.Collection, out var config) == false)
                    {
                        continue;
                    }

                    if (config.Disabled)
                    {
                        continue;
                    }

                    if (table.ReadByKey(item.Key, out var current) == false)
                    {
                        continue;
                    }

                    var policy = config.GetPolicyByName(item.RollupPolicy, out _);
                    if (policy == null)
                    {
                        table.DeleteByKey(item.Key);
                        continue;
                    }

                    if (item.Etag != DocumentsStorage.TableValueToLong((int)RollupColumns.Etag, ref current))
                    {
                        continue; // concurrency check
                    }
                    var rawTimeSeries  = item.Name.Split(TimeSeriesConfiguration.TimeSeriesRollupSeparator)[0];
                    var intoTimeSeries = policy.GetTimeSeriesName(rawTimeSeries);
                    var rollupStart    = item.NextRollup.Add(-policy.AggregationTime);

                    if (config.MaxRetention < TimeValue.MaxValue)
                    {
                        var next             = new DateTime(NextRollup(_now.Add(-config.MaxRetention), policy)).Add(-policy.AggregationTime);
                        var rollupStartTicks = Math.Max(rollupStart.Ticks, next.Ticks);
                        rollupStart = new DateTime(rollupStartTicks);
                    }

                    var intoReader           = tss.GetReader(context, item.DocId, intoTimeSeries, rollupStart, DateTime.MaxValue);
                    var previouslyAggregated = intoReader.AllValues().Any();
                    if (previouslyAggregated)
                    {
                        var changeVector = intoReader.GetCurrentSegmentChangeVector();
                        if (ChangeVectorUtils.GetConflictStatus(item.ChangeVector, changeVector) == ConflictStatus.AlreadyMerged)
                        {
                            // this rollup is already done
                            table.DeleteByKey(item.Key);
                            continue;
                        }
                    }

                    if (_isFirstInTopology == false)
                    {
                        continue; // we execute the actual rollup only on the primary node to avoid conflicts
                    }
                    var rollupEnd = new DateTime(NextRollup(_now, policy)).Add(-policy.AggregationTime).AddMilliseconds(-1);
                    var reader    = tss.GetReader(context, item.DocId, item.Name, rollupStart, rollupEnd);

                    if (previouslyAggregated)
                    {
                        var hasPriorValues = tss.GetReader(context, item.DocId, item.Name, DateTime.MinValue, rollupStart).AllValues().Any();
                        if (hasPriorValues == false)
                        {
                            table.DeleteByKey(item.Key);
                            var first = tss.GetReader(context, item.DocId, item.Name, rollupStart, DateTime.MaxValue).First();
                            if (first == default)
                            {
                                continue; // nothing we can do here
                            }
                            if (first.Timestamp > item.NextRollup)
                            {
                                // the 'source' time-series has no values in the current time frame,
                                // so we need to aggregate only from the next time frame
                                using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                                {
                                    tss.Rollups.MarkForPolicy(context, slicer, policy, first.Timestamp);
                                }
                                continue;
                            }
                        }
                    }

                    // rollup from the raw data will generate a 6-value rollup of (first, last, min, max, sum, count)
                    // other rollups will aggregate each of those values by the type
                    var mode      = item.Name.Contains(TimeSeriesConfiguration.TimeSeriesRollupSeparator) ? AggregationMode.FromAggregated : AggregationMode.FromRaw;
                    var rangeSpec = new RangeGroup();
                    switch (policy.AggregationTime.Unit)
                    {
                    case TimeValueUnit.Second:
                        rangeSpec.Ticks          = TimeSpan.FromSeconds(policy.AggregationTime.Value).Ticks;
                        rangeSpec.TicksAlignment = RangeGroup.Alignment.Second;
                        break;

                    case TimeValueUnit.Month:
                        rangeSpec.Months = policy.AggregationTime.Value;
                        break;

                    default:
                        throw new ArgumentOutOfRangeException(nameof(policy.AggregationTime.Unit), $"Not supported time value unit '{policy.AggregationTime.Unit}'");
                    }
                    rangeSpec.InitializeRange(rollupStart);

                    List <SingleResult> values = null;
                    try
                    {
                        values = GetAggregatedValues(reader, rangeSpec, mode);
                    }
                    catch (RollupExceedNumberOfValuesException e)
                    {
                        var name  = item.Name;
                        var docId = item.DocId;
                        try
                        {
                            var document = context.DocumentDatabase.DocumentsStorage.Get(context, item.DocId, throwOnConflict: false);
                            docId = document?.Id ?? docId;
                            name  = tss.GetOriginalName(context, docId, name);
                        }
                        catch
                        {
                            // ignore
                        }
                        var msg = $"Rollup '{item.RollupPolicy}' for time-series '{name}' in document '{docId}' failed.";
                        if (_logger.IsInfoEnabled)
                        {
                            _logger.Info(msg, e);
                        }

                        var alert = AlertRaised.Create(context.DocumentDatabase.Name, "Failed to perform rollup because the time-series has more than 5 values", msg,
                                                       AlertType.RollupExceedNumberOfValues, NotificationSeverity.Warning, $"{item.DocId}/{item.Name}", new ExceptionDetails(e));

                        context.DocumentDatabase.NotificationCenter.Add(alert);

                        continue;
                    }

                    if (previouslyAggregated)
                    {
                        // if we need to re-aggregate we need to delete everything we have from that point on.
                        var removeRequest = new TimeSeriesStorage.DeletionRangeRequest
                        {
                            Collection = item.Collection,
                            DocumentId = item.DocId,
                            Name       = intoTimeSeries,
                            From       = rollupStart,
                            To         = DateTime.MaxValue,
                        };

                        tss.DeleteTimestampRange(context, removeRequest);
                    }

                    var before = context.LastDatabaseChangeVector;
                    var after  = tss.AppendTimestamp(context, item.DocId, item.Collection, intoTimeSeries, values, verifyName: false);
                    if (before != after)
                    {
                        RolledUp++;
                    }

                    table.DeleteByKey(item.Key);

                    var stats = tss.Stats.GetStats(context, item.DocId, item.Name);
                    if (stats.End > rollupEnd)
                    {
                        // we know that we have values after the current rollup and we need to mark them
                        var nextRollup = rollupEnd.AddMilliseconds(1);
                        intoReader = tss.GetReader(context, item.DocId, item.Name, nextRollup, DateTime.MaxValue);
                        if (intoReader.Init() == false)
                        {
                            Debug.Assert(false, "We have values but no segment?");
                            continue;
                        }

                        using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                        {
                            tss.Rollups.MarkForPolicy(context, slicer, policy, intoReader.First().Timestamp);
                        }
                    }
                }

                return RolledUp;
            }
            private void RollupOne(DocumentsOperationContext context, Table table, RollupState item, TimeSeriesPolicy policy, TimeSeriesCollectionConfiguration config)
            {
                var tss = context.DocumentDatabase.DocumentsStorage.TimeSeriesStorage;

                var rawTimeSeries  = item.Name.Split(TimeSeriesConfiguration.TimeSeriesRollupSeparator)[0];
                var intoTimeSeries = policy.GetTimeSeriesName(rawTimeSeries);
                var rollupStart    = item.NextRollup.Add(-policy.AggregationTime);

                if (config.MaxRetention < TimeValue.MaxValue)
                {
                    var next             = new DateTime(NextRollup(_now.Add(-config.MaxRetention), policy)).Add(-policy.AggregationTime);
                    var rollupStartTicks = Math.Max(rollupStart.Ticks, next.Ticks);
                    rollupStart = new DateTime(rollupStartTicks);
                }

                var intoReader           = tss.GetReader(context, item.DocId, intoTimeSeries, rollupStart, DateTime.MaxValue);
                var previouslyAggregated = intoReader.AllValues().Any();

                if (previouslyAggregated)
                {
                    var changeVector = intoReader.GetCurrentSegmentChangeVector();
                    if (ChangeVectorUtils.GetConflictStatus(item.ChangeVector, changeVector) == ConflictStatus.AlreadyMerged)
                    {
                        // this rollup is already done
                        table.DeleteByKey(item.Key);
                        return;
                    }
                }

                if (_isFirstInTopology == false)
                {
                    return;
                }

                var rollupEnd = new DateTime(NextRollup(_now, policy)).Add(-policy.AggregationTime).AddMilliseconds(-1);
                var reader    = tss.GetReader(context, item.DocId, item.Name, rollupStart, rollupEnd);

                if (previouslyAggregated)
                {
                    var hasPriorValues = tss.GetReader(context, item.DocId, item.Name, DateTime.MinValue, rollupStart).AllValues().Any();
                    if (hasPriorValues == false)
                    {
                        table.DeleteByKey(item.Key);
                        var first = tss.GetReader(context, item.DocId, item.Name, rollupStart, DateTime.MaxValue).First();
                        if (first == default)
                        {
                            return;
                        }

                        if (first.Timestamp > item.NextRollup)
                        {
                            // the 'source' time-series has no values in the current time frame,
                            // so we need to aggregate only from the next time frame
                            using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                            {
                                tss.Rollups.MarkForPolicy(context, slicer, policy, first.Timestamp);
                            }

                            return;
                        }
                    }
                }

                // rollup from the raw data will generate a 6-value rollup of (first, last, min, max, sum, count)
                // other rollups will aggregate each of those values by the type
                var mode      = item.Name.Contains(TimeSeriesConfiguration.TimeSeriesRollupSeparator) ? AggregationMode.FromAggregated : AggregationMode.FromRaw;
                var rangeSpec = new RangeGroup();

                switch (policy.AggregationTime.Unit)
                {
                case TimeValueUnit.Second:
                    rangeSpec.Ticks          = TimeSpan.FromSeconds(policy.AggregationTime.Value).Ticks;
                    rangeSpec.TicksAlignment = RangeGroup.Alignment.Second;
                    break;

                case TimeValueUnit.Month:
                    rangeSpec.Months = policy.AggregationTime.Value;
                    break;

                default:
                    throw new ArgumentOutOfRangeException(nameof(policy.AggregationTime.Unit), $"Not supported time value unit '{policy.AggregationTime.Unit}'");
                }

                rangeSpec.InitializeRange(rollupStart);

                var values = GetAggregatedValues(reader, rangeSpec, mode);

                if (previouslyAggregated)
                {
                    // if we need to re-aggregate we need to delete everything we have from that point on.
                    var removeRequest = new TimeSeriesStorage.DeletionRangeRequest
                    {
                        Collection = item.Collection,
                        DocumentId = item.DocId,
                        Name       = intoTimeSeries,
                        From       = rollupStart,
                        To         = DateTime.MaxValue,
                    };

                    tss.DeleteTimestampRange(context, removeRequest);
                }

                var before = context.LastDatabaseChangeVector;
                var after  = tss.AppendTimestamp(context, item.DocId, item.Collection, intoTimeSeries, values, verifyName: false);

                if (before != after)
                {
                    RolledUp++;
                }

                table.DeleteByKey(item.Key);

                var stats = tss.Stats.GetStats(context, item.DocId, item.Name);

                if (stats.End > rollupEnd)
                {
                    // we know that we have values after the current rollup and we need to mark them
                    var nextRollup = rollupEnd.AddMilliseconds(1);
                    intoReader = tss.GetReader(context, item.DocId, item.Name, nextRollup, DateTime.MaxValue);
                    if (intoReader.Init() == false)
                    {
                        Debug.Assert(false, "We have values but no segment?");
                        return;
                    }

                    using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                    {
                        tss.Rollups.MarkForPolicy(context, slicer, policy, intoReader.First().Timestamp);
                    }
                }
            }
            protected override long ExecuteCmd(DocumentsOperationContext context)
            {
                var storage = context.DocumentDatabase.DocumentsStorage;

                RollupSchema.Create(context.Transaction.InnerTransaction, TimeSeriesRollupTable, 16);
                var table = context.Transaction.InnerTransaction.OpenTable(RollupSchema, TimeSeriesRollupTable);

                foreach (var item in _states)
                {
                    if (_configuration == null)
                    {
                        return RolledUp;
                    }

                    if (_configuration.Collections.TryGetValue(item.Collection, out var config) == false)
                    {
                        continue;
                    }

                    if (config.Disabled)
                    {
                        continue;
                    }

                    if (table.ReadByKey(item.Key, out var current) == false)
                    {
                        continue;
                    }

                    var policy = config.GetPolicyByName(item.RollupPolicy, out _);
                    if (policy == null)
                    {
                        table.DeleteByKey(item.Key);
                        continue;
                    }

                    if (item.Etag != DocumentsStorage.TableValueToLong((int)RollupColumns.Etag, ref current))
                    {
                        continue; // concurrency check
                    }
                    try
                    {
                        RollupOne(context, table, item, policy, config);
                    }
                    catch (NanValueException e)
                    {
                        if (_logger.IsInfoEnabled)
                        {
                            _logger.Info($"{item} failed", e);
                        }

                        if (table.VerifyKeyExists(item.Key) == false)
                        {
                            // we should re-add it, in case we already removed this rollup
                            using (var slicer = new TimeSeriesSliceHolder(context, item.DocId, item.Name, item.Collection))
                                using (Slice.From(context.Allocator, item.ChangeVector, ByteStringType.Immutable, out var cv))
                                    using (Slice.From(context.Allocator, item.RollupPolicy, ByteStringType.Immutable, out var policySlice))
                                        using (table.Allocate(out var tvb))
                                        {
                                            tvb.Add(slicer.StatsKey);
                                            tvb.Add(slicer.CollectionSlice);
                                            tvb.Add(Bits.SwapBytes(item.NextRollup.Ticks));
                                            tvb.Add(policySlice);
                                            tvb.Add(item.Etag);
                                            tvb.Add(cv);

                                            table.Set(tvb);
                                        }
                        }
                    }
                    catch (RollupExceedNumberOfValuesException e)
                    {
                        var name  = item.Name;
                        var docId = item.DocId;
                        try
                        {
                            var document = storage.Get(context, item.DocId, throwOnConflict: false);
                            docId = document?.Id ?? docId;
                            name  = storage.TimeSeriesStorage.GetOriginalName(context, docId, name);
                        }
                        catch
                        {
                            // ignore
                        }

                        var msg = $"Rollup '{item.RollupPolicy}' for time-series '{name}' in document '{docId}' failed.";
                        if (_logger.IsInfoEnabled)
                        {
                            _logger.Info(msg, e);
                        }

                        var alert = AlertRaised.Create(context.DocumentDatabase.Name, "Failed to perform rollup because the time-series has more than 5 values", msg,
                                                       AlertType.RollupExceedNumberOfValues, NotificationSeverity.Warning, $"{item.Collection}/{item.Name}", new ExceptionDetails(e));

                        context.DocumentDatabase.NotificationCenter.Add(alert);
                    }
                }

                return RolledUp;
            }
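The switch over policy.AggregationTime.Unit in the rollup commands above treats second-based and month-based steps differently because months have no fixed tick length: second steps align the rollup range on tick multiples, month steps align it on calendar months. A stand-alone sketch of those two alignment styles (hypothetical helpers, not the RangeGroup implementation):

using System;

static class WindowAlignmentSketch
{
    // Align to a fixed-length window expressed in ticks (e.g. a 10-second aggregation step).
    static DateTime AlignToTicks(DateTime timestamp, long windowTicks) =>
        new DateTime(timestamp.Ticks - timestamp.Ticks % windowTicks, timestamp.Kind);

    // Align to a calendar window expressed in months (e.g. a 3-month aggregation step).
    static DateTime AlignToMonths(DateTime timestamp, int windowMonths)
    {
        var totalMonths = timestamp.Year * 12 + (timestamp.Month - 1);
        var aligned = totalMonths - totalMonths % windowMonths;
        return new DateTime(aligned / 12, aligned % 12 + 1, 1, 0, 0, 0, timestamp.Kind);
    }

    static void Main()
    {
        var t = new DateTime(2024, 5, 17, 10, 0, 7, DateTimeKind.Utc);
        Console.WriteLine(AlignToTicks(t, TimeSpan.FromSeconds(10).Ticks)); // start of the enclosing 10-second window
        Console.WriteLine(AlignToMonths(t, 3));                             // 2024-04-01, start of the enclosing 3-month window
    }
}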