Example #1
        private void HandleSingleTimeSeriesDeletedRangeItem(TimeSeriesDeletedRangeItem item, string loadBehaviorFunction)
        {
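            // Extract the owning document id and the time series name from the item's storage key.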
            TimeSeriesValuesSegment.ParseTimeSeriesKey(item.Key, Context, out var docId, out var name);

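            // When a load-behavior script is defined, it can filter the document out entirely
            // or narrow the time range that should be loaded for this time series.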
            if (loadBehaviorFunction != null)
            {
                if (ShouldFilterByScriptAndGetParams(docId, name, loadBehaviorFunction, out (DateTime begin, DateTime end)? toLoad))
                {
                    return;
                }

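                // Nothing to do if the deleted range does not overlap the range requested by the script.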
                if (toLoad.HasValue && (toLoad.Value.begin > item.To || toLoad.Value.end < item.From))
                {
                    return;
                }
            }

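            // Record the removal of this time series range in the current run.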
            _currentRun.RemoveTimeSeries(docId, name, item.From, item.To);
        }
Example #2
        public override unsafe void Read(DocumentsOperationContext context, IncomingReplicationStatsScope stats)
        {
            // TODO: add stats
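            // The key is length-prefixed: read its size, then the key bytes,
            // and keep them as an immutable, context-allocated slice.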
            var keySize = *(int*)Reader.ReadExactly(sizeof(int));
            var key = Reader.ReadExactly(keySize);

            ToDispose(Slice.From(context.Allocator, key, keySize, ByteStringType.Immutable, out Key));

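            // Read the segment size, copy the segment bytes into a freshly allocated buffer
            // and wrap them in a TimeSeriesValuesSegment.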
            var segmentSize = *(int*)Reader.ReadExactly(sizeof(int));
            var mem = Reader.AllocateMemory(segmentSize);

            Memory.Copy(mem, Reader.ReadExactly(segmentSize), segmentSize);
            Segment = new TimeSeriesValuesSegment(mem, segmentSize);

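            // The collection name and the time series name follow as strings.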
            SetLazyStringValueFromString(context, out Collection);
            Debug.Assert(Collection != null);

            SetLazyStringValueFromString(context, out Name);
            Debug.Assert(Name != null);
        }
Example #3
        private void AssertNotIncrementalTimeSeriesForLegacyReplication(ReplicationBatchItem item)
        {
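            // Only time series segments and deleted-range items can reference an incremental
            // time series, which a legacy destination cannot accept.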
            if (item.Type == ReplicationBatchItem.ReplicationItemType.TimeSeriesSegment || item.Type == ReplicationBatchItem.ReplicationItemType.DeletedTimeSeriesRange)
            {
                using (_parent._database.DocumentsStorage.ContextPool.AllocateOperationContext(out JsonOperationContext context))
                {
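                    // Resolve the time series name: deleted-range items encode it in their key,
                    // while segment items expose it directly; any other item type is skipped.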
                    LazyStringValue name;
                    switch (item)
                    {
                        case TimeSeriesDeletedRangeItem timeSeriesDeletedRangeItem:
                            TimeSeriesValuesSegment.ParseTimeSeriesKey(timeSeriesDeletedRangeItem.Key, context, out _, out name);
                            break;

                        case TimeSeriesReplicationItem timeSeriesReplicationItem:
                            name = timeSeriesReplicationItem.Name;
                            break;

                        default:
                            return;
                    }

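                    // A regular (non-incremental) time series is safe to send to a legacy destination.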
                    if (TimeSeriesHandler.CheckIfIncrementalTs(name) == false)
                    {
                        return;
                    }
                }

                // the other side doesn't support incremental time series, stopping replication
                var message = $"{_parent.Node.FromString()} found an item of type 'IncrementalTimeSeries' to replicate to {_parent.Destination.FromString()}, " +
                              $"while we are in legacy mode (downgraded our replication version to match the destination). " +
                              $"Can't send Incremental-TimeSeries in legacy mode, destination {_parent.Destination.FromString()} does not support Incremental-TimeSeries feature. Stopping replication.";

                if (_log.IsInfoEnabled)
                {
                    _log.Info(message);
                }

                throw new LegacyReplicationViolationException(message);
            }
        }
Example #4
        public unsafe void CanQueryTimeSeriesAggregation_DeclareSyntax_AllDocsQuery()
        {
            using (var store = GetDocumentStore())
            {
                var baseline = RavenTestHelper.UtcToday;

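                // Store a document and append three heart rate measurements within the same hour.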
                using (var session = store.OpenSession())
                {
                    session.Store(new { Name = "Oren" }, "users/ayende");

                    var tsf = session.TimeSeriesFor<HeartRateMeasure>("users/ayende");
                    var tag = "watches/fitbit";
                    var m   = new HeartRateMeasure
                    {
                        HeartRate = 59d,
                    };
                    tsf.Append(baseline.AddMinutes(61), m, tag);

                    m.HeartRate = 79d;
                    tsf.Append(baseline.AddMinutes(62), m, tag);

                    m.HeartRate = 69d;
                    tsf.Append(baseline.AddMinutes(63), m, tag);

                    session.SaveChanges();
                }

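                // Query the time series through a declared function that groups the values by one hour.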
                using (var session = store.OpenSession())
                {
                    var query = session.Advanced.RawQuery<TimeSeriesAggregationResult<HeartRateMeasure>>(@"
    declare timeseries out(u)
    {
        from u.HeartRateMeasures between $start and $end
        group by 1h
        select min(), max(), first(), last()
    }
    from @all_docs as u
    where id() == 'users/ayende'
    select out(u)
")
                                .AddParameter("start", baseline.EnsureUtc())
                                .AddParameter("end", baseline.AddDays(1).EnsureUtc());

                    var agg = query.First();
                    if (agg.Count != 3)
                    {
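                        // Diagnostics for an unexpected aggregation count: read the segment directly
                        // from storage to verify the values were written, then re-run the query once
                        // to report whether the failure was transient.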
                        var db  = Databases.GetDocumentDatabaseInstanceFor(store).Result;
                        var tss = db.DocumentsStorage.TimeSeriesStorage;
                        using (db.DocumentsStorage.ContextPool.AllocateOperationContext(out DocumentsOperationContext ctx))
                            using (ctx.OpenReadTransaction())
                            {
                                var reader = tss.GetReader(ctx, "users/ayende", nameof(HeartRateMeasure) + "s", baseline, baseline.AddDays(1));

                                Assert.True(reader.Init());

                                Assert.NotNull(reader._tvr);

                                var key = reader._tvr.Read(0, out var size);

                                TimeSeriesValuesSegment.ParseTimeSeriesKey(key, size, ctx, out var docId, out var name, out DateTime baseline2);

                                Assert.Equal("users/ayende", docId);
                                Assert.Equal(nameof(HeartRateMeasure) + "s", name, StringComparer.InvariantCultureIgnoreCase);
                                Assert.Equal(baseline.AddMinutes(61), baseline2, RavenTestHelper.DateTimeComparer.Instance);

                                Assert.Equal(1, reader.SegmentsOrValues().Count());

                                Assert.False(query.First().Count == 3, "Query assertion failed once and passed on second try. sanity check passed");

                                //Assert.True(false, "Query assertion failed twice. sanity check passed");
                            }
                    }

                    Assert.Equal(3, agg.Count);

                    Assert.Equal(1, agg.Results.Length);

                    var val = agg.Results[0];
                    Assert.Equal(59, val.First.HeartRate);
                    Assert.Equal(59, val.Min.HeartRate);

                    Assert.Equal(69, val.Last.HeartRate);
                    Assert.Equal(79, val.Max.HeartRate);

                    Assert.Equal(baseline.AddMinutes(60), val.From, RavenTestHelper.DateTimeComparer.Instance);
                    Assert.Equal(baseline.AddMinutes(120), val.To, RavenTestHelper.DateTimeComparer.Instance);
                }
            }
        }