/// <summary>
/// Collects the requested time series ranges for a single document, grouped by
/// series name (case-insensitive). Stops early and returns the partial result
/// if the handler reports no more data for a range.
/// </summary>
/// <param name="docId">Id of the document whose time series are fetched.</param>
/// <param name="timeSeriesToGet">Ranges (name + optional From/To) to retrieve.</param>
/// <returns>Map from series name to the list of range results fetched for it.</returns>
private Dictionary<string, List<TimeSeriesRangeResult>> GetTimeSeriesForDocument(string docId, HashSet<TimeSeriesRange> timeSeriesToGet)
{
    var results = new Dictionary<string, List<TimeSeriesRangeResult>>(StringComparer.OrdinalIgnoreCase);

    foreach (var range in timeSeriesToGet)
    {
        var start = 0;
        var pageSize = int.MaxValue;

        // Missing From/To bounds default to the widest possible window.
        var rangeResult = TimeSeriesHandler.GetTimeSeriesRange(_context, docId, range.Name,
            range.From ?? DateTime.MinValue, range.To ?? DateTime.MaxValue, ref start, ref pageSize);

        if (rangeResult == null)
        {
            // NOTE(review): presumably a null result means the page budget is spent
            // (the assert expects pageSize <= 0); we return what was gathered so far.
            Debug.Assert(pageSize <= 0, "Page size must be zero or less here");
            return results;
        }

        if (results.TryGetValue(range.Name, out var existing))
        {
            existing.Add(rangeResult);
        }
        else
        {
            results[range.Name] = new List<TimeSeriesRangeResult> { rangeResult };
        }
    }

    return results;
}
/// <summary>
/// Sends the transformed batch of commands to the destination RavenDB node in a
/// single batch request and returns the number of commands sent (0 when empty).
/// </summary>
/// <param name="items">The commands to load; expected to actually be a <c>List&lt;ICommandData&gt;</c>.</param>
/// <param name="context">Operation context used for the batch request.</param>
/// <param name="scope">ETL stats scope (not consumed directly here).</param>
/// <returns>The number of commands that were sent.</returns>
/// <exception cref="NotSupportedException">
/// Thrown when time series tracking is enabled and the batch contains an incremental time series.
/// </exception>
protected override int LoadInternal(IEnumerable<ICommandData> items, DocumentsOperationContext context, EtlStatsScope scope)
{
    var commands = items as List<ICommandData>;
    Debug.Assert(commands != null);

    if (commands.Count == 0)
        return 0;

    if (ShouldTrackTimeSeries())
    {
        foreach (var command in commands)
        {
            // Incremental time series cannot be loaded by ETL; reject the batch up front.
            // Fix: error message previously read "isn't support" (grammatical typo).
            if (command is TimeSeriesBatchCommandData tsbc && TimeSeriesHandler.CheckIfIncrementalTs(tsbc.Name))
                throw new NotSupportedException($"Load isn't supported for incremental time series '{tsbc.Name}' at document '{tsbc.Id}'");
        }
    }

    BatchOptions options = null;
    if (Configuration.LoadRequestTimeoutInSec != null)
    {
        options = new BatchOptions
        {
            RequestTimeout = TimeSpan.FromSeconds(Configuration.LoadRequestTimeoutInSec.Value)
        };
    }

    using (var batchCommand = new SingleNodeBatchCommand(DocumentConventions.DefaultForServer, context, commands, options))
    {
        var duration = Stopwatch.StartNew();
        try
        {
            BeforeActualLoad?.Invoke(this);

            AsyncHelpers.RunSync(() => _requestExecutor.ExecuteAsync(batchCommand, context, token: CancellationToken));
            _recentUrl = _requestExecutor.Url;

            return commands.Count;
        }
        catch (OperationCanceledException e)
        {
            // If nobody asked us to cancel, the cancellation came from a timeout —
            // surface it as a timeout error instead of a plain cancellation.
            if (CancellationToken.IsCancellationRequested == false)
            {
                ThrowTimeoutException(commands.Count, duration.Elapsed, e);
            }

            throw;
        }
    }
}
/// <summary>
/// Registers a time series range to fetch, keyed by its (optional) source path.
/// Empty or missing From/To strings fall back to the widest possible window.
/// </summary>
/// <param name="timeseries">Name of the time series.</param>
/// <param name="fromStr">Inclusive start date string, or null/empty for DateTime.MinValue.</param>
/// <param name="toStr">Inclusive end date string, or null/empty for DateTime.MaxValue.</param>
/// <param name="sourcePath">Optional grouping path; null maps to the empty-string bucket.</param>
public void AddTimeSeries(string timeseries, string fromStr, string toStr, string sourcePath = null)
{
    var key = sourcePath ?? string.Empty;

    if (TimeSeries.TryGetValue(key, out var ranges) == false)
    {
        ranges = new HashSet<AbstractTimeSeriesRange>(AbstractTimeSeriesRangeComparer.Instance);
        TimeSeries[key] = ranges;
    }

    var from = string.IsNullOrEmpty(fromStr) ? DateTime.MinValue : TimeSeriesHandler.ParseDate(fromStr, timeseries);
    var to = string.IsNullOrEmpty(toStr) ? DateTime.MaxValue : TimeSeriesHandler.ParseDate(toStr, timeseries);

    ranges.Add(new TimeSeriesRange
    {
        Name = timeseries,
        From = from,
        To = to
    });
}
/// <summary>
/// Streams all values of a single time series as a JSON object of the form
/// <c>{ "Results": [ ... ] }</c>, flushing the writer periodically.
/// Fix: the original wrote a comma AFTER every entry (including the last one),
/// producing a trailing comma inside the array — invalid JSON. The comma is now
/// written between entries only.
/// </summary>
public async Task Stream()
{
    var documentId = GetStringQueryString("docId");
    var name = GetStringQueryString("name");
    var fromStr = GetStringQueryString("from", required: false);
    var toStr = GetStringQueryString("to", required: false);
    var offset = GetTimeSpanQueryString("offset", required: false);

    // Missing bounds default to the widest possible window.
    var from = string.IsNullOrEmpty(fromStr) ? DateTime.MinValue : TimeSeriesHandler.ParseDate(fromStr, name);
    var to = string.IsNullOrEmpty(toStr) ? DateTime.MaxValue : TimeSeriesHandler.ParseDate(toStr, name);

    using (ContextPool.AllocateOperationContext(out DocumentsOperationContext context))
    using (context.OpenReadTransaction())
    {
        using (var token = CreateOperationToken())
        await using (var writer = new AsyncBlittableJsonTextWriter(context, ResponseBodyStream()))
        {
            var reader = new TimeSeriesReader(context, documentId, name, from, to, offset, token.Token);

            writer.WriteStartObject();
            writer.WritePropertyName("Results");
            writer.WriteStartArray();

            var first = true;
            foreach (var entry in reader.AllValues())
            {
                if (first == false)
                    writer.WriteComma();
                first = false;

                context.Write(writer, entry.ToTimeSeriesEntryJson());

                // Flush periodically so large series stream instead of buffering fully.
                await writer.MaybeFlushAsync(token.Token);
            }

            writer.WriteEndArray();
            writer.WriteEndObject();

            await writer.MaybeFlushAsync(token.Token);
        }
    }
}
/// <summary>
/// Guards legacy-mode replication: if the outgoing item is an incremental time
/// series (segment or deleted range), logs and throws, since the destination
/// was detected as not supporting the Incremental-TimeSeries feature.
/// Non-time-series items, other item subtypes, and non-incremental series pass through.
/// </summary>
/// <param name="item">The replication item about to be sent to the destination.</param>
/// <exception cref="LegacyReplicationViolationException">
/// Thrown when the item belongs to an incremental time series.
/// </exception>
private void AssertNotIncrementalTimeSeriesForLegacyReplication(ReplicationBatchItem item)
{
    if (item.Type == ReplicationBatchItem.ReplicationItemType.TimeSeriesSegment || item.Type == ReplicationBatchItem.ReplicationItemType.DeletedTimeSeriesRange)
    {
        using (_parent._database.DocumentsStorage.ContextPool.AllocateOperationContext(out JsonOperationContext context))
        {
            LazyStringValue name;
            switch (item)
            {
                case TimeSeriesDeletedRangeItem timeSeriesDeletedRangeItem:
                    // Deleted-range items carry the series name embedded in their key; extract it.
                    TimeSeriesValuesSegment.ParseTimeSeriesKey(timeSeriesDeletedRangeItem.Key, context, out _, out name);
                    break;
                case TimeSeriesReplicationItem timeSeriesReplicationItem:
                    name = timeSeriesReplicationItem.Name;
                    break;
                default:
                    // Other item subtypes are not checked.
                    return;
            }

            // Regular (non-incremental) time series are allowed in legacy mode.
            if (TimeSeriesHandler.CheckIfIncrementalTs(name) == false)
            {
                return;
            }
        }

        // the other side doesn't support incremental time series, stopping replication
        var message = $"{_parent.Node.FromString()} found an item of type 'IncrementalTimeSeries' to replicate to {_parent.Destination.FromString()}, " +
                      $"while we are in legacy mode (downgraded our replication version to match the destination). " +
                      $"Can't send Incremental-TimeSeries in legacy mode, destination {_parent.Destination.FromString()} does not support Incremental-TimeSeries feature. Stopping replication.";

        if (_log.IsInfoEnabled)
        {
            _log.Info(message);
        }

        throw new LegacyReplicationViolationException(message);
    }
}