/// <summary>
/// Loads the documents for <paramref name="ids"/> and streams them to the response, together
/// with any requested includes (related documents, counters, time series, compare-exchange values).
/// Honors the If-None-Match header and replies 304 Not Modified when the computed etag matches.
/// </summary>
/// <param name="context">Documents operation context used for all storage reads.</param>
/// <param name="ids">Document ids taken from the request query string.</param>
/// <param name="metadataOnly">When true, only document metadata is written to the response.</param>
private async Task GetDocumentsByIdAsync(DocumentsOperationContext context, Microsoft.Extensions.Primitives.StringValues ids, bool metadataOnly)
{
    var sw = Stopwatch.StartNew();

    var includePaths = GetStringValuesQueryString("include", required: false);
    var documents = new List<Document>(ids.Count);
    var includes = new List<Document>(includePaths.Count * ids.Count);

    var includeDocs = new IncludeDocumentsCommand(Database.DocumentsStorage, context, includePaths, isProjection: false);

    GetCountersQueryString(Database, context, out var includeCounters);
    GetTimeSeriesQueryString(Database, context, out var includeTimeSeries);
    GetCompareExchangeValueQueryString(Database, out var includeCompareExchangeValues);

    using (includeCompareExchangeValues)
    {
        foreach (var id in ids)
        {
            // Guard against null/empty ids before hitting storage.
            Document document = null;
            if (string.IsNullOrEmpty(id) == false)
            {
                document = Database.DocumentsStorage.Get(context, id);
            }

            // A single missing document is a 404; for a multi-get, nulls stay in place.
            if (document == null && ids.Count == 1)
            {
                HttpContext.Response.StatusCode = (int)HttpStatusCode.NotFound;
                return;
            }

            documents.Add(document);
            includeDocs.Gather(document);
            includeCounters?.Fill(document);
            includeTimeSeries?.Fill(document);
            includeCompareExchangeValues?.Gather(document);
        }

        includeDocs.Fill(includes);
        includeCompareExchangeValues?.Materialize();

        var actualEtag = ComputeHttpEtags.ComputeEtagForDocuments(documents, includes, includeCounters, includeTimeSeries, includeCompareExchangeValues);

        var etag = GetStringFromHeaders("If-None-Match");
        if (etag == actualEtag)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.NotModified;
            return;
        }

        HttpContext.Response.Headers[Constants.Headers.Etag] = "\"" + actualEtag + "\"";

        // Fixed: the original declared `int numberOfResults = 0;` and immediately overwrote it;
        // the seed value 0 is passed straight through to the write call instead.
        var numberOfResults = await WriteDocumentsJsonAsync(context, metadataOnly, documents, includes,
            includeCounters?.Results, includeTimeSeries?.Results, includeCompareExchangeValues?.Results, 0);

        AddPagingPerformanceHint(PagingOperationType.Documents, nameof(GetDocumentsByIdAsync), HttpContext.Request.QueryString.Value,
            numberOfResults, documents.Count, sw.ElapsedMilliseconds);
    }
}
/// <summary>
/// Produces one combined hash covering every time-series range in <paramref name="ranges"/>,
/// so the resulting etag changes whenever any individual range (or the number of series) changes.
/// </summary>
private static unsafe string CombineHashesFromMultipleRanges(Dictionary<string, List<TimeSeriesRangeResult>> ranges)
{
    // Initialize a generic-hash state on the stack.
    var hashSize = Sodium.crypto_generichash_bytes();
    Debug.Assert((int)hashSize == 32);

    var stateSize = (int)Sodium.crypto_generichash_statebytes();
    var hashState = stackalloc byte[stateSize];

    if (Sodium.crypto_generichash_init(hashState, null, UIntPtr.Zero, hashSize) != 0)
    {
        ComputeHttpEtags.ThrowFailToInitHash();
    }

    // Fold in the series count first, then every per-range hash, so both shape and content are covered.
    ComputeHttpEtags.HashNumber(hashState, ranges.Count);
    foreach (var rangeList in ranges.Values)
    {
        foreach (var rangeResult in rangeList)
        {
            ComputeHttpEtags.HashChangeVector(hashState, rangeResult.Hash);
        }
    }

    return ComputeHttpEtags.FinalizeHash(hashSize, hashState);
}
/// <summary>
/// Synchronous variant: loads the documents for <paramref name="ids"/> and writes them
/// (plus any requested document includes) to the response, honoring If-None-Match.
/// </summary>
/// <param name="context">Documents operation context used for all storage reads.</param>
/// <param name="ids">Document ids taken from the request query string.</param>
/// <param name="metadataOnly">When true, only document metadata is written to the response.</param>
private void GetDocumentsById(DocumentsOperationContext context, StringValues ids, bool metadataOnly)
{
    var sw = Stopwatch.StartNew();

    var includePaths = GetStringValuesQueryString("include", required: false);
    var documents = new List<Document>(ids.Count);
    var includes = new List<Document>(includePaths.Count * ids.Count);
    var includeDocs = new IncludeDocumentsCommand(Database.DocumentsStorage, context, includePaths);

    foreach (var id in ids)
    {
        // Fixed: guard against null/empty ids before hitting storage,
        // matching the async variant (GetDocumentsByIdAsync) which performs this check.
        Document document = null;
        if (string.IsNullOrEmpty(id) == false)
        {
            document = Database.DocumentsStorage.Get(context, id);
        }

        // A single missing document is a 404; for a multi-get, nulls stay in place.
        if (document == null && ids.Count == 1)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.NotFound;
            return;
        }

        documents.Add(document);
        includeDocs.Gather(document);
    }

    includeDocs.Fill(includes);

    var actualEtag = ComputeHttpEtags.ComputeEtagForDocuments(documents, includes);

    var etag = GetStringFromHeaders("If-None-Match");
    if (etag == actualEtag)
    {
        HttpContext.Response.StatusCode = (int)HttpStatusCode.NotModified;
        return;
    }

    HttpContext.Response.Headers[Constants.Headers.Etag] = "\"" + actualEtag + "\"";

    int numberOfResults;
    var blittable = GetBoolValueQueryString("blittable", required: false) ?? false;
    if (blittable)
    {
        WriteDocumentsBlittable(context, documents, includes, out numberOfResults);
    }
    else
    {
        WriteDocumentsJson(context, metadataOnly, documents, includes, out numberOfResults);
    }

    AddPagingPerformanceHint(PagingOperationType.Documents, nameof(GetDocumentsById), HttpContext, numberOfResults, documents.Count, sw.Elapsed);
}
/// <summary>
/// Streams the revisions identified by <paramref name="changeVectors"/> to the response,
/// honoring If-None-Match, and records a paging performance hint including total bytes written.
/// </summary>
private async Task GetRevisionByChangeVector(DocumentsOperationContext context, Microsoft.Extensions.Primitives.StringValues changeVectors, bool metadataOnly, CancellationToken token)
{
    var stopwatch = Stopwatch.StartNew();
    var revisionsStorage = Database.DocumentsStorage.RevisionsStorage;

    var revisions = new List<Document>(changeVectors.Count);
    foreach (var cv in changeVectors)
    {
        var revision = revisionsStorage.GetRevision(context, cv);

        // A single unknown change vector is a 404; for a multi-get, nulls stay in place.
        if (revision == null && changeVectors.Count == 1)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.NotFound;
            return;
        }

        revisions.Add(revision);
    }

    var computedEtag = ComputeHttpEtags.ComputeEtagForRevisions(revisions);
    if (GetStringFromHeaders("If-None-Match") == computedEtag)
    {
        HttpContext.Response.StatusCode = (int)HttpStatusCode.NotModified;
        return;
    }

    HttpContext.Response.Headers[Constants.Headers.Etag] = "\"" + computedEtag + "\"";

    long numberOfResults;
    long totalDocumentsSizeInBytes;
    if (GetBoolValueQueryString("blittable", required: false) ?? false)
    {
        WriteRevisionsBlittable(context, revisions, out numberOfResults, out totalDocumentsSizeInBytes);
    }
    else
    {
        (numberOfResults, totalDocumentsSizeInBytes) = await WriteRevisionsJsonAsync(context, metadataOnly, revisions, token);
    }

    AddPagingPerformanceHint(PagingOperationType.Documents, nameof(GetRevisionByChangeVector), HttpContext.Request.QueryString.Value,
        numberOfResults, revisions.Count, stopwatch.ElapsedMilliseconds, totalDocumentsSizeInBytes);
}
/// <summary>
/// Synchronous variant: writes the revisions identified by <paramref name="changeVectors"/>
/// to the response, honoring If-None-Match.
/// </summary>
private void GetRevisionByChangeVector(DocumentsOperationContext context, StringValues changeVectors, bool metadataOnly)
{
    var stopwatch = Stopwatch.StartNew();
    var revisionsStorage = Database.DocumentsStorage.RevisionsStorage;

    var revisions = new List<Document>(changeVectors.Count);
    foreach (var cv in changeVectors)
    {
        var revision = revisionsStorage.GetRevision(context, cv);

        // A single unknown change vector is a 404; for a multi-get, nulls stay in place.
        if (revision == null && changeVectors.Count == 1)
        {
            HttpContext.Response.StatusCode = (int)HttpStatusCode.NotFound;
            return;
        }

        revisions.Add(revision);
    }

    var computedEtag = ComputeHttpEtags.ComputeEtagForRevisions(revisions);
    if (GetStringFromHeaders("If-None-Match") == computedEtag)
    {
        HttpContext.Response.StatusCode = (int)HttpStatusCode.NotModified;
        return;
    }

    HttpContext.Response.Headers[Constants.Headers.Etag] = "\"" + computedEtag + "\"";

    int numberOfResults;
    if (GetBoolValueQueryString("blittable", required: false) ?? false)
    {
        WriteRevisionsBlittable(context, revisions, out numberOfResults);
    }
    else
    {
        WriteRevisionsJson(context, metadataOnly, revisions, out numberOfResults);
    }

    AddPagingPerformanceHint(PagingOperationType.Documents, nameof(GetRevisionByChangeVector), HttpContext, numberOfResults, revisions.Count, stopwatch.Elapsed);
}
/// <summary>
/// Resolves the include for <paramref name="id"/> exactly once: caches the result (even when
/// the document is missing), folds its change vector into the running etag hash, and exposes
/// found documents through the includes map.
/// </summary>
private void IncludeDocument(string id)
{
    // Already processed — the cached entry (possibly null) stands for this id.
    if (_includesDictionary.ContainsKey(id))
        return;

    var document = _context.DocumentDatabase.DocumentsStorage.Get(_context, id, throwOnConflict: false);
    document?.EnsureMetadata();

    // Cache nulls too, so missing documents are not re-fetched on repeat requests.
    _includesDictionary[id] = document?.Data;

    // A null change vector still contributes to the hash, keeping the etag stable per miss.
    ComputeHttpEtags.HashChangeVector(_state, document?.ChangeVector);

    if (document?.Data != null)
        _includes[id] = document.Data;
}
// Reads one page of time-series entries for docId/name within [from, to], hashing every visited
// segment's change vector so the caller can build an etag over the returned range.
// NOTE(review): 'start' and 'pageSize' are ref parameters — they are consumed in place so the
// caller can continue paging across multiple ranges with the remaining budget.
internal static unsafe TimeSeriesRangeResult GetTimeSeriesRange(DocumentsOperationContext context, string docId, string name, DateTime from, DateTime to, ref int start, ref int pageSize)
{
    if (pageSize == 0)
    {
        // Nothing requested: empty result, no hash computed.
        return (new TimeSeriesRangeResult());
    }
    List<TimeSeriesEntry> values = new List<TimeSeriesEntry>();
    var reader = new TimeSeriesReader(context, docId, name, from, to, offset: null);
    // init hash
    var size = Sodium.crypto_generichash_bytes();
    Debug.Assert((int)size == 32);
    var cryptoGenerichashStatebytes = (int)Sodium.crypto_generichash_statebytes();
    var state = stackalloc byte[cryptoGenerichashStatebytes];
    if (Sodium.crypto_generichash_init(state, null, UIntPtr.Zero, size) != 0)
    {
        ComputeHttpEtags.ThrowFailToInitHash();
    }
    var oldStart = start;
    var lastResult = true;
    foreach (var (individualValues, segmentResult) in reader.SegmentsOrValues())
    {
        // Whole segment falls before the requested page: skip it using the segment summary count.
        // NOTE(review): this uses Summary.Span[0].Count to skip — confirm whether that count
        // includes dead entries; a sibling variant of this method skips by NumberOfLiveEntries.
        if (individualValues == null && start > segmentResult.Summary.Span[0].Count)
        {
            start -= segmentResult.Summary.Span[0].Count;
            continue;
        }
        var enumerable = individualValues ?? segmentResult.Values;
        foreach (var singleResult in enumerable)
        {
            // Still inside the skipped prefix.
            if (start-- > 0)
            {
                continue;
            }
            // Page budget exhausted: remember that results were truncated.
            if (pageSize-- <= 0)
            {
                lastResult = false;
                break;
            }
            values.Add(new TimeSeriesEntry { Timestamp = singleResult.Timestamp, Tag = singleResult.Tag, Values = singleResult.Values.ToArray(), IsRollup = singleResult.Type == SingleResultType.RolledUp });
        }
        // Hash the segment's change vector even when all of its values were skipped,
        // so the etag reflects every segment the reader visited.
        ComputeHttpEtags.HashChangeVector(state, segmentResult?.ChangeVector);
        if (pageSize <= 0)
        {
            break;
        }
    }
    // NOTE(review): when the requested offset skipped past every value, an empty result is
    // returned with default From/To and no Hash — callers must tolerate the defaults.
    if ((oldStart > 0) && (values.Count == 0))
    {
        return (new TimeSeriesRangeResult());
    }
    // From: the first returned timestamp when an offset was applied, otherwise the query start.
    // To: the query end when everything fit in the page, otherwise the last emitted timestamp.
    return (new TimeSeriesRangeResult { From = (oldStart > 0) ? values[0].Timestamp : from, To = lastResult ? to : values.Last().Timestamp, Entries = values.ToArray(), Hash = ComputeHttpEtags.FinalizeHash(size, state) });
}
/// <summary>
/// Reads one page of time-series entries for <paramref name="docId"/>/<paramref name="name"/>
/// within [<paramref name="from"/>, <paramref name="to"/>], hashing every visited segment's
/// change vector so the caller can build an etag over the returned range.
/// </summary>
/// <param name="start">Ref: number of entries to skip; consumed in place for cross-range paging.</param>
/// <param name="pageSize">Ref: remaining page budget; consumed in place for cross-range paging.</param>
/// <returns>The page of entries, or null when <paramref name="pageSize"/> is already 0.</returns>
internal static unsafe TimeSeriesRangeResult GetTimeSeriesRange(DocumentsOperationContext context, string docId, string name, DateTime from, DateTime to, ref int start, ref int pageSize)
{
    if (pageSize == 0)
    {
        return null;
    }

    List<TimeSeriesEntry> values = new List<TimeSeriesEntry>();
    var reader = new TimeSeriesReader(context, docId, name, from, to, offset: null);

    // init hash
    var size = Sodium.crypto_generichash_bytes();
    Debug.Assert((int)size == 32);
    var cryptoGenerichashStatebytes = (int)Sodium.crypto_generichash_statebytes();
    var state = stackalloc byte[cryptoGenerichashStatebytes];
    if (Sodium.crypto_generichash_init(state, null, UIntPtr.Zero, size) != 0)
    {
        ComputeHttpEtags.ThrowFailToInitHash();
    }

    var initialStart = start;
    var hasMore = false;
    DateTime lastSeenEntry = from;

    foreach (var (individualValues, segmentResult) in reader.SegmentsOrValues())
    {
        // Whole segment falls before the requested page: skip it cheaply by its live-entry count.
        if (individualValues == null && start > segmentResult.Summary.NumberOfLiveEntries)
        {
            lastSeenEntry = segmentResult.End;
            start -= segmentResult.Summary.NumberOfLiveEntries;
            continue;
        }

        var enumerable = individualValues ?? segmentResult.Values;
        foreach (var singleResult in enumerable)
        {
            lastSeenEntry = segmentResult.End;

            // Still inside the skipped prefix.
            if (start-- > 0)
            {
                continue;
            }

            // Page budget exhausted: remember that results were truncated.
            if (pageSize-- <= 0)
            {
                hasMore = true;
                break;
            }

            values.Add(new TimeSeriesEntry
            {
                Timestamp = singleResult.Timestamp,
                Tag = singleResult.Tag,
                Values = singleResult.Values.ToArray(),
                IsRollup = singleResult.Type == SingleResultType.RolledUp
            });
        }

        // Hash the segment's change vector even when its values were skipped,
        // so the etag reflects every segment the reader visited.
        ComputeHttpEtags.HashChangeVector(state, segmentResult.ChangeVector);

        if (pageSize <= 0)
        {
            break;
        }
    }

    var hash = ComputeHttpEtags.FinalizeHash(size, state);

    if ((initialStart > 0) && (values.Count == 0))
    {
        // this is a special case, because before the 'start' we might have values
        // Fixed: the original text had the statement terminator misplaced outside the if block
        // ("... Hash = hash } } ;"), leaving the return without its semicolon.
        return new TimeSeriesRangeResult
        {
            From = lastSeenEntry,
            To = to,
            Entries = values.ToArray(),
            Hash = hash
        };
    }

    // From: first returned timestamp when an offset was applied, otherwise the query start.
    // To: the query end when everything fit in the page, otherwise the last emitted timestamp.
    return new TimeSeriesRangeResult
    {
        From = (initialStart > 0) ? values[0].Timestamp : from,
        To = hasMore ? values.Last().Timestamp : to,
        Entries = values.ToArray(),
        Hash = hash
    };
}