/// <summary>
/// Streams documents whose id starts with <paramref name="idPrefix"/> to <paramref name="addDoc"/>,
/// applying wildcard include/exclude filters, paging, and an optional server-side transformer.
/// </summary>
/// <param name="idPrefix">Required id prefix; trimmed before use.</param>
/// <param name="matches">Wildcard pattern the id suffix (after the prefix) must match; null/empty matches all.</param>
/// <param name="exclude">Wildcard pattern that excludes matching suffixes.</param>
/// <param name="start">Number of matched documents to skip (classic paging).</param>
/// <param name="pageSize">Maximum number of documents to emit.</param>
/// <param name="token">Cancellation token honored inside the scan loop.</param>
/// <param name="nextStart">In: continuation point from the previous page; out: continuation point for the
/// next page (set back to <paramref name="start"/> to mark the last page).</param>
/// <param name="addDoc">Callback receiving each emitted document; also invoked with null as a heartbeat.</param>
/// <param name="transformer">Optional transformer name.</param>
/// <param name="transformerParameters">Optional parameters for the transformer.</param>
/// <param name="skipAfter">Optional key after which the storage scan resumes.</param>
/// <exception cref="ArgumentNullException">When <paramref name="idPrefix"/> is null.</exception>
/// <exception cref="InvalidOperationException">When <paramref name="transformer"/> names no known transformer.</exception>
public void GetDocumentsWithIdStartingWith(string idPrefix, string matches, string exclude, int start, int pageSize, CancellationToken token, ref int nextStart, Action<JsonDocument> addDoc, string transformer = null, Dictionary<string, RavenJToken> transformerParameters = null, string skipAfter = null)
{
    if (idPrefix == null)
    {
        throw new ArgumentNullException("idPrefix");
    }
    idPrefix = idPrefix.Trim();
    // "Rapid pagination": when the caller passes back the nextStart we handed out, resume the raw
    // storage scan at that offset instead of re-skipping matched documents from the beginning.
    var canPerformRapidPagination = nextStart > 0 && start == nextStart;
    var actualStart = canPerformRapidPagination ? start : 0;
    var addedDocs = 0;
    var docCountOnLastAdd = 0;
    var matchedDocs = 0;
    TransactionalStorage.Batch(
        actions =>
        {
            var docsToSkip = canPerformRapidPagination ? 0 : start;
            int docCount;
            AbstractTransformer storedTransformer = null;
            var hasTransformer = transformer != null;
            if (hasTransformer)
            {
                storedTransformer = IndexDefinitionStorage.GetTransformer(transformer);
                if (storedTransformer == null)
                {
                    throw new InvalidOperationException("No transformer with the name: " + transformer);
                }
            }
            do
            {
                Database.WorkContext.UpdateFoundWork();
                docCount = 0;
                var docs = actions.Documents.GetDocumentsWithIdStartingWith(idPrefix, actualStart, pageSize, string.IsNullOrEmpty(skipAfter) ? null : skipAfter);
                var documentRetriever = new DocumentRetriever(Database.Configuration, actions, Database.ReadTriggers, transformerParameters, hasTransformer: hasTransformer);
                foreach (var doc in docs)
                {
                    token.ThrowIfCancellationRequested();
                    docCount++;
                    // Emit a null document periodically so the consumer knows we are still alive
                    // while scanning long runs of non-matching documents.
                    if (docCount - docCountOnLastAdd > 1000)
                    {
                        addDoc(null); // heartbeat
                    }
                    // Wildcard filters apply to the id suffix after the prefix.
                    var keyTest = doc.Key.Substring(idPrefix.Length);
                    if (!WildcardMatcher.Matches(matches, keyTest) || WildcardMatcher.MatchesExclusion(exclude, keyTest))
                    {
                        continue;
                    }
                    JsonDocument.EnsureIdInMetadata(doc);
                    // Substitute the in-flight (transactional) version of the document when one applies.
                    var nonAuthoritativeInformationBehavior = actions.InFlightStateSnapshot.GetNonAuthoritativeInformationBehavior<JsonDocument>(null, doc.Key);
                    var document = nonAuthoritativeInformationBehavior != null ? nonAuthoritativeInformationBehavior(doc) : doc;
                    document = documentRetriever.ExecuteReadTriggers(document, null, ReadOperation.Load);
                    if (document == null)
                    {
                        continue;
                    }
                    matchedDocs++;
                    // Classic paging: skip already-delivered matches (zero when rapid pagination is active).
                    if (matchedDocs <= docsToSkip)
                    {
                        continue;
                    }
                    token.ThrowIfCancellationRequested();
                    document = TransformDocumentIfNeeded(document, storedTransformer, documentRetriever);
                    addDoc(document);
                    addedDocs++;
                    docCountOnLastAdd = docCount;
                    if (addedDocs >= pageSize)
                    {
                        break;
                    }
                }
                actualStart += pageSize;
            } while (docCount > 0 && addedDocs < pageSize && actualStart > 0 && actualStart < int.MaxValue);
        });
    // Compute the continuation point handed back to the caller for the next page.
    if (addedDocs != pageSize)
    {
        nextStart = start; // will mark as last page
    }
    else if (canPerformRapidPagination)
    {
        nextStart = start + matchedDocs;
    }
    else
    {
        nextStart = actualStart;
    }
}
// True when the GC gen3 size metric is not matched by any DisableMetrics pattern.
private bool IsGcGen3SizeName(IReadOnlyList<WildcardMatcher> disabledMetrics)
{
    var isDisabled = WildcardMatcher.IsAnyMatch(disabledMetrics, GcGen3SizeName);
    return !isDisabled;
}
/// <summary>
/// Starts an APM transaction for an incoming HTTP request, continuing a distributed trace when a
/// traceparent header (plain or "elastic-apm-"-prefixed) is present and parseable.
/// </summary>
/// <param name="context">The HTTP context whose request is being traced.</param>
/// <param name="logger">Logger for debug/error diagnostics.</param>
/// <param name="tracer">Tracer used to start the transaction.</param>
/// <param name="configSnapshot">Supplies TransactionIgnoreUrls; may be null.</param>
/// <returns>The started transaction, or null when the URL is ignored or starting the transaction throws.</returns>
internal static ITransaction StartTransactionAsync(HttpContext context, IApmLogger logger, ITracer tracer, IConfigSnapshot configSnapshot)
{
    try
    {
        // Skip tracing entirely for URLs the user configured to ignore.
        if (WildcardMatcher.IsAnyMatch(configSnapshot?.TransactionIgnoreUrls, context.Request.Path))
        {
            logger.Debug()?.Log("Request ignored based on TransactionIgnoreUrls, url: {urlPath}", context.Request.Path);
            return (null);
        }
        ITransaction transaction;
        var transactionName = $"{context.Request.Method} {context.Request.Path}";
        // Prefer the standard traceparent header; fall back to the prefixed variant only when absent.
        var containsTraceParentHeader = context.Request.Headers.TryGetValue(TraceContext.TraceParentHeaderName, out var traceParentHeader);
        var containsPrefixedTraceParentHeader = false;
        if (!containsTraceParentHeader)
        {
            containsPrefixedTraceParentHeader = context.Request.Headers.TryGetValue(TraceContext.TraceParentHeaderNamePrefixed, out traceParentHeader);
        }
        if (containsPrefixedTraceParentHeader || containsTraceParentHeader)
        {
            // tracestate is optional; parse it together with traceparent when available.
            var tracingData = context.Request.Headers.TryGetValue(TraceContext.TraceStateHeaderName, out var traceStateHeader)
                ? TraceContext.TryExtractTracingData(traceParentHeader, traceStateHeader)
                : TraceContext.TryExtractTracingData(traceParentHeader);
            if (tracingData != null)
            {
                logger.Debug()
                    ?.Log(
                        "Incoming request with {TraceParentHeaderName} header. DistributedTracingData: {DistributedTracingData}. Continuing trace.",
                        containsPrefixedTraceParentHeader ? TraceContext.TraceParentHeaderNamePrefixed : TraceContext.TraceParentHeaderName,
                        tracingData);
                // Valid incoming trace context: continue the distributed trace.
                transaction = tracer.StartTransaction(transactionName, ApiConstants.TypeRequest, tracingData);
            }
            else
            {
                logger.Debug()
                    ?.Log(
                        "Incoming request with invalid {TraceParentHeaderName} header (received value: {TraceParentHeaderValue}). Starting trace with new trace id.",
                        containsPrefixedTraceParentHeader ? TraceContext.TraceParentHeaderNamePrefixed : TraceContext.TraceParentHeaderName,
                        traceParentHeader);
                transaction = tracer.StartTransaction(transactionName, ApiConstants.TypeRequest);
            }
        }
        else
        {
            // No incoming trace context: start a brand-new trace.
            logger.Debug()?.Log("Incoming request. Starting Trace.");
            transaction = tracer.StartTransaction(transactionName, ApiConstants.TypeRequest);
        }
        return (transaction);
    }
    catch (Exception ex)
    {
        // Tracing must never break request processing; swallow after logging.
        logger?.Error()?.LogException(ex, "Exception thrown while trying to start transaction");
        return (null);
    }
}
/// <summary>
/// Creates a messaging transaction for a consumed Kafka message, continuing a distributed trace
/// propagated via binary traceparent/tracestate headers when present.
/// </summary>
/// <param name="agent">The APM agent.</param>
/// <param name="topic">The topic the message was consumed from; may be null/empty.</param>
/// <param name="partition">Partition the message came from; recorded as a label when present.</param>
/// <param name="offset">Offset of the message; recorded as a label when present.</param>
/// <param name="message">The consumed message; may be null.</param>
/// <returns>The new transaction, or null when a transaction is already active or the topic is ignored.</returns>
internal static ITransaction CreateConsumerTransaction(
    IApmAgent agent,
    string topic,
    Partition? partition,
    Offset? offset,
    IMessage message)
{
    ITransaction transaction = null;
    try
    {
        // Don't nest: if a transaction is already in flight, leave it alone.
        if (agent.Tracer.CurrentTransaction is not null)
        {
            return (null);
        }
        if (agent is ApmAgent apmAgent)
        {
            // Honor the IgnoreMessageQueues configuration for this topic.
            var matcher = WildcardMatcher.AnyMatch(apmAgent.ConfigurationStore.CurrentSnapshot.IgnoreMessageQueues, topic);
            if (matcher != null)
            {
                agent.Logger.Trace()
                    ?.Log(
                        "Not tracing message from {Queue} because it matched IgnoreMessageQueues pattern {Matcher}",
                        topic,
                        matcher.GetMatcher());
                return (null);
            }
        }
        DistributedTracingData distributedTracingData = null;
        if (message?.Headers != null)
        {
            var headers = new KafkaHeadersCollection(message.Headers, agent.Logger);
            try
            {
                // traceparent may arrive as multiple header values; join them before parsing.
                var traceParent = string.Join(",", headers.GetValues(TraceContext.TraceParentBinaryHeaderName));
                var traceState = headers.GetValues(TraceContext.TraceStateHeaderName).FirstOrDefault();
                distributedTracingData = TraceContext.TryExtractTracingData(traceParent, traceState);
            }
            catch (Exception ex)
            {
                agent.Logger.Error()?.LogException(ex, "Error extracting propagated headers from Kafka message");
            }
        }
        var name = string.IsNullOrEmpty(topic) ? "Kafka RECEIVE" : $"Kafka RECEIVE from {topic}";
        transaction = agent.Tracer.StartTransaction(name, ApiConstants.TypeMessaging, distributedTracingData);
        if (partition is not null)
        {
            transaction.SetLabel("partition", partition.ToString());
        }
        if (offset is not null)
        {
            transaction.SetLabel("offset", offset.ToString());
        }
        // record only queue topic name and age on context for now. capture body and headers potentially in future
        transaction.Context.Message = new Message { Queue = new Queue { Name = topic } };
        // Timestamp.Type != 0 means the broker/producer stamped the message, so an age can be computed.
        if (transaction is Transaction realTransaction && message is not null && message.Timestamp.Type != 0)
        {
            var consumeTime = TimeUtils.ToDateTime(realTransaction.Timestamp);
            var produceTime = message.Timestamp.UtcDateTime;
            // Clamp to non-negative to guard against clock skew between producer and consumer.
            var age = Math.Max(0, (consumeTime - produceTime).TotalMilliseconds);
            if (age > 0 && age < MaxAge)
            {
                transaction.Context.Message.Age = new Age { Ms = (long)age };
            }
        }
        // A non-null message with a null value is a Kafka tombstone record.
        if (message is not null && message.Value is null)
        {
            transaction.SetLabel("tombstone", "true");
        }
    }
    catch (Exception ex)
    {
        agent.Logger.Error()?.LogException(ex, "Error creating or populating transaction.");
    }
    return (transaction);
}
// True when the GC collection-count metric is not matched by any DisableMetrics pattern.
private bool IsGcCountNameEnabled(IReadOnlyList<WildcardMatcher> disabledMetrics)
{
    var isDisabled = WildcardMatcher.IsAnyMatch(disabledMetrics, GcCountName);
    return !isDisabled;
}
// Enabled unless the process CPU metric is matched by a DisableMetrics pattern.
public bool IsEnabled(IReadOnlyList<WildcardMatcher> disabledMetrics)
{
    var isDisabled = WildcardMatcher.IsAnyMatch(disabledMetrics, ProcessCpuTotalPct);
    return !isDisabled;
}
// Enabled unless the span self-time metric is matched by a DisableMetrics pattern.
// FIX: the predicate was inverted — it returned IsAnyMatch directly, i.e. "enabled only when
// disabled". Every sibling IsEnabled/Is*Enabled check in this codebase returns the NEGATION of
// WildcardMatcher.IsAnyMatch over the disabled-metrics list.
public bool IsEnabled(IReadOnlyList<WildcardMatcher> disabledMetrics) =>
    !WildcardMatcher.IsAnyMatch(disabledMetrics, BreakdownMetricsProvider.SpanSelfTime);
/// <summary>
/// Queries up to <paramref name="limit"/> tagged requests from the embedded database, ordered by
/// descending timestamp, optionally restricted to tags matching any of the wildcard patterns in
/// <paramref name="filterTags"/> (null means no tag filter).
/// </summary>
/// <param name="limit">Maximum number of records to return.</param>
/// <param name="filterTags">Optional wildcard patterns; a record is kept when its tag matches any of them.</param>
/// <returns>The matching tagged requests.</returns>
public async Task<IEnumerable<TagRequest>> QueryTaggedRequestsAsync(int limit, string[] filterTags = null)
{
    // Offload the synchronous database query to the thread pool.
    var result = await Task.Run(() =>
    {
        // Get tagged requests collection
        var taggedRequests = ServerContext.Database.GetCollection<TagRequest>(DatabaseConstants.TaggedRequestDataKey);
        // Log by descending timestamp
        return (taggedRequests.Find(
            Query.And(
                Query.All(nameof(TagRequest.Timestamp), Query.Descending),
                // Tag predicate is evaluated per record against each wildcard filter.
                Query.Where(nameof(TagRequest.Tag), v => filterTags == null || filterTags.Any(f => WildcardMatcher.IsMatch(v.AsString, f)))
            ),
            limit: limit
        ));
    });
    return (result);
}
/// <summary>
/// OnMethodEnd callback: creates and ends a "POLL from" messaging span for a completed
/// RabbitMQ BasicGet call, honoring the IgnoreMessageQueues configuration for both the
/// queue and the message's exchange.
/// </summary>
/// <typeparam name="TTarget">Type of the target</typeparam>
/// <typeparam name="TResult">Type of the BasicGetResult</typeparam>
/// <param name="instance">Instance value, aka `this` of the instrumented method.</param>
/// <param name="basicGetResult">BasicGetResult instance</param>
/// <param name="exception">Exception instance in case the original code threw an exception.</param>
/// <param name="state">Calltarget state value</param>
/// <returns>A default CallTargetReturn to satisfy the CallTarget contract</returns>
public static CallTargetReturn<TResult> OnMethodEnd<TTarget, TResult>(TTarget instance, TResult basicGetResult, Exception exception, CallTargetState state)
    where TResult : IBasicGetResult, IDuckType
{
    // Queue name and start time were stashed in the CallTarget state when the method began.
    var queue = (string)state.State;
    var startTime = state.StartTime;
    var agent = Agent.Instance;
    var transaction = agent.Tracer.CurrentTransaction;
    // Only create a span when there is an enclosing transaction.
    if (transaction is null)
    {
        return (new CallTargetReturn<TResult>(basicGetResult));
    }
    var matcher = WildcardMatcher.AnyMatch(transaction.Configuration.IgnoreMessageQueues, queue);
    if (matcher != null)
    {
        agent.Logger.Trace()
            ?.Log(
                "Not tracing message from {Queue} because it matched IgnoreMessageQueues pattern {Matcher}",
                queue,
                matcher.GetMatcher());
        return (new CallTargetReturn<TResult>(basicGetResult));
    }
    // check if there is an actual instance of the duck-typed type. RabbitMQ client can return null when the server
    // answers that there are no messages available
    var instanceNotNull = basicGetResult.Instance != null;
    if (instanceNotNull)
    {
        // Also honor IgnoreMessageQueues for the (normalized) exchange the message came through.
        var normalizedExchange = RabbitMqIntegration.NormalizeExchangeName(basicGetResult.Exchange);
        matcher = WildcardMatcher.AnyMatch(transaction.Configuration.IgnoreMessageQueues, normalizedExchange);
        if (matcher != null)
        {
            agent.Logger.Trace()
                ?.Log(
                    "Not tracing message from {Queue} because it matched IgnoreMessageQueues pattern {Matcher}",
                    normalizedExchange,
                    matcher.GetMatcher());
            return (new CallTargetReturn<TResult>(basicGetResult));
        }
    }
    var normalizedQueue = RabbitMqIntegration.NormalizeQueueName(queue);
    var span = agent.Tracer.CurrentExecutionSegment().StartSpan(
        $"{RabbitMqIntegration.Name} POLL from {normalizedQueue}",
        ApiConstants.TypeMessaging,
        RabbitMqIntegration.Subtype);
    // Backdate the span to when BasicGet started, if a start time was captured.
    if (startTime.HasValue && span is Span realSpan)
    {
        realSpan.Timestamp = TimeUtils.ToTimestamp(startTime.Value);
    }
    span.Context.Message = new Message { Queue = new Queue { Name = queue } };
    if (instanceNotNull)
    {
        span.SetLabel("message_size", basicGetResult.Body?.Length ?? 0);
        if (!string.IsNullOrEmpty(basicGetResult.RoutingKey))
        {
            span.Context.Message.RoutingKey = basicGetResult.RoutingKey;
        }
    }
    // End the span, attaching the exception (if any) thrown by the instrumented method.
    span.EndCapturingException(exception);
    return (new CallTargetReturn<TResult>(basicGetResult));
}
/// <summary>
/// Extracts the request body using buffering to prevent the 'read once' problem (the body cannot
/// be read again after it has already been read). Form bodies are serialized as key=value pairs
/// with configured sensitive field values redacted; other bodies are read as UTF-8 text and
/// truncated to the configured maximum length.
/// </summary>
/// <param name="request">The request whose body is captured.</param>
/// <param name="logger">Logger for read failures.</param>
/// <param name="configSnapshot">Supplies SanitizeFieldNames used for redaction.</param>
/// <returns>The (possibly truncated/redacted) body text, or null when reading fails or there is no form data.</returns>
public static async Task<string> ExtractRequestBodyAsync(this HttpRequest request, IApmLogger logger, IConfigSnapshot configSnapshot)
{
    string body = null;
    try
    {
        if (request.HasFormContentType)
        {
            var form = await request.ReadFormAsync(Consts.FormContentOptions);
            var itemProcessed = 0;
            if (form != null && form.Count > 0)
            {
                var sb = new StringBuilder();
                foreach (var item in form)
                {
                    sb.Append(item.Key);
                    sb.Append("=");
                    // Redact values whose field name matches SanitizeFieldNames (e.g. passwords).
                    if (WildcardMatcher.IsAnyMatch(configSnapshot.SanitizeFieldNames, item.Key))
                    {
                        sb.Append(Elastic.Apm.Consts.Redacted);
                    }
                    else
                    {
                        sb.Append(item.Value);
                    }
                    itemProcessed++;
                    // Join pairs with '&' (no trailing separator after the last pair).
                    if (itemProcessed != form.Count)
                    {
                        sb.Append("&");
                    }
                }
                body = sb.ToString();
            }
        }
        else
        {
            // Buffer the stream so downstream middleware can still read the body afterwards.
            request.EnableBuffering();
            request.Body.Position = 0;
            using (var reader = new StreamReader(request.Body, Encoding.UTF8, false, 1024 * 2, true))
                body = await reader.ReadToEndAsync();
            // Truncate the body to the first 2kb if it's longer
            if (body.Length > Consts.RequestBodyMaxLength)
            {
                body = body.Substring(0, Consts.RequestBodyMaxLength);
            }
            // Rewind so the next reader sees the body from the start.
            request.Body.Position = 0;
        }
    }
    catch (IOException ioException)
    {
        logger.Error()?.LogException(ioException, "IO Error reading request body");
    }
    catch (Exception e)
    {
        logger.Error()?.LogException(e, "Error reading request body");
    }
    return (body);
}
/// <summary>
/// Extracts the request body, up to a specified maximum length.
/// The request body that is read is buffered. Form bodies are serialized as key=value pairs with
/// configured sensitive field values redacted; other bodies are read synchronously in two passes
/// using pooled char buffers (first pass sizes the read, second pass materializes the string).
/// </summary>
/// <param name="request">The request</param>
/// <param name="logger">The logger</param>
/// <param name="configSnapshot">The configuration snapshot</param>
/// <returns>The body truncated to RequestBodyMaxLength, or null when reading fails or there is no form data.</returns>
public static string ExtractRequestBody(this HttpRequest request, IApmLogger logger, IConfigSnapshot configSnapshot)
{
    string body = null;
    var longerThanMaxLength = false;
    try
    {
        if (request.HasFormContentType)
        {
            var form = request.Form;
            var itemProcessed = 0;
            if (form != null && form.Count > 0)
            {
                var sb = new StringBuilder();
                foreach (var item in form)
                {
                    sb.Append(item.Key);
                    sb.Append("=");
                    // Redact values whose field name matches SanitizeFieldNames.
                    if (WildcardMatcher.IsAnyMatch(configSnapshot.SanitizeFieldNames, item.Key))
                    {
                        sb.Append(Elastic.Apm.Consts.Redacted);
                    }
                    else
                    {
                        sb.Append(item.Value);
                    }
                    itemProcessed++;
                    if (itemProcessed != form.Count)
                    {
                        sb.Append("&");
                    }
                    // perf: check length once per iteration and truncate at the end, rather than each append
                    if (sb.Length > RequestBodyMaxLength)
                    {
                        longerThanMaxLength = true;
                        break;
                    }
                }
                body = sb.ToString(0, Math.Min(sb.Length, RequestBodyMaxLength));
            }
        }
        else
        {
            // allow synchronous reading of the request stream, which is false by default from 3.0 onwards.
            // Reading must be synchronous as it can happen within a synchronous diagnostic listener method
            var bodyControlFeature = request.HttpContext.Features.Get<IHttpBodyControlFeature>();
            if (bodyControlFeature != null)
            {
                bodyControlFeature.AllowSynchronousIO = true;
            }
            request.EnableBuffering();
            var requestBody = request.Body;
            requestBody.Position = 0;
            var arrayPool = ArrayPool<char>.Shared;
            var capacity = 512;
            var buffer = arrayPool.Rent(capacity);
            var totalRead = 0;
            int read;
            // requestBody.Length is 0 on initial buffering - length relates to how much has been read and buffered.
            // Read to just beyond request body max length so that we can determine if truncation will occur
            try
            {
                // TODO: can we assume utf-8 encoding?
                using var reader = new StreamReader(requestBody, Encoding.UTF8, false, buffer.Length, true);
                while ((read = reader.Read(buffer, 0, capacity)) != 0)
                {
                    totalRead += read;
                    if (totalRead > RequestBodyMaxLength)
                    {
                        longerThanMaxLength = true;
                        break;
                    }
                }
            }
            finally
            {
                arrayPool.Return(buffer);
            }
            // Second pass: rewind and read exactly min(totalRead, max) chars into a pooled buffer.
            requestBody.Position = 0;
            capacity = Math.Min(totalRead, RequestBodyMaxLength);
            buffer = arrayPool.Rent(capacity);
            try
            {
                using var reader = new StreamReader(requestBody, Encoding.UTF8, false, RequestBodyMaxLength, true);
                read = reader.ReadBlock(buffer, 0, capacity);
                body = new string(buffer, 0, read);
            }
            finally
            {
                arrayPool.Return(buffer);
            }
            // Rewind so later readers (e.g. MVC model binding) see the full body.
            requestBody.Position = 0;
        }
    }
    catch (IOException ioException)
    {
        logger.Error()?.LogException(ioException, "IO Error reading request body");
    }
    catch (Exception e)
    {
        logger.Error()?.LogException(e, "Error reading request body");
    }
    if (longerThanMaxLength)
    {
        logger.Debug()?.Log("truncated body to max length {MaxLength}", RequestBodyMaxLength);
    }
    return (body);
}
// Parametrized test: verifies WildcardMatcher.Matches for a (pattern, input, expected) case.
public void CanMatch(string pattern, string input, bool expected)
{
    var actual = WildcardMatcher.Matches(pattern, input);
    Assert.Equal(expected, actual);
}
// True when the process working-set metric is not matched by any DisableMetrics pattern.
private bool IsProcessWorkingSetMemoryEnabled(IReadOnlyList<WildcardMatcher> disabledMetrics)
{
    var isDisabled = WildcardMatcher.IsAnyMatch(disabledMetrics, ProcessWorkingSetMemory);
    return !isDisabled;
}
// True when the total-memory metric is not matched by any DisableMetrics pattern.
private bool IsTotalMemoryEnabled(IReadOnlyList<WildcardMatcher> disabledMetrics)
{
    var isDisabled = WildcardMatcher.IsAnyMatch(disabledMetrics, TotalMemory);
    return !isDisabled;
}
/// <summary>
/// Wires up every metrics provider that is not disabled via the DisableMetrics wildcard list, then
/// starts a timer that collects all providers every MetricsIntervalInMilliseconds.
/// An interval of 0 disables metrics collection entirely.
/// </summary>
/// <param name="logger">Base logger; scoped to this type.</param>
/// <param name="payloadSender">Sink the collected metric sets are sent to.</param>
/// <param name="configSnapshotProvider">Source of the current configuration snapshot.</param>
public MetricsCollector(IApmLogger logger, IPayloadSender payloadSender, IConfigSnapshotProvider configSnapshotProvider)
{
    _logger = logger.Scoped(nameof(MetricsCollector));
    _payloadSender = payloadSender;
    _configSnapshotProvider = configSnapshotProvider;
    var currentConfigSnapshot = configSnapshotProvider.CurrentSnapshot;
    var interval = currentConfigSnapshot.MetricsIntervalInMilliseconds;
    // ReSharper disable once CompareOfFloatsByEqualityOperator
    if (interval == 0)
    {
        _logger.Info()?.Log("Collecting metrics is disabled - the agent won't collect metrics");
        return;
    }
    MetricsProviders = new List<IMetricsProvider>();
    // CPU providers: process-level and system-level, each individually disableable.
    if (!WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, ProcessTotalCpuTimeProvider.ProcessCpuTotalPct))
    {
        MetricsProviders.Add(new ProcessTotalCpuTimeProvider(_logger));
    }
    if (!WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, SystemTotalCpuProvider.SystemCpuTotalPct))
    {
        MetricsProviders.Add(new SystemTotalCpuProvider(_logger));
    }
    // Process memory provider: added when either of its two metrics is enabled.
    var collectProcessWorkingSet = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, ProcessWorkingSetAndVirtualMemoryProvider.ProcessWorkingSetMemory);
    var collectProcessVirtualMemory = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, ProcessWorkingSetAndVirtualMemoryProvider.ProcessVirtualMemory);
    if (collectProcessVirtualMemory || collectProcessWorkingSet)
    {
        MetricsProviders.Add(new ProcessWorkingSetAndVirtualMemoryProvider(collectProcessVirtualMemory, collectProcessWorkingSet));
    }
    // System memory provider: added when either free or total memory is enabled.
    var collectTotalMemory = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, FreeAndTotalMemoryProvider.TotalMemory);
    var collectFreeMemory = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, FreeAndTotalMemoryProvider.FreeMemory);
    if (collectFreeMemory || collectTotalMemory)
    {
        MetricsProviders.Add(new FreeAndTotalMemoryProvider(collectFreeMemory, collectTotalMemory));
    }
    // GC provider: added when any of the five GC metrics is enabled.
    var collectGcCount = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, GcMetricsProvider.GcCountName);
    var collectGen0Size = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, GcMetricsProvider.GcGen0SizeName);
    var collectGen1Size = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, GcMetricsProvider.GcGen1SizeName);
    var collectGen2Size = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, GcMetricsProvider.GcGen2SizeName);
    var collectGen3Size = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, GcMetricsProvider.GcGen3SizeName);
    if (collectGcCount || collectGen0Size || collectGen1Size || collectGen2Size || collectGen3Size)
    {
        MetricsProviders.Add(new GcMetricsProvider(_logger, collectGcCount, collectGen0Size, collectGen1Size, collectGen2Size, collectGen3Size));
    }
    // cgroup (container) memory provider: added when any of its three metrics is enabled.
    var collectCgroupMemLimitBytes = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, CgroupMetricsProvider.SystemProcessCgroupMemoryMemLimitBytes);
    var collectCgroupMemUsageBytes = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, CgroupMetricsProvider.SystemProcessCgroupMemoryMemUsageBytes);
    var collectCgroupStatsInactiveFileBytes = !WildcardMatcher.IsAnyMatch(currentConfigSnapshot.DisableMetrics, CgroupMetricsProvider.SystemProcessCgroupMemoryStatsInactiveFileBytes);
    if (collectCgroupMemLimitBytes || collectCgroupMemUsageBytes || collectCgroupStatsInactiveFileBytes)
    {
        MetricsProviders.Add(
            new CgroupMetricsProvider(_logger, collectCgroupMemLimitBytes, collectCgroupMemUsageBytes, collectCgroupStatsInactiveFileBytes));
    }
    _logger.Info()?.Log("Collecting metrics in {interval} milliseconds interval", interval);
    _timer = new Timer(interval);
    _timer.Elapsed += (sender, args) => { CollectAllMetrics(); };
}
// Verifies that multi-wildcard patterns match paths whose '*' segments span several path parts.
public void TestComplexExpressions()
{
    var singleInnerWildcard = WildcardMatcher.ValueOf("/foo/*/baz*");
    singleInnerWildcard.Matches("/foo/a/bar/b/baz").Should().BeTrue();

    var doubleInnerWildcard = WildcardMatcher.ValueOf("/foo/*/bar/*/baz");
    doubleInnerWildcard.Matches("/foo/a/bar/b/baz").Should().BeTrue();
}
/// <summary>
/// Returns file headers in one of three modes: (a) files whose path starts with the "startsWith"
/// query value, filtered by the "matches" wildcard and paged; (b) the files named in
/// <paramref name="fileNames"/> (null entries for missing/deleted files, preserving positions);
/// (c) a generic paged search when neither is supplied.
/// </summary>
/// <param name="fileNames">Explicit file names to fetch when no "startsWith" is given.</param>
/// <returns>The list of matching file headers, with caching disabled.</returns>
public HttpResponseMessage Get([FromUri] string[] fileNames)
{
    var list = new List<FileHeader>();
    var startsWith = GetQueryStringValue("startsWith");
    if (string.IsNullOrEmpty(startsWith) == false)
    {
        var matches = GetQueryStringValue("matches");
        // Preserve a trailing slash through canonization so directory-style prefixes keep working.
        var endsWithSlash = startsWith.EndsWith("/") || startsWith.EndsWith("\\");
        startsWith = FileHeader.Canonize(startsWith);
        if (endsWithSlash)
        {
            startsWith += "/";
        }
        Storage.Batch(accessor =>
        {
            var actualStart = 0;
            // Number of matched files to skip for classic paging.
            var filesToSkip = Paging.Start;
            int fileCount, matchedFiles = 0, addedFiles = 0;
            do
            {
                fileCount = 0;
                foreach (var file in accessor.GetFilesStartingWith(startsWith, actualStart, Paging.PageSize))
                {
                    fileCount++;
                    // The wildcard filter applies to the path suffix after the prefix.
                    var keyTest = file.FullPath.Substring(startsWith.Length);
                    if (WildcardMatcher.Matches(matches, keyTest) == false)
                    {
                        continue;
                    }
                    // Respect read triggers (e.g. files hidden from this caller).
                    if (FileSystem.ReadTriggers.CanReadFile(file.FullPath, file.Metadata, ReadOperation.Load) == false)
                    {
                        continue;
                    }
                    matchedFiles++;
                    if (matchedFiles <= filesToSkip)
                    {
                        continue;
                    }
                    list.Add(file);
                    addedFiles++;
                }
                actualStart += Paging.PageSize;
            }
            while (fileCount > 0 && addedFiles < Paging.PageSize && actualStart > 0 && actualStart < int.MaxValue);
        });
    }
    else
    {
        if (fileNames != null && fileNames.Length > 0)
        {
            Storage.Batch(accessor =>
            {
                foreach (var path in fileNames.Where(x => x != null).Select(FileHeader.Canonize))
                {
                    var file = accessor.ReadFile(path);
                    // Emit null for missing or delete-marked files so results stay positionally
                    // aligned with the requested names.
                    if (file == null || file.Metadata.Keys.Contains(SynchronizationConstants.RavenDeleteMarker))
                    {
                        list.Add(null);
                        continue;
                    }
                    list.Add(file);
                }
            });
        }
        else
        {
            // No prefix and no names: fall back to a generic paged search over all files.
            int results;
            var keys = Search.Query(null, null, Paging.Start, Paging.PageSize, out results);
            Storage.Batch(accessor => list.AddRange(keys.Select(accessor.ReadFile).Where(x => x != null)));
        }
    }
    return (GetMessageWithObject(list)
        .WithNoCache());
}
/// <summary>
/// Creates an exit span for a produced Kafka message on the current execution segment.
/// Returns null when there is no current transaction or the topic matches IgnoreMessageQueues.
/// </summary>
/// <param name="agent">The APM agent.</param>
/// <param name="topicPartition">Target topic/partition; may be null.</param>
/// <param name="isTombstone">Whether the produced message is a tombstone (null value).</param>
/// <param name="finishOnClose">NOTE(review): not used in this body — presumably consumed by the
/// caller/delivery handler; TODO confirm.</param>
/// <returns>The created span, or null when no span was started.</returns>
internal static ISpan CreateProducerSpan(IApmAgent agent, ITopicPartition topicPartition, bool isTombstone, bool finishOnClose)
{
    ISpan span = null;
    try
    {
        // no current transaction, don't create a span
        var currentTransaction = agent.Tracer.CurrentTransaction;
        if (currentTransaction is null)
        {
            return (span);
        }
        var topic = topicPartition?.Topic;
        // Honor the IgnoreMessageQueues configuration for this topic.
        var matcher = WildcardMatcher.AnyMatch(currentTransaction.Configuration.IgnoreMessageQueues, topic);
        if (matcher != null)
        {
            agent.Logger.Trace()
                ?.Log(
                    "Not tracing message from {Queue} because it matched IgnoreMessageQueues pattern {Matcher}",
                    topic,
                    matcher.GetMatcher());
            return (span);
        }
        var spanName = string.IsNullOrEmpty(topic) ? "Kafka SEND" : $"Kafka SEND to {topic}";
        span = agent.GetCurrentExecutionSegment().StartSpan(
            spanName,
            ApiConstants.TypeMessaging,
            Subtype,
            isExitSpan: true);
        if (!string.IsNullOrEmpty(topic))
        {
            span.Context.Message = new Message { Queue = new Queue { Name = topic } };
        }
        // Special partition values carry no concrete partition number, so skip the label for them.
        if (topicPartition?.Partition is not null && !topicPartition.Partition.IsSpecial)
        {
            span.SetLabel("partition", topicPartition.Partition.ToString());
        }
        if (isTombstone)
        {
            span.SetLabel("tombstone", "true");
        }
    }
    catch (Exception ex)
    {
        agent.Logger.Error()?.LogException(ex, "Error creating or populating kafka span.");
    }
    return (span);
}
/// <summary>
/// Streams documents whose id starts with <paramref name="idPrefix"/> to <paramref name="addDoc"/>
/// as raw JSON, applying wildcard include/exclude filters, paging, and an optional transformer whose
/// results are wrapped in a "$values" array.
/// </summary>
/// <param name="idPrefix">Required id prefix; trimmed before use.</param>
/// <param name="matches">Wildcard pattern the id suffix (after the prefix) must match.</param>
/// <param name="exclude">Wildcard pattern that excludes matching suffixes.</param>
/// <param name="start">Number of matched documents to skip (classic paging).</param>
/// <param name="pageSize">Maximum number of documents to emit.</param>
/// <param name="token">Cancellation token honored inside the scan loop.</param>
/// <param name="nextStart">In: continuation point from the previous page; out: continuation point for
/// the next page (set back to <paramref name="start"/> to mark the last page).</param>
/// <param name="addDoc">Callback receiving each emitted document's JSON.</param>
/// <param name="transformer">Optional transformer name.</param>
/// <param name="transformerParameters">Optional parameters for the transformer.</param>
/// <param name="skipAfter">Optional key after which the storage scan resumes.</param>
/// <exception cref="ArgumentNullException">When <paramref name="idPrefix"/> is null.</exception>
/// <exception cref="InvalidOperationException">When the transformer is unknown or produces no results.</exception>
public void GetDocumentsWithIdStartingWith(string idPrefix, string matches, string exclude, int start, int pageSize, CancellationToken token, ref int nextStart, Action<RavenJObject> addDoc, string transformer = null, Dictionary<string, RavenJToken> transformerParameters = null, string skipAfter = null)
{
    if (idPrefix == null)
    {
        throw new ArgumentNullException("idPrefix");
    }
    idPrefix = idPrefix.Trim();
    // "Rapid pagination": resume the raw scan where the previous page left off when the caller
    // passes back the nextStart value we handed out.
    var canPerformRapidPagination = nextStart > 0 && start == nextStart;
    var actualStart = canPerformRapidPagination ? start : 0;
    var addedDocs = 0;
    var matchedDocs = 0;
    TransactionalStorage.Batch(
        actions =>
        {
            var docsToSkip = canPerformRapidPagination ? 0 : start;
            int docCount;
            AbstractTransformer storedTransformer = null;
            if (transformer != null)
            {
                storedTransformer = IndexDefinitionStorage.GetTransformer(transformer);
                if (storedTransformer == null)
                {
                    throw new InvalidOperationException("No transformer with the name: " + transformer);
                }
            }
            do
            {
                docCount = 0;
                var docs = actions.Documents.GetDocumentsWithIdStartingWith(idPrefix, actualStart, pageSize, string.IsNullOrEmpty(skipAfter) ? null : skipAfter);
                var documentRetriever = new DocumentRetriever(actions, Database.ReadTriggers, Database.InFlightTransactionalState, transformerParameters);
                foreach (var doc in docs)
                {
                    token.ThrowIfCancellationRequested();
                    docCount++;
                    // Wildcard filters apply to the id suffix after the prefix.
                    var keyTest = doc.Key.Substring(idPrefix.Length);
                    if (!WildcardMatcher.Matches(matches, keyTest) || WildcardMatcher.MatchesExclusion(exclude, keyTest))
                    {
                        continue;
                    }
                    DocumentRetriever.EnsureIdInMetadata(doc);
                    // Substitute the in-flight (transactional) version of the document when one applies.
                    var nonAuthoritativeInformationBehavior = Database.InFlightTransactionalState.GetNonAuthoritativeInformationBehavior<JsonDocument>(null, doc.Key);
                    var document = nonAuthoritativeInformationBehavior != null ? nonAuthoritativeInformationBehavior(doc) : doc;
                    document = documentRetriever.ExecuteReadTriggers(document, null, ReadOperation.Load);
                    if (document == null)
                    {
                        continue;
                    }
                    matchedDocs++;
                    // Classic paging: skip already-delivered matches (zero when rapid pagination is active).
                    if (matchedDocs <= docsToSkip)
                    {
                        continue;
                    }
                    token.ThrowIfCancellationRequested();
                    if (storedTransformer != null)
                    {
                        using (new CurrentTransformationScope(Database, documentRetriever))
                        {
                            var transformed = storedTransformer.TransformResultsDefinition(new[] { new DynamicJsonObject(document.ToJson()) })
                                .Select(x => JsonExtensions.ToJObject(x))
                                .ToArray();
                            if (transformed.Length == 0)
                            {
                                throw new InvalidOperationException("The transform results function failed on a document: " + document.Key);
                            }
                            // Etag mixes document, transformer and retriever etags so caches invalidate
                            // when any of them change.
                            var transformedJsonDocument = new JsonDocument
                            {
                                Etag = document.Etag.HashWith(storedTransformer.GetHashCodeBytes()).HashWith(documentRetriever.Etag),
                                NonAuthoritativeInformation = document.NonAuthoritativeInformation,
                                LastModified = document.LastModified,
                                DataAsJson = new RavenJObject { { "$values", new RavenJArray(transformed) } },
                            };
                            addDoc(transformedJsonDocument.ToJson());
                        }
                    }
                    else
                    {
                        addDoc(document.ToJson());
                    }
                    addedDocs++;
                    if (addedDocs >= pageSize)
                    {
                        break;
                    }
                }
                actualStart += pageSize;
            }
            while (docCount > 0 && addedDocs < pageSize && actualStart > 0 && actualStart < int.MaxValue);
        });
    // Compute the continuation point handed back to the caller for the next page.
    if (addedDocs != pageSize)
    {
        nextStart = start; // will mark as last page
    }
    else if (canPerformRapidPagination)
    {
        nextStart = start + matchedDocs;
    }
    else
    {
        nextStart = actualStart;
    }
}
/// <summary>
/// Builds the term stack used by the fast-vector highlighter: collects, for one document field,
/// every indexed term that belongs to the query's term set together with its offsets and positions,
/// sorted by position. Bails out silently (leaving the stack empty, which yields null snippets)
/// whenever the required term-vector data is unavailable.
/// </summary>
/// <param name="reader">Index reader supplying the term frequency/position vectors.</param>
/// <param name="docId">Document to highlight.</param>
/// <param name="fieldName">Field whose terms are stacked.</param>
/// <param name="fieldQuery">Query providing the term set to match against.</param>
/// <param name="state">Reader state passed through to the index access.</param>
public FieldTermStack(IndexReader reader, int docId, String fieldName, FieldQuery fieldQuery, IState state)
{
    this.fieldName = fieldName;
    var tfv = reader.GetTermFreqVector(docId, fieldName, state);
    if (tfv == null)
    {
        return; // just return to make null snippets
    }
    // Positions/offsets are only available when the vector is a TermPositionVector.
    TermPositionVector tpv = null;
    try
    {
        tpv = (TermPositionVector)tfv;
    }
    catch (InvalidCastException)
    {
        return; // just return to make null snippets
    }
    List<String> termSet = fieldQuery.getTermSet(fieldName);
    // just return to make null snippet if un-matched fieldName specified when fieldMatch == true
    if (termSet == null)
    {
        return;
    }
    // If any query term contains a wildcard, match each vector term with the wildcard matcher
    // instead of an exact set lookup.
    var needwildcard = termSet.Any(x => x.IndexOfAny(new char[] { '*', '?' }) != -1);
    foreach (String term in tpv.GetTerms())
    {
        if (needwildcard)
        {
            if (termSet.Any(ts => WildcardMatcher.Matches(ts, term)) == false)
            {
                continue;
            }
        }
        else if (!termSet.Contains(term))
        {
            continue;
        }
        int index = tpv.IndexOf(term);
        TermVectorOffsetInfo[] tvois = tpv.GetOffsets(index);
        if (tvois == null)
        {
            return; // just return to make null snippets
        }
        int[] poss = tpv.GetTermPositions(index);
        if (poss == null)
        {
            return; // just return to make null snippets
        }
        // One TermInfo per occurrence, pairing each offset with its position.
        for (int i = 0; i < tvois.Length; i++)
        {
            termList.AddLast(new TermInfo(term, tvois[i].StartOffset, tvois[i].EndOffset, poss[i]));
        }
    }
    // sort by position
    //Collections.sort(termList);
    Sort(termList);
}