/// <summary>
/// Queries data source returning data as Grafana time-series data set.
/// </summary>
/// <param name="request">Query request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <returns>Task producing the list of time-series values matching the request.</returns>
public Task<List<TimeSeriesValues>> Query(QueryRequest request, CancellationToken cancellationToken)
{
    bool isFilterMatch(string target, AdHocFilter filter)
    {
        // Default to positive match on failures. Cache key must include the target:
        // the result depends on per-target metadata (LookupTargetMetadata below), so
        // caching on the filter alone would reuse one target's verdict for all targets.
        return TargetCache<bool>.GetOrAdd($"filter!{target}!{filter.key}{filter.@operator}{filter.value}", () =>
        {
            try
            {
                DataRow metadata = LookupTargetMetadata(target);

                // No metadata available - treat as a match so the series is not dropped
                if (metadata == null)
                    return true;

                // Coerce filter value to the metadata column type so comparisons are typed
                dynamic left = metadata[filter.key];
                dynamic right = Convert.ChangeType(filter.value, metadata.Table.Columns[filter.key].DataType);

                switch (filter.@operator)
                {
                    case "=":
                    case "==":
                        return left == right;
                    case "!=":
                    case "<>":
                        return left != right;
                    case "<":
                        return left < right;
                    case "<=":
                        return left <= right;
                    case ">":
                        return left > right;
                    case ">=":
                        return left >= right;
                }

                // Unrecognized operator - default to match
                return true;
            }
            catch
            {
                // Best-effort filtering: any metadata/conversion failure counts as a match
                return true;
            }
        });
    }

    float lookupTargetCoordinate(string target, string field)
    {
        // Coordinates default to 0.0 when metadata or field is unavailable
        return TargetCache<float>.GetOrAdd($"{target}_{field}", () =>
            LookupTargetMetadata(target)?.ConvertNullableField<float>(field) ?? 0.0F);
    }

    // Task allows processing of multiple simultaneous queries
    return Task.Factory.StartNew(() =>
    {
        if (!string.IsNullOrWhiteSpace(request.format) && !request.format.Equals("json", StringComparison.OrdinalIgnoreCase))
            throw new InvalidOperationException("Only JSON formatted query requests are currently supported.");

        DateTime startTime = request.range.from.ParseJsonTimestamp();
        DateTime stopTime = request.range.to.ParseJsonTimestamp();

        // Normalize target expressions before querying
        foreach (Target target in request.targets)
            target.target = target.target?.Trim() ?? "";

        DataSourceValueGroup[] valueGroups = request.targets
            .Select(target => QueryTarget(target, target.target, startTime, stopTime, request.interval, true, false, cancellationToken))
            .SelectMany(groups => groups)
            .ToArray();

        // Establish result series sequentially so that order remains consistent between calls
        List<TimeSeriesValues> result = valueGroups.Select(valueGroup => new TimeSeriesValues
        {
            target = valueGroup.Target,
            rootTarget = valueGroup.RootTarget,
            latitude = lookupTargetCoordinate(valueGroup.RootTarget, "Latitude"),
            longitude = lookupTargetCoordinate(valueGroup.RootTarget, "Longitude"),
            dropEmptySeries = valueGroup.DropEmptySeries
        }).ToList();

        // Apply any encountered ad-hoc filters
        if (request.adhocFilters?.Count > 0)
        {
            foreach (AdHocFilter filter in request.adhocFilters)
                result = result.Where(values => isFilterMatch(values.rootTarget, filter)).ToList();
        }

        // Process series data in parallel
        Parallel.ForEach(result, new ParallelOptions { CancellationToken = cancellationToken }, series =>
        {
            // For deferred enumerations, any work to be done is left till last moment - in this case "ToList()" invokes actual operation
            DataSourceValueGroup valueGroup = valueGroups.First(group => group.Target.Equals(series.target));
            IEnumerable<DataSourceValue> values = valueGroup.Source;

            if (valueGroup.SourceTarget?.excludeNormalFlags ?? false)
                values = values.Where(value => value.Flags != MeasurementStateFlags.Normal);

            if (valueGroup.SourceTarget?.excludedFlags > uint.MinValue)
                values = values.Where(value => ((uint)value.Flags & valueGroup.SourceTarget.excludedFlags) == 0);

            series.datapoints = values.Select(dataValue => new[] { dataValue.Value, dataValue.Time }).ToList();
        });

        #region [ Original "request.maxDataPoints" Implementation ]

        //int maxDataPoints = (int)(request.maxDataPoints * 1.1D);

        //// Make a final pass through data to decimate returned point volume (for graphing purposes), if needed
        //foreach (TimeSeriesValues series in result)
        //{
        //    if (series.datapoints.Count > maxDataPoints)
        //    {
        //        double indexFactor = series.datapoints.Count / (double)request.maxDataPoints;
        //        series.datapoints = Enumerable.Range(0, request.maxDataPoints).Select(index => series.datapoints[(int)(index * indexFactor)]).ToList();
        //    }
        //}

        #endregion

        // Suppress series that opted into drop-when-empty and produced no points
        return result.Where(values => !values.dropEmptySeries || values.datapoints.Count > 0).ToList();
    }, cancellationToken);
}
/// <summary>
/// Lazily executes a parsed series function (e.g., Minimum, Maximum, Label) over the
/// queried data set, yielding one <see cref="DataSourceValueGroup"/> per resulting series.
/// </summary>
/// <param name="sourceTarget">Original source target associated with the expression.</param>
/// <param name="parsedFunction">Tuple of (series function, unparsed expression, group operation).</param>
/// <param name="startTime">Query start time.</param>
/// <param name="stopTime">Query stop time.</param>
/// <param name="interval">Query interval string.</param>
/// <param name="decimate">Flag indicating whether data decimation is allowed.</param>
/// <param name="dropEmptySeries">Flag indicating whether empty series should be dropped.</param>
/// <param name="cancellationToken">Cancellation token.</param>
/// <remarks>
/// Iterator method - nothing executes until enumeration begins. A <see cref="FormatException"/>
/// is raised during enumeration when the expression has too few parameters.
/// </remarks>
private IEnumerable<DataSourceValueGroup> ExecuteSeriesFunction(Target sourceTarget, Tuple<SeriesFunction, string, GroupOperation> parsedFunction, DateTime startTime, DateTime stopTime, string interval, bool decimate, bool dropEmptySeries, CancellationToken cancellationToken)
{
    SeriesFunction seriesFunction = parsedFunction.Item1;
    string expression = parsedFunction.Item2;
    GroupOperation groupOperation = parsedFunction.Item3;

    // Parse out function parameters and target expression
    // NOTE(review): cache is keyed on the raw expression only; if two different series
    // functions (or group operations) can receive an identical inner expression with
    // different parameter counts, cached parses would collide - confirm upstream parsing
    // guarantees key uniqueness.
    Tuple<string[], string> expressionParameters = TargetCache<Tuple<string[], string>>.GetOrAdd(expression, () =>
    {
        List<string> parsedParameters = new List<string>();

        // Extract any required function parameters
        int requiredParameters = s_requiredParameters[seriesFunction]; // Safe: no lock needed since content doesn't change

        // Any slice operation adds one required parameter for time tolerance
        if (groupOperation == GroupOperation.Slice)
            requiredParameters++;

        if (requiredParameters > 0)
        {
            // Advance past the first 'requiredParameters' commas; index lands on the
            // comma that terminates the last required parameter (or -1 on shortfall)
            int index = 0;

            for (int i = 0; i < requiredParameters && index > -1; i++)
                index = expression.IndexOf(',', index + 1);

            if (index > -1)
                parsedParameters.AddRange(expression.Substring(0, index).Split(','));

            if (parsedParameters.Count == requiredParameters)
                expression = expression.Substring(index + 1).Trim(); // Remainder is target expression (plus optional parameters)
            else
                throw new FormatException($"Expected {requiredParameters + 1} parameters, received {parsedParameters.Count + 1} in: {(groupOperation == GroupOperation.None ? "" : groupOperation.ToString())}{seriesFunction}({expression})");
        }

        // Extract any provided optional function parameters
        int optionalParameters = s_optionalParameters[seriesFunction]; // Safe: no lock needed since content doesn't change

        // Heuristic: a segment that starts a filter query or contains parentheses is part
        // of the target expression, not an optional parameter
        bool hasSubExpression(string target) => target.StartsWith("FILTER", StringComparison.OrdinalIgnoreCase) || target.Contains("(");

        if (optionalParameters > 0)
        {
            int index = expression.IndexOf(',');
            int lastIndex;

            if (index > -1 && !hasSubExpression(expression.Substring(0, index)))
            {
                lastIndex = index;

                // Walk comma-delimited segments until hitting the target sub-expression;
                // lastIndex trails one comma behind so we can back up when found
                for (int i = 1; i < optionalParameters && index > -1; i++)
                {
                    index = expression.IndexOf(',', index + 1);

                    if (index > -1 && hasSubExpression(expression.Substring(lastIndex + 1, index - lastIndex - 1).Trim()))
                    {
                        index = lastIndex;
                        break;
                    }

                    lastIndex = index;
                }

                if (index > -1)
                {
                    parsedParameters.AddRange(expression.Substring(0, index).Split(','));
                    expression = expression.Substring(index + 1).Trim();
                }
            }
        }

        return (new Tuple<string[], string>(parsedParameters.ToArray(), expression));
    });

    string[] parameters = expressionParameters.Item1;
    string queryExpression = expressionParameters.Item2; // Final function parameter is always target expression

    // When accurate calculation results are requested, query data source at full resolution
    if (seriesFunction == SeriesFunction.Interval && ParseFloat(parameters[0]) == 0.0D)
        decimate = false;

    // Query function expression to get series data
    IEnumerable<DataSourceValueGroup> dataset = QueryTarget(sourceTarget, queryExpression, startTime, stopTime, interval, decimate, dropEmptySeries, cancellationToken);

    // Handle label function as a special edge case - group operations on label are ignored
    if (seriesFunction == SeriesFunction.Label)
    {
        // Derive labels
        string label = parameters[0];

        // Strip surrounding quotes from the label parameter
        if (label.StartsWith("\"") || label.StartsWith("'"))
            label = label.Substring(1, label.Length - 2);

        DataSourceValueGroup[] valueGroups = dataset.ToArray();
        string[] seriesLabels = new string[valueGroups.Length];

        for (int i = 0; i < valueGroups.Length; i++)
        {
            string target = valueGroups[i].RootTarget;

            seriesLabels[i] = TargetCache<string>.GetOrAdd($"{label}@{target}", () =>
            {
                string table, derivedLabel;
                string[] components = label.Split('.');

                // "Table.Label" form selects an explicit metadata table; otherwise the
                // default "ActiveMeasurements" table is used
                if (components.Length == 2)
                {
                    table = components[0].Trim();
                    derivedLabel = components[1].Trim();
                }
                else
                {
                    table = "ActiveMeasurements";
                    derivedLabel = label;
                }

                DataRow record = target.MetadataRecordFromTag(Metadata, table);

                // Substitute any "{FieldName}" tokens with metadata field values
                if (record != null && derivedLabel.IndexOf('{') >= 0)
                {
                    foreach (string fieldName in record.Table.Columns.Cast<DataColumn>().Select(column => column.ColumnName))
                        derivedLabel = derivedLabel.ReplaceCaseInsensitive($"{{{fieldName}}}", record[fieldName].ToString());
                }

                // ReSharper disable once AccessToModifiedClosure
                // No substitutions occurred - append series index so labels stay distinct
                if (derivedLabel.Equals(label, StringComparison.Ordinal))
                    derivedLabel = $"{label}{(valueGroups.Length > 1 ? $" {i + 1}" : "")}";

                return (derivedLabel);
            });
        }

        // Verify that all series labels are unique
        if (seriesLabels.Length > 1)
        {
            HashSet<string> uniqueLabelSet = new HashSet<string>(StringComparer.OrdinalIgnoreCase);

            for (int i = 0; i < seriesLabels.Length; i++)
            {
                while (uniqueLabelSet.Contains(seriesLabels[i]))
                    seriesLabels[i] = $"{seriesLabels[i]}\u00A0"; // Suffixing with non-breaking space for label uniqueness

                uniqueLabelSet.Add(seriesLabels[i]);
            }
        }

        for (int i = 0; i < valueGroups.Length; i++)
        {
            yield return (new DataSourceValueGroup
            {
                Target = seriesLabels[i],
                RootTarget = valueGroups[i].RootTarget,
                SourceTarget = sourceTarget,
                Source = valueGroups[i].Source,
                DropEmptySeries = dropEmptySeries
            });
        }
    }
    else
    {
        switch (groupOperation)
        {
            case GroupOperation.Set:
            {
                // Flatten all series into a single enumerable
                DataSourceValueGroup valueGroup = new DataSourceValueGroup
                {
                    Target = $"Set{seriesFunction}({string.Join(", ", parameters)}{(parameters.Length > 0 ? ", " : "")}{queryExpression})",
                    RootTarget = queryExpression,
                    SourceTarget = sourceTarget,
                    Source = ExecuteSeriesFunctionOverSource(dataset.AsParallel().WithCancellation(cancellationToken).SelectMany(source => source.Source), seriesFunction, parameters),
                    DropEmptySeries = dropEmptySeries
                };

                // Handle edge-case set operations - for these functions there is data in the target series as well
                if (seriesFunction == SeriesFunction.Minimum || seriesFunction == SeriesFunction.Maximum || seriesFunction == SeriesFunction.Median)
                {
                    // First() forces evaluation here so the winning series' target is known
                    DataSourceValue dataValue = valueGroup.Source.First();
                    valueGroup.Target = $"Set{seriesFunction} = {dataValue.Target}";
                    valueGroup.RootTarget = dataValue.Target;
                }

                yield return (valueGroup);

                break;
            }
            case GroupOperation.Slice:
            {
                // First parameter is the slice time tolerance (seconds -> milliseconds);
                // remaining parameters pass through to the series function
                TimeSliceScanner scanner = new TimeSliceScanner(dataset, ParseFloat(parameters[0]) / SI.Milli);
                parameters = parameters.Skip(1).ToArray();

                foreach (DataSourceValueGroup valueGroup in ExecuteSeriesFunctionOverTimeSlices(scanner, seriesFunction, parameters, cancellationToken))
                {
                    yield return (new DataSourceValueGroup
                    {
                        Target = $"Slice{seriesFunction}({string.Join(", ", parameters)}{(parameters.Length > 0 ? ", " : "")}{valueGroup.Target})",
                        RootTarget = valueGroup.RootTarget ?? valueGroup.Target,
                        SourceTarget = sourceTarget,
                        Source = valueGroup.Source,
                        DropEmptySeries = dropEmptySeries
                    });
                }

                break;
            }
            default:
            {
                // GroupOperation.None - apply series function independently to each series
                foreach (DataSourceValueGroup valueGroup in dataset)
                {
                    yield return (new DataSourceValueGroup
                    {
                        Target = $"{seriesFunction}({string.Join(", ", parameters)}{(parameters.Length > 0 ? ", " : "")}{valueGroup.Target})",
                        RootTarget = valueGroup.RootTarget ?? valueGroup.Target,
                        SourceTarget = sourceTarget,
                        Source = ExecuteSeriesFunctionOverSource(valueGroup.Source, seriesFunction, parameters),
                        DropEmptySeries = dropEmptySeries
                    });
                }

                break;
            }
        }
    }
}