/// <summary>
/// Queries openHistorian as a Grafana data source.
/// </summary>
/// <param name="request">Query request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public Task<List<TimeSeriesValues>> Query(QueryRequest request, CancellationToken cancellationToken)
{
    // Task allows processing of multiple simultaneous queries
    return Task.Factory.StartNew(() =>
    {
        // A null format is accepted; any explicit format other than "json"
        // (case-insensitive) is rejected. "x == false" on a bool? is equivalent
        // to the original "!x ?? false" form, just easier to read.
        if (request.format?.Equals("json", StringComparison.OrdinalIgnoreCase) == false)
            throw new InvalidOperationException("Only JSON formatted query requests are currently supported.");

        DateTime startTime = request.range.from.ParseJsonTimestamp();
        DateTime stopTime = request.range.to.ParseJsonTimestamp();

        // Seed the target set with the raw target expressions, then expand each
        // expression into its individual point tags (cached per expression)
        HashSet<string> targetSet = new HashSet<string>(request.targets.Select(target => target.target), StringComparer.OrdinalIgnoreCase);

        foreach (string expression in request.targets.Select(target => target.target))
            targetSet.UnionWith(TargetCache.GetOrAdd(expression, () => AdapterBase.ParseInputMeasurementKeys(Metadata, false, expression).Select(key => key.TagFromKey(Metadata)).ToArray()));

        // Build measurement ID to point-tag map, skipping any unresolved targets
        Dictionary<ulong, string> idTagMap = new Dictionary<ulong, string>();

        foreach (string pointTag in targetSet)
        {
            MeasurementKey measurementKey = TargetCache.GetOrAdd(pointTag, () => pointTag.KeyFromTag(Metadata));

            if (measurementKey != MeasurementKey.Undefined)
                idTagMap[measurementKey.ID] = pointTag;
        }

        return QueryTimeSeriesValues(startTime, stopTime, request.maxDataPoints, idTagMap, cancellationToken);
    },
    cancellationToken);
}
/// <summary>
/// Queries data source returning data as Grafana time-series data set.
/// </summary>
/// <param name="request">Query request.</param>
/// <param name="cancellationToken">Cancellation token.</param>
public Task<List<TimeSeriesValues>> Query(QueryRequest request, CancellationToken cancellationToken)
{
    bool isFilterMatch(string target, AdHocFilter filter)
    {
        // Default to positive match on failures.
        // BUG FIX: the cache key must include the target -- the evaluated result
        // depends on the target's metadata row, so a key built only from the filter
        // would incorrectly reuse the first target's match result for all targets.
        return TargetCache<bool>.GetOrAdd($"filter!{target}!{filter.key}{filter.@operator}{filter.value}", () =>
        {
            try
            {
                DataRow metadata = LookupTargetMetadata(target);

                // No metadata available for target: default to positive match
                if (metadata == null)
                    return true;

                // Compare metadata field value to filter value, coerced to the column's type
                dynamic left = metadata[filter.key];
                dynamic right = Convert.ChangeType(filter.value, metadata.Table.Columns[filter.key].DataType);

                switch (filter.@operator)
                {
                    case "=":
                    case "==":
                        return left == right;
                    case "!=":
                    case "<>":
                        return left != right;
                    case "<":
                        return left < right;
                    case "<=":
                        return left <= right;
                    case ">":
                        return left > right;
                    case ">=":
                        return left >= right;
                }

                // Unrecognized operator: default to positive match
                return true;
            }
            catch
            {
                // Any metadata lookup, conversion or comparison failure defaults to positive match
                return true;
            }
        });
    }

    // Looks up a float coordinate field (e.g., Latitude/Longitude) from target metadata,
    // caching per target/field pair; missing values default to 0.0F
    float lookupTargetCoordinate(string target, string field)
    {
        return TargetCache<float>.GetOrAdd($"{target}_{field}", () => LookupTargetMetadata(target)?.ConvertNullableField<float>(field) ?? 0.0F);
    }

    // Task allows processing of multiple simultaneous queries
    return Task.Factory.StartNew(() =>
    {
        // A missing/blank format is accepted; any explicit format other than "json" is rejected
        if (!string.IsNullOrWhiteSpace(request.format) && !request.format.Equals("json", StringComparison.OrdinalIgnoreCase))
            throw new InvalidOperationException("Only JSON formatted query requests are currently supported.");

        DateTime startTime = request.range.from.ParseJsonTimestamp();
        DateTime stopTime = request.range.to.ParseJsonTimestamp();

        // Normalize target expressions: trim whitespace, treat null as empty
        foreach (Target target in request.targets)
            target.target = target.target?.Trim() ?? "";

        DataSourceValueGroup[] valueGroups = request.targets.Select(target => QueryTarget(target, target.target, startTime, stopTime, request.interval, true, false, cancellationToken)).SelectMany(groups => groups).ToArray();

        // Establish result series sequentially so that order remains consistent between calls
        List<TimeSeriesValues> result = valueGroups.Select(valueGroup => new TimeSeriesValues
        {
            target = valueGroup.Target,
            rootTarget = valueGroup.RootTarget,
            latitude = lookupTargetCoordinate(valueGroup.RootTarget, "Latitude"),
            longitude = lookupTargetCoordinate(valueGroup.RootTarget, "Longitude"),
            dropEmptySeries = valueGroup.DropEmptySeries
        }).ToList();

        // Apply any encountered ad-hoc filters
        if (request.adhocFilters?.Count > 0)
        {
            foreach (AdHocFilter filter in request.adhocFilters)
                result = result.Where(values => isFilterMatch(values.rootTarget, filter)).ToList();
        }

        // Process series data in parallel
        Parallel.ForEach(result, new ParallelOptions { CancellationToken = cancellationToken }, series =>
        {
            // For deferred enumerations, any work to be done is left till last moment - in this case "ToList()" invokes actual operation
            DataSourceValueGroup valueGroup = valueGroups.First(group => group.Target.Equals(series.target));
            IEnumerable<DataSourceValue> values = valueGroup.Source;

            // Optionally exclude values whose state flags are exactly Normal
            if (valueGroup.SourceTarget?.excludeNormalFlags ?? false)
                values = values.Where(value => value.Flags != MeasurementStateFlags.Normal);

            // Optionally exclude values that have any of the user-specified flags set
            if (valueGroup.SourceTarget?.excludedFlags > uint.MinValue)
                values = values.Where(value => ((uint)value.Flags & valueGroup.SourceTarget.excludedFlags) == 0);

            series.datapoints = values.Select(dataValue => new[] { dataValue.Value, dataValue.Time }).ToList();
        });

        #region [ Original "request.maxDataPoints" Implementation ]

        //int maxDataPoints = (int)(request.maxDataPoints * 1.1D);

        //// Make a final pass through data to decimate returned point volume (for graphing purposes), if needed
        //foreach (TimeSeriesValues series in result)
        //{
        //    if (series.datapoints.Count > maxDataPoints)
        //    {
        //        double indexFactor = series.datapoints.Count / (double)request.maxDataPoints;
        //        series.datapoints = Enumerable.Range(0, request.maxDataPoints).Select(index => series.datapoints[(int)(index * indexFactor)]).ToList();
        //    }
        //}

        #endregion

        // Drop series flagged as droppable when they produced no data points
        return result.Where(values => !values.dropEmptySeries || values.datapoints.Count > 0).ToList();
    },
    cancellationToken);
}
/// <summary>
/// Queries openHistorian as a Grafana data source.
/// </summary>
/// <param name="request">Query request.</param>
public async Task<List<TimeSeriesValues>> Query(QueryRequest request)
{
    // Abort if services are not enabled ("is null" is a pure reference check,
    // matching the original "(object) == null" intent of bypassing any
    // overloaded equality operator)
    if (!Enabled || Archive is null)
        return null;

    return await m_dataSource.Query(request, m_cancellationSource.Token);
}