/// <summary>
/// Computes the per-datapoint difference for each metric, emitting <c>null</c> for the
/// first datapoint, for gaps (null values), and for negative differences.
/// </summary>
public static Metric[] NonnegativeDerivative(EvalContext context, Metric[] metrics)
{
    var result = new List<Metric>(metrics.Length);

    foreach(var metric in metrics)
    {
        double? previous = null;

        var transformed = Metric.Transform(metric, current => {
            if(!current.HasValue || !previous.HasValue)
            {
                // No usable pair yet — remember what we saw (possibly null) and emit a gap.
                previous = current;
                return (double?)null;
            }

            var delta = current.Value - previous.Value;
            previous = current.Value;

            // Negative deltas (e.g. counter resets) are suppressed, not clamped.
            return delta >= 0 ? (double?)delta : null;
        });

        result.Add(transformed);
    }

    return result.ToArray();
}
/// <summary>
/// Returns the <paramref name="n"/> metrics whose recorded values have the largest
/// variance around their mean. Metrics with no recorded (non-null) values are excluded.
/// </summary>
public static Metric[] MostDeviant(EvalContext context, Metric[] metrics, int n)
{
    return metrics.
        Where(m => m.Series.Datapoints.Any(d => d.Value.HasValue)).
        Select(m => {
            // Compute mean and variance over recorded (non-null) values only.
            // The previous implementation summed non-null values but divided by the
            // TOTAL datapoint count, skewing both mean and variance for sparse series.
            var values = m.Series.Datapoints.
                Where(d => d.Value.HasValue).
                Select(d => d.Value.Value).
                ToList();

            var mean = values.Sum() / values.Count;
            var sigma = values.Sum(v => Math.Pow(v - mean, 2)) / values.Count;

            return new { sigma, metric = m };
        }).
        OrderByDescending(m => m.sigma).
        Take(n).
        Select(m => m.metric).
        ToArray();
}
/// <summary>
/// Renames every metric to <paramref name="alias"/>, leaving each series untouched.
/// </summary>
public static Metric[] Alias(EvalContext context, Metric[] metrics, string alias)
{
    var aliased =
        from metric in metrics
        select new Metric(alias, metric.Series);

    return aliased.ToArray();
}
/// <summary>
/// Sorts metrics by the value produced by <paramref name="aggregationFunction"/> over
/// each metric's datapoints, in the requested <paramref name="order"/>. If either the
/// function or the order cannot be parsed, the input is returned unchanged.
/// </summary>
public static Metric[] SortByAggregated(EvalContext context, Metric[] metrics, string aggregationFunction, string order)
{
    var aggregate = ParseAggregationFunction(aggregationFunction);
    if(aggregate == null)
        return metrics;

    var direction = ParseOrder(order);
    if(direction == null)
        return metrics;

    var keyed = metrics.Select(m => new { metric = m, key = aggregate(m.Series.Datapoints) });

    var sorted = direction.Value == Order.Ascending ?
        keyed.OrderBy(x => x.key) :
        keyed.OrderByDescending(x => x.key);

    return sorted.Select(x => x.metric).ToArray();
}
/// <summary>
/// Evaluates every argument expression, resolves the named function from the
/// environment, and invokes it with the evaluated arguments.
/// </summary>
public override object Evaluate(Environment environment, EvalContext context)
{
    var arguments = new List<object>();

    foreach(var argument in Arguments)
        arguments.Add(argument.Value.Evaluate(environment, context));

    var function = environment.ResolveFunction(Name);
    return function.Invoke(environment, context, arguments.ToArray());
}
/// <summary>
/// Keeps only metrics whose datapoints within the trailing <paramref name="window"/>
/// (measured back from the current UTC time) aggregate to a value strictly above
/// <paramref name="threshold"/>. Unparseable arguments yield the input unchanged.
/// </summary>
public static Metric[] WindowAggregatedAbove(EvalContext context, Metric[] metrics, string window, string aggregationFunction, double threshold)
{
    var duration = ParseTimeSpan(window);
    if(duration == null)
        return metrics;

    var aggregate = ParseAggregationFunction(aggregationFunction);
    if(aggregate == null)
        return metrics;

    var cutoff = DateTime.UtcNow - duration.Value;

    var matching = new List<Metric>();

    foreach(var metric in metrics)
    {
        var value = aggregate(metric.Series.Datapoints.Where(d => d.Timestamp >= cutoff));
        if(value.HasValue && value.Value > threshold)
            matching.Add(metric);
    }

    return matching.ToArray();
}
/// <summary>
/// Groups metrics by the dot-delimited name fragment at <paramref name="fragmentIndex"/>
/// and combines each group with the given <paramref name="callback"/>. Only "sum" is
/// supported; any other callback yields <c>null</c>.
/// </summary>
public static Metric[] GroupByFragment(EvalContext context, Metric[] metrics, int fragmentIndex, string callback)
{
    // Metrics whose names have too few fragments are silently excluded from grouping.
    var metaMetrics = metrics.
        Where(m => m.Name.Split('.').Length > fragmentIndex).
        GroupBy(m => m.Name.Split('.')[fragmentIndex]);

    if (callback == "sum")
    {
        var result = metaMetrics.
            Select(m => {
                var grouped = Sum(context, m.ToArray());

                // Re-anchor the summed series to the evaluation window.
                var series = new Series(context.From, context.Until, grouped.Series.Interval, grouped.Series.Datapoints);

                var nameFragments = m.First().Name.Split('.');
                nameFragments[fragmentIndex] = "*";

                // NOTE(review): this reads back only the fragment that was just replaced,
                // so every grouped metric ends up named "*". The commented-out Join
                // suggests a full pattern name was intended — confirm which is desired.
                var name = nameFragments[fragmentIndex]; // string.Join(".", nameFragments.Where((s, i) => i != fragmentIndex));

                return(new Metric(name, series));
            }).
            ToArray();

        return(result);
    } // if

    // NOTE(review): unsupported callbacks return null rather than an empty array;
    // callers must be prepared for that.
    return(null);
}
/// <summary>
/// Computes an exponential moving average over each metric's series. The first recorded
/// value seeds the average; thereafter <c>ema = a * value + (1 - a) * previousEma</c>
/// where <c>a</c> is <paramref name="smoothingFactor"/>. Gaps (null values) are passed
/// through as null without affecting the running average.
/// </summary>
public static Metric[] Ema(EvalContext context, Metric[] metrics, double smoothingFactor)
{
    return metrics.
        Select(m => {
            double? prevEma = null;

            return new Metric(m.Name, m.Series.Transform(v => {
                if(!v.HasValue)
                    return null;

                // Standard exponential smoothing blends the *current* value with the
                // previous average. (The previous implementation blended the previous
                // value instead, so each datapoint's EMA ignored the datapoint itself —
                // e.g. the second output was always equal to the first value.)
                prevEma = prevEma.HasValue ?
                    smoothingFactor * v.Value + (1 - smoothingFactor) * prevEma.Value :
                    v.Value;

                return prevEma;
            }));
        }).
        ToArray();
}
/// <summary>
/// Produces a running total of each metric's series; null datapoints contribute zero
/// but the accumulated value is still emitted for them.
/// </summary>
public static Metric[] Integral(EvalContext context, Metric[] metrics)
{
    var integrated = new List<Metric>(metrics.Length);

    foreach(var metric in metrics)
    {
        var runningTotal = 0d;
        integrated.Add(new Metric(metric.Name,
            metric.Series.Transform(value => runningTotal += value.GetValueOrDefault())));
    }

    return integrated.ToArray();
}
/// <summary>
/// Orders metrics by the dot-delimited name fragment at <paramref name="fragmentIndex"/>.
/// Metrics whose names have too few fragments sort with a null key (i.e. first).
/// </summary>
public static Metric[] SortByFragment(EvalContext context, Metric[] metrics, int fragmentIndex)
{
    // ElementAtOrDefault replaces the hand-rolled Where((s, i) => i == index).SingleOrDefault(),
    // with identical semantics (out-of-range or negative index yields null).
    return metrics.
        OrderBy(m => m.Name.Split('.').ElementAtOrDefault(fragmentIndex)).
        ToArray();
}
/// <summary>
/// Resolves the expression's selector against the metric registry and reads the
/// matching metrics over the context's [From, Until] range.
/// </summary>
public override object Evaluate(Environment environment, EvalContext context)
{
    var names = environment.MetricRegistry.ResolveMetricNames(Expression.Selector);
    var metrics = MetricReader.ReadMetrics(environment, names, context.From, context.Until);

    return metrics.ToArray();
}
/// <summary>
/// Renames each metric to the dot-joined subset of its name fragments selected by
/// <paramref name="fragmentIndices"/> (original fragment order is preserved).
/// </summary>
public static Metric[] AliasByFragment(EvalContext context, Metric[] metrics, params int[] fragmentIndices)
{
    var renamed = new List<Metric>(metrics.Length);

    foreach(var metric in metrics)
    {
        var keep = metric.Name.
            Split('.').
            Where((fragment, index) => fragmentIndices.Contains(index));

        renamed.Add(new Metric(string.Join(".", keep), metric.Series));
    }

    return renamed.ToArray();
}
/// <summary>
/// Shifts the selector's time range forward by <paramref name="offset"/>; an
/// unparseable offset returns the selector unchanged.
/// </summary>
public static MetricSelector Timeshift(EvalContext context, MetricSelector selector, string offset)
{
    var shift = ParseTimeSpan(offset);

    return shift == null ?
        selector :
        new MetricSelector(selector.Selector, selector.From.Add(shift.Value), selector.Until.Add(shift.Value));
}
/// <summary>
/// Shifts every metric down by the smallest recorded value across all metrics, so the
/// global minimum becomes zero. Null datapoints remain null. If no metric has any
/// recorded value, the input is returned unchanged.
/// </summary>
public static Metric[] OffsetToZero(EvalContext context, Metric[] metrics)
{
    // The previous implementation used DefaultIfEmpty(), which injects a null Metric
    // when no metric has values, making the Min() projection throw a
    // NullReferenceException. Guard explicitly instead; with no recorded values every
    // datapoint is null anyway, so returning the input unchanged is equivalent.
    var valued = metrics.
        Where(m => m.Series.Datapoints.Any(d => d.Value.HasValue)).
        ToArray();

    if(valued.Length == 0)
        return metrics;

    var min = valued.Min(m => m.Series.Datapoints.Where(d => d.Value.HasValue).Min(d => d.Value));

    return metrics.
        Select(m => Metric.Transform(m, d => d.HasValue ? d.Value - min : null)).
        ToArray();
}
/// <summary>
/// Re-buckets each metric's series into fixed-size buckets of <paramref name="bucket"/>
/// duration, aggregating the datapoints that fall into each bucket with
/// <paramref name="aggregationFunction"/>. Metrics already coarser than the bucket are
/// returned untouched. If neither argument order parses, the input is returned unchanged.
/// </summary>
public static Metric[] Summarize(EvalContext context, Metric[] metrics, string aggregationFunction, string bucket)
{
    //
    // The double-parsing is due to legacy reasons. Statsify expects `summarize(metrics, aggregationFunction, bucket)`,
    // whereas Graphite-compatible clients expect `summarize(metrics, bucket, aggregationFunction)`.
    var bucketDuration = ParseTimeSpan(bucket) ?? ParseTimeSpan(aggregationFunction);
    if (!bucketDuration.HasValue) { return(metrics); }

    var fn = ParseAggregationFunction(aggregationFunction) ?? ParseAggregationFunction(bucket);
    if (fn == null) { return(metrics); }

    // Align the evaluation window to bucket boundaries.
    var until = context.Until.RoundToNearest(bucketDuration.Value);
    var from = context.From.RoundToNearest(bucketDuration.Value);

    return (metrics.Select(m => {
        // A series coarser than the requested bucket cannot be summarized further.
        if (m.Series.Interval > bucketDuration) { return m; }

        // Walk buckets backwards from `until`, draining datapoints newest-first so
        // each DequeueWhile call consumes exactly one bucket's worth of points.
        var series = new Queue <Datapoint>(m.Series.Datapoints.OrderByDescending(d => d.Timestamp));
        var datapoints = new List <Datapoint>();

        var timestamp = until.Subtract(bucketDuration.Value);
        while (timestamp > from)
        {
            var value = fn(series.DequeueWhile(d => d.Timestamp >= timestamp));

            // Each bucket is stamped with its *end* time.
            var datapoint = new Datapoint(timestamp.Add(bucketDuration.Value), value);
            datapoints.Add(datapoint);

            timestamp = timestamp.Subtract(bucketDuration.Value);
        } // while

        // Datapoints were produced newest-first; restore chronological order.
        return new Metric(m.Name, new Series(m.Series.From, m.Series.Until, bucketDuration.Value, datapoints.OrderBy(d => d.Timestamp)));
    }).
    ToArray());
}
/// <summary>
/// Keeps only metrics whose aggregated datapoints fall strictly below
/// <paramref name="threshold"/>; an unknown aggregation function returns the input unchanged.
/// </summary>
public static Metric[] AggregatedBelow(EvalContext context, Metric[] metrics, string aggregationFunction, double threshold)
{
    var aggregate = ParseAggregationFunction(aggregationFunction);
    if(aggregate == null)
        return metrics;

    var matching = new List<Metric>();

    foreach(var metric in metrics)
    {
        var value = aggregate(metric.Series.Datapoints);
        if(value.HasValue && value.Value < threshold)
            matching.Add(metric);
    }

    return matching.ToArray();
}
/// <summary>
/// Invokes the wrapped static method: prepends the <see cref="EvalContext"/>, packs
/// trailing arguments into a typed array when the method declares a `params` parameter,
/// and materializes a <see cref="MetricSelector"/> argument into <c>Metric[]</c> when the
/// method does not accept selectors directly.
/// </summary>
public object Invoke(Environment environment, EvalContext context, object[] parameters)
{
    //
    // First parameter must always be an EvalContext instance
    var p = new List <object> { context };

    var pis = methodInfo.GetParameters();

    // Detect a `params` parameter via its ParamArrayAttribute.
    var paramsPi = pis.SingleOrDefault(pi => pi.GetCustomAttributes(typeof(ParamArrayAttribute), false).OfType <ParamArrayAttribute>().SingleOrDefault() != null);
    var hasParams = paramsPi != null;

    // NOTE(review): despite the name, this flag is true when the method has *no*
    // MetricSelector parameter — i.e. a selector argument must be resolved into
    // metrics below before the call.
    var hasMetric = pis.All(pi => pi.ParameterType != typeof(MetricSelector));

    if (hasParams)
    {
        // pis.Length - 2 = declared parameters minus the implicit EvalContext and the
        // params parameter itself; everything past that is packed into the params array.
        p.AddRange(parameters.Take(pis.Length - 2));

        var @params = parameters.Skip(pis.Length - 2).ToArray();

        // Build an array of the params parameter's element type so reflection binding succeeds.
        var par = Array.CreateInstance(paramsPi.ParameterType.GetElementType(), @params.Length);
        Array.Copy(@params, par, @params.Length);

        p.Add(par);
    } // if
    else
    {
        p.AddRange(parameters);
    }

    if (hasMetric)
    {
        // Replace a MetricSelector argument with the metrics it resolves to over the
        // context's time range.
        var pos = p.FindIndex(_p => _p is MetricSelector);
        if (pos > -1)
        {
            var ms = p[pos] as MetricSelector;

            var metricNames = environment.MetricRegistry.ResolveMetricNames(ms.Selector);
            var metrics = MetricReader.ReadMetrics(environment, metricNames, context.From, context.Until);

            p[pos] = metrics.ToArray();
        } // if
    } // if

    // Static method: no target instance.
    return(methodInfo.Invoke(null, p.ToArray()));
}
/// <summary>
/// Fills gaps in each series by carrying the most recent recorded value forward;
/// leading nulls (before any value is seen) stay null.
/// </summary>
public static Metric[] KeepLastValue(EvalContext context, Metric[] metrics)
{
    return metrics.
        Select(m => {
            double? lastSeen = null;

            return Metric.Transform(m, current => {
                if(current.HasValue)
                {
                    lastSeen = current.Value;
                    return current;
                }

                // Gap: repeat the last recorded value (or null if none yet).
                return lastSeen;
            });
        }).
        ToArray();
}
/// <summary>
/// Generates <paramref name="number"/> synthetic random-walk metrics at one-second
/// resolution over the context's time range. With a single metric the name is used
/// as-is; otherwise a 1-based numeric suffix is appended.
/// </summary>
public static Metric[] RandomMetric(EvalContext context, string name, int number)
{
    var random = new Random();
    var seconds = (int)(context.Until.ToUnixTimestamp() - context.From.ToUnixTimestamp());

    var metrics = new List<Metric>(number);

    for(var i = 0; i < number; ++i)
    {
        double walk = 0;

        var datapoints = Enumerable.
            Range(0, seconds).
            Select(s => new Datapoint(context.From.AddSeconds(s), walk = walk + random.NextDouble() - 0.5));

        var series = new Series(context.From, context.Until, TimeSpan.FromSeconds(1), datapoints);
        var metricName = number == 1 ? name : string.Format("{0}.{1}", name, i + 1);

        metrics.Add(new Metric(metricName, series));
    }

    return metrics.ToArray();
}
/// <summary>
/// Adds the given metrics together pointwise into a single unnamed metric spanning the
/// earliest From to the latest Until. Returns <c>null</c> for an empty input or when
/// the metrics do not all share the same interval. Timestamps with no recorded values
/// produce zero (LINQ Sum over nullables skips nulls and yields 0 for an empty set).
/// </summary>
public static Metric Sum(EvalContext context, Metric[] metrics)
{
    if(metrics.Length == 0 || !metrics.AllEqual(m => m.Series.Interval))
        return null;

    var interval = metrics[0].Series.Interval;
    var from = metrics.Min(m => m.Series.From);
    var until = metrics.Max(m => m.Series.Until);

    var datapoints = new List<Datapoint>();

    for(var t = from; t <= until; t += interval)
    {
        var total = metrics.
            SelectMany(m => m.Series.Datapoints.Where(d => d.Timestamp == t)).
            Select(d => d.Value).
            Sum();

        datapoints.Add(new Datapoint(t, total));
    }

    return new Metric("", new Series(from, until, interval, datapoints));
}
/// <summary>
/// Evaluates to a <see cref="MetricSelector"/> over the context's [From, Until] range.
/// </summary>
public override object Evaluate(Environment environment, EvalContext context)
{
    var selector = new MetricSelector(Selector, context.From, context.Until);
    return selector;
}
/// <summary>
/// A literal expression evaluates to its stored value.
/// </summary>
public override object Evaluate(Environment environment, EvalContext context)
{
    return Value;
}
/// <summary>
/// Orders metrics by name using the default string comparer.
/// </summary>
public static Metric[] SortByName(EvalContext context, Metric[] metrics)
{
    var ordered =
        from metric in metrics
        orderby metric.Name
        select metric;

    return ordered.ToArray();
}
/// <summary>
/// Maps each datapoint to its absolute value; nulls pass through unchanged.
/// </summary>
public static Metric[] Abs(EvalContext context, Metric[] metrics)
{
    var transformed = new List<Metric>(metrics.Length);

    foreach(var metric in metrics)
        transformed.Add(new Metric(metric.Name,
            metric.Series.Transform(value => value.HasValue ? Math.Abs(value.Value) : (double?)null)));

    return transformed.ToArray();
}
/// <summary>
/// Convenience overload producing a single random-walk metric named <paramref name="name"/>.
/// </summary>
public static Metric[] RandomMetric(EvalContext context, string name)
{
    return RandomMetric(context, name, 1);
}
/// <summary>
/// Multiplies every recorded datapoint by <paramref name="scale"/>; nulls pass through unchanged.
/// </summary>
public static Metric[] Scale(EvalContext context, Metric[] metrics, double scale)
{
    var scaled = metrics.Select(m => {
        var series = m.Series.Transform(v => v == null ? (double?)null : v.Value * scale);
        return new Metric(m.Name, series);
    });

    return scaled.ToArray();
}