public void CounterSetItemsDisabledTest()
{
    // Arrange: one plain counter and one counter with set items, rendered with
    // OutputSetItems disabled so per-item series are folded into the totals.
    var context = new DefaultMetricsContext();
    var config = new PrometheusReportConfig { OutputSetItems = false };

    var counter = context.Counter("Counter", Unit.None, MetricTags.None);
    counter.Increment(1);

    var counterSubItems = context.Counter("CounterSubItems", Unit.None, MetricTags.None);
    counterSubItems.Increment("Item1", 2);
    counterSubItems.Increment("Item2", 3);

    // Act
    string renderedMetrics = RenderTestMetrics(context, config);

    // NOTE(review): this verbatim literal appears to have lost its original line
    // breaks; confirm the expected text against FormatPrometheusText's conventions.
    string expectedMetrics = FormatPrometheusText(@"# TYPE Counter gauge counter 1 0 # TYPE CounterSubItems gauge countersubitems 5 0 ");

    // Assert: only aggregated totals are emitted — no per-item series.
    Assert.AreEqual(expectedMetrics, renderedMetrics);
}
public void InfluxReport_CanAddRecords_ForCounter()
{
    // Arrange: an InfluxDB HTTP report wired to a fake writer so the
    // line-protocol output it would have sent can be inspected.
    var config = new InfluxConfig("localhost", "testdb");
    var writer = new InfluxdbTestWriter(config);
    config.Writer = writer;
    var report = new InfluxdbHttpReport(config);
    var context = new DefaultMetricsContext("TestContext");
    var precision = config.Precision ?? InfluxConfig.Default.Precision;

    var counter = context.Counter("test_counter", Unit.Bytes, new MetricTags("key1=value1,tag2,tag3,key4=value4"));

    // Increment normally (no set item).
    // FIX: the original fetched CurrentMetricsData before this increment and
    // immediately overwrote it — a dead store, removed.
    counter.Increment(300);
    var metricsData = context.DataProvider.CurrentMetricsData;
    report.RunReport(metricsData, hsFunc, CancellationToken.None);
    writer.LastBatch.Should().HaveCount(1);
    var expTime = InfluxLineProtocol.FormatTimestamp(metricsData.Timestamp, precision);
    // Bare tags without values (tag2, tag3) are not emitted in the output line.
    writer.LastBatch[0].ToLineProtocol(precision).Should().Be($@"testcontext.test_counter,key1=value1,key4=value4 count=300i {expTime}");

    // Increment with a set item: the item's key=value parts become extra tags,
    // and the item record carries a percent field for its share of the total.
    counter.Increment("item1,item2=ival2,item3=ival3", 100);
    metricsData = context.DataProvider.CurrentMetricsData;
    report.RunReport(metricsData, hsFunc, CancellationToken.None);
    writer.LastBatch.Should().HaveCount(2);
    expTime = InfluxLineProtocol.FormatTimestamp(metricsData.Timestamp, precision);
    writer.LastBatch[0].ToLineProtocol(precision).Should().Be($@"testcontext.test_counter,key1=value1,key4=value4 count=400i {expTime}");
    writer.LastBatch[1].ToLineProtocol(precision).Should().Be($@"testcontext.test_counter,item2=ival2,item3=ival3,key1=value1,key4=value4 count=100i,percent=25 {expTime}");
}
public void AddMetrics_EnsureLimitIsRespected()
{
    // Arrange: a SignalFx report capped at 50 datapoints, fed 51 distinct
    // counter set items so the cap must truncate the batch.
    var context = new DefaultMetricsContext();
    var sender = new FakeSignalFxReporter();
    var report = new SignalFxReport(
        sender,
        "",
        "FakeApiKey",
        new Dictionary<string, string> { { "System", "UnitTests" } },
        50,
        new HashSet<MetricDetails> { MetricDetails.count });

    var accountNoRandom = new Random();
    // FIX: this declaration was commented out in the original, but accountId
    // is used below by NextBytes/Aggregate — the block could not compile.
    var accountId = new byte[16];
    var generatedHashes = new HashSet<string>();
    while (generatedHashes.Count < 51)
    {
        accountNoRandom.NextBytes(accountId);
        var accountIdString = accountId
            .Aggregate(new StringBuilder(), (builder, b) => builder.AppendFormat("{0:x2}", b))
            .ToString();
        // HashSet<T>.Add returns false for duplicates, so a single call both
        // tests and inserts — guarantees 51 unique set items.
        if (!generatedHashes.Add(accountIdString))
        {
            continue;
        }
        var counter = context.Counter("TestCounter", Unit.Calls, new MetricTags());
        counter.Increment(accountIdString, accountNoRandom.Next());
    }

    // Act
    var source = new CancellationTokenSource();
    report.RunReport(context.DataProvider.CurrentMetricsData, () => new HealthStatus(), source.Token);

    // Assert: at least one message was sent and the first carries exactly
    // the 50-datapoint maximum.
    Assert.True(sender.Count >= 1);
    var message = sender[0];
    Assert.Equal(50, message.datapoints.Count);
}
public void AddMetrics_AllMetricsGetReported()
{
    // Arrange: a SignalFx report with a generous 10000-datapoint cap and no
    // detail filter, so every generated metric should be reported.
    var context = new DefaultMetricsContext();
    var sender = new FakeSignalFxReporter();
    var report = new SignalFxReport(
        sender,
        "",
        "FakeApiKey",
        new Dictionary<string, string> { { "System", "UnitTests" } },
        10000,
        null);
    var tags = new MetricTags("test=value");
    var totalExpectedMetrics = 0;

    // Timer: 17 datapoints — Count, Active_Sessions, four rates
    // (mean/1/5/15 min), Duration Last/Min/Mean/Max/StdDev, and percentiles
    // p75/p95/p98/p99/p999. NOTE(review): the original breakdown listed p95
    // twice to reach 17 — the extra entry is presumably the median; confirm
    // against the reporter's timer output.
    var timer = context.Timer("TestTimer", Unit.Calls, SamplingType.ExponentiallyDecaying, TimeUnit.Microseconds, TimeUnit.Microseconds, tags);
    timer.Record(10053, TimeUnit.Microseconds);
    totalExpectedMetrics += 17;

    // Gauge: a single datapoint. ("TestGuage" spelling kept — runtime string.)
    context.Gauge("TestGuage", () => 3.3, Unit.KiloBytes, tags);
    totalExpectedMetrics += 1;

    // Counter with two set items: one Total counter, plus a count and a
    // percentage per item = 5 datapoints.
    var counter = context.Counter("TestCounter", Unit.KiloBytes, tags);
    counter.Increment("SetA", 2);
    counter.Increment("SetB", 5);
    totalExpectedMetrics += 5;

    // Histogram: 12 datapoints — Count/Last/Min/Mean/Max/StdDev plus the
    // percentile set. NOTE(review): the original listed p95 twice here too;
    // the 12th entry is presumably the median — confirm.
    var histogram = context.Histogram("TestHistogram", Unit.Events, SamplingType.ExponentiallyDecaying, tags);
    histogram.Update(23, "ABC");
    histogram.Update(14, "DEF");
    totalExpectedMetrics += 12;

    // Meter with two marked items: 5 aggregate datapoints (Total + four
    // rates) plus 6 per item (Percent, Count, four rates) = 17.
    var meter = context.Meter("TestMeter", Unit.MegaBytes, TimeUnit.Seconds, tags);
    meter.Mark("A", 12);
    meter.Mark("B", 190);
    totalExpectedMetrics += 17;

    // Act: grand total is 52 datapoints, expected in a single sent message.
    var source = new CancellationTokenSource();
    report.RunReport(context.DataProvider.CurrentMetricsData, () => new HealthStatus(), source.Token);

    // Assert
    Assert.Equal(1, sender.Count);
    var message = sender[0];
    Assert.Equal(totalExpectedMetrics, message.datapoints.Count);
}