public void InMemoryExporterShouldDeepCopyMetricPoints() { var meter = new Meter("InMemoryExporterTests", "1.0"); var exportedItems = new List <Metric>(); using var inMemoryReader = new BaseExportingMetricReader(new InMemoryExporter <Metric>(exportedItems)) { PreferredAggregationTemporality = AggregationTemporality.Delta, }; using var meterProvider = Sdk.CreateMeterProviderBuilder() .AddMeter("InMemoryExporterTests") .AddReader(inMemoryReader) .Build(); var counter = meter.CreateCounter <long>("meter"); // Emit 10 for the MetricPoint with a single key-vaue pair: ("tag1", "value1") counter.Add(10, new KeyValuePair <string, object>("tag1", "value1")); // Pull metric data from AggregatorStore inMemoryReader.Collect(); var metric = exportedItems[0]; // Only one Metric object is added to the collection at this point var metricPointsEnumerator = metric.GetMetricPoints().GetEnumerator(); Assert.True(metricPointsEnumerator.MoveNext()); // One MetricPoint is emitted for the Metric ref var metricPointForFirstExport = ref metricPointsEnumerator.Current;
public void Setup()
{
    // Exporter callback: accumulates the counter sum across all exported
    // metric points, exercising both iteration styles for the benchmark.
    var metricExporter = new TestExporter<Metric>(ProcessExport);

    void ProcessExport(Batch<Metric> batch)
    {
        double sum = 0;
        foreach (var metric in batch)
        {
            if (this.UseWithRef)
            {
                // The performant way of iterating.
                foreach (ref var metricPoint in metric.GetMetricPoints())
                {
                    sum += metricPoint.GetCounterSumDouble();
                }
            }
            else
            {
                // The non-performant way of iterating.
                // This is still "correct", but less performant.
                foreach (var metricPoint in metric.GetMetricPoints())
                {
                    sum += metricPoint.GetCounterSumDouble();
                }
            }
        }
    }

    this.reader = new BaseExportingMetricReader(metricExporter)
    {
        Temporality = AggregationTemporality.Cumulative,
    };

    this.meter = new Meter(Utils.GetCurrentMethodName());

    this.provider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(this.meter.Name)
        .AddReader(this.reader)
        .Build();

    this.counter = this.meter.CreateCounter<double>("counter");
    this.token = new CancellationTokenSource();

    // Background writer: keeps recording measurements with random tag
    // values until the token is cancelled.
    this.writeMetricTask = new Task(() =>
    {
        while (!this.token.IsCancellationRequested)
        {
            var tag1 = new KeyValuePair<string, object>("DimName1", this.dimensionValues[this.random.Next(0, 10)]);
            var tag2 = new KeyValuePair<string, object>("DimName2", this.dimensionValues[this.random.Next(0, 10)]);
            var tag3 = new KeyValuePair<string, object>("DimName3", this.dimensionValues[this.random.Next(0, 10)]);
            this.counter.Add(100.00, tag1, tag2, tag3);
        }
    });
    this.writeMetricTask.Start();
}
public void ObservableCounterAggregationTest(bool exportDelta)
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = exportDelta
            ? AggregationTemporality.Delta
            : AggregationTemporality.Cumulative,
    };

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{exportDelta}");

    // Each observation reports 10, 20, 30, ... on successive collects.
    int i = 1;
    var counterLong = meter.CreateObservableCounter(
        "observable-counter",
        () =>
        {
            return new List<Measurement<long>>()
            {
                new Measurement<long>(i++ * 10),
            };
        });

    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    // First collect: both temporalities see the first observation (10).
    metricReader.Collect();
    long sumReceived = GetLongSum(metricItems);
    Assert.Equal(10, sumReceived);

    // Second collect: delta reports only the increment, cumulative the total.
    metricItems.Clear();
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(10, sumReceived);
    }
    else
    {
        Assert.Equal(20, sumReceived);
    }

    // Third collect: same pattern.
    metricItems.Clear();
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(10, sumReceived);
    }
    else
    {
        Assert.Equal(30, sumReceived);
    }
}
public void TestMetricPointCap(AggregationTemporality temporality)
{
    var metricItems = new List<Metric>();
    int metricPointCount = 0;

    // Counts every metric point seen in each exported batch.
    var metricExporter = new TestExporter<Metric>(ProcessExport);
    void ProcessExport(Batch<Metric> batch)
    {
        foreach (var metric in batch)
        {
            foreach (ref var metricPoint in metric.GetMetricPoints())
            {
                metricPointCount++;
            }
        }
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    using var meter = new Meter("TestPointCapMeter");
    var counterLong = meter.CreateCounter<long>("mycounterCapTest");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter("TestPointCapMeter")
        .AddReader(metricReader)
        .Build();

    // Make one Add with no tags.
    // as currently we reserve 0th index
    // for no tag point!
    // This may be changed later.
    counterLong.Add(10);
    for (int i = 0; i < AggregatorStore.MaxMetricPoints + 1; i++)
    {
        counterLong.Add(10, new KeyValuePair<string, object>("key", "value" + i));
    }

    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, metricPointCount);

    metricPointCount = 0;
    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, metricPointCount);

    // These updates would be dropped.
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueA"));
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueB"));
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueC"));
    metricPointCount = 0;
    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, metricPointCount);
}
private void MultithreadedCounterTest<T>(T deltaValueUpdatedByEachCall)
    where T : struct, IComparable
{
    var metricItems = new List<Metric>();
    var metricReader = new BaseExportingMetricReader(new InMemoryExporter<Metric>(metricItems));

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{typeof(T).Name}.{deltaValueUpdatedByEachCall}");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    var argToThread = new UpdateThreadArguments<T>
    {
        ValuesToRecord = new T[] { deltaValueUpdatedByEachCall },
        Instrument = meter.CreateCounter<T>("counter"),
        MreToBlockUpdateThread = new ManualResetEvent(false),
        MreToEnsureAllThreadsStart = new ManualResetEvent(false),
    };

    // Spin up the updater threads; they all block until released together.
    Thread[] updateThreads = new Thread[numberOfThreads];
    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i] = new Thread(CounterUpdateThread<T>);
        updateThreads[i].Start(argToThread);
    }

    argToThread.MreToEnsureAllThreadsStart.WaitOne();
    Stopwatch sw = Stopwatch.StartNew();
    argToThread.MreToBlockUpdateThread.Set();

    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i].Join();
    }

    this.output.WriteLine($"Took {sw.ElapsedMilliseconds} msecs. Total threads: {numberOfThreads}, each thread doing {numberOfMetricUpdateByEachThread} recordings.");

    metricReader.Collect();

    // Verify the aggregated sum matches updates * threads * delta.
    if (typeof(T) == typeof(long))
    {
        var sumReceived = GetLongSum(metricItems);
        var expectedSum = deltaLongValueUpdatedByEachCall * numberOfMetricUpdateByEachThread * numberOfThreads;
        Assert.Equal(expectedSum, sumReceived);
    }
    else if (typeof(T) == typeof(double))
    {
        var sumReceived = GetDoubleSum(metricItems);
        var expectedSum = deltaDoubleValueUpdatedByEachCall * numberOfMetricUpdateByEachThread * numberOfThreads;
        Assert.Equal(expectedSum, sumReceived, 2);
    }
}
public void StreamNamesDuplicatesAreNotAllowedTest(AggregationTemporality temporality)
{
    var metricItems = new List<Metric>();
    int metricCount = 0;

    // Counts metric streams per exported batch.
    var metricExporter = new TestExporter<Metric>(ProcessExport);
    void ProcessExport(Batch<Metric> batch)
    {
        foreach (var metric in batch)
        {
            metricCount++;
        }
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    using var meter1 = new Meter("TestDuplicateMetricName1");
    using var meter2 = new Meter("TestDuplicateMetricName2");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter("TestDuplicateMetricName1")
        .AddMeter("TestDuplicateMetricName2")
        .AddReader(metricReader)
        .Build();

    // Expecting one metric stream.
    var counterLong = meter1.CreateCounter<long>("name1");
    counterLong.Add(10);
    metricReader.Collect();
    Assert.Equal(1, metricCount);

    // The following will be ignored as
    // metric of same name exists.
    // Metric stream will remain one.
    var anotherCounterSameName = meter1.CreateCounter<long>("name1");
    anotherCounterSameName.Add(10);
    metricCount = 0;
    metricReader.Collect();
    Assert.Equal(1, metricCount);

    // The following will also be ignored
    // as the name is same.
    // (the Meter name is not part of stream name)
    var anotherCounterSameNameDiffMeter = meter2.CreateCounter<long>("name1");
    anotherCounterSameNameDiffMeter.Add(10);
    metricCount = 0;
    metricReader.Collect();
    Assert.Equal(1, metricCount);
}
public void TestInstrumentDisposal(AggregationTemporality temporality)
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    var meter1 = new Meter($"{Utils.GetCurrentMethodName()}.{temporality}.1");
    var meter2 = new Meter($"{Utils.GetCurrentMethodName()}.{temporality}.2");
    var counter1 = meter1.CreateCounter<long>("counterFromMeter1");
    var counter2 = meter2.CreateCounter<long>("counterFromMeter2");

    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter1.Name)
        .AddMeter(meter2.Name)
        .AddReader(metricReader)
        .Build();

    // Both meters alive: two metric streams exported.
    counter1.Add(10, new KeyValuePair<string, object>("key", "value"));
    counter2.Add(10, new KeyValuePair<string, object>("key", "value"));
    metricReader.Collect();
    Assert.Equal(2, metricItems.Count);

    // Dispose meter1 after recording: the pending measurements for this
    // cycle are still exported, so the count stays at two.
    metricItems.Clear();
    counter1.Add(10, new KeyValuePair<string, object>("key", "value"));
    counter2.Add(10, new KeyValuePair<string, object>("key", "value"));
    meter1.Dispose();
    metricReader.Collect();
    Assert.Equal(2, metricItems.Count);

    // Meter1 disposed: only meter2's stream is exported now.
    metricItems.Clear();
    counter1.Add(10, new KeyValuePair<string, object>("key", "value"));
    counter2.Add(10, new KeyValuePair<string, object>("key", "value"));
    metricReader.Collect();
    Assert.Single(metricItems);

    // Dispose meter2 after recording: its last measurements still export.
    metricItems.Clear();
    counter1.Add(10, new KeyValuePair<string, object>("key", "value"));
    counter2.Add(10, new KeyValuePair<string, object>("key", "value"));
    meter2.Dispose();
    metricReader.Collect();
    Assert.Single(metricItems);

    // Both meters disposed: nothing is exported.
    metricItems.Clear();
    counter1.Add(10, new KeyValuePair<string, object>("key", "value"));
    counter2.Add(10, new KeyValuePair<string, object>("key", "value"));
    metricReader.Collect();
    Assert.Empty(metricItems);
}
private void MultithreadedHistogramTest<T>(long[] expected, T[] values)
    where T : struct, IComparable
{
    var bucketCounts = new long[11];

    // Exporter captures the running bucket counts of the last metric point seen.
    var metricReader = new BaseExportingMetricReader(new TestExporter<Metric>(batch =>
    {
        foreach (var metric in batch)
        {
            foreach (var metricPoint in metric.GetMetricPoints())
            {
                bucketCounts = metricPoint.GetHistogramBuckets().RunningBucketCounts;
            }
        }
    }));

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{typeof(T).Name}");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    var argsToThread = new UpdateThreadArguments<T>
    {
        Instrument = meter.CreateHistogram<T>("histogram"),
        MreToBlockUpdateThread = new ManualResetEvent(false),
        MreToEnsureAllThreadsStart = new ManualResetEvent(false),
        ValuesToRecord = values,
    };

    // Start all updater threads; they block until released simultaneously.
    Thread[] updateThreads = new Thread[numberOfThreads];
    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i] = new Thread(HistogramUpdateThread<T>);
        updateThreads[i].Start(argsToThread);
    }

    argsToThread.MreToEnsureAllThreadsStart.WaitOne();
    Stopwatch sw = Stopwatch.StartNew();
    argsToThread.MreToBlockUpdateThread.Set();

    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i].Join();
    }

    this.output.WriteLine($"Took {sw.ElapsedMilliseconds} msecs. Total threads: {numberOfThreads}, each thread doing {numberOfMetricUpdateByEachThread * values.Length} recordings.");

    metricReader.Collect();
    Assert.Equal(expected, bucketCounts);
}
public void FlushMetricExporterTest(ExportModes mode)
{
    // Pick the exporter flavor matching the requested export mode.
    BaseExporter<Metric> exporter = null;
    switch (mode)
    {
        case ExportModes.Push:
            exporter = new PushOnlyMetricExporter();
            break;
        case ExportModes.Pull:
            exporter = new PullOnlyMetricExporter();
            break;
        case ExportModes.Pull | ExportModes.Push:
            exporter = new PushPullMetricExporter();
            break;
    }

    var reader = new BaseExportingMetricReader(exporter);
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddReader(reader)
        .Build();

    switch (mode)
    {
        case ExportModes.Push:
            // Push exporters can be driven by Collect/ForceFlush.
            Assert.True(reader.Collect());
            Assert.True(meterProvider.ForceFlush());
            break;
        case ExportModes.Pull:
            // Pull-only exporters reject Collect/ForceFlush; the exporter
            // itself must be asked to pull.
            Assert.False(reader.Collect());
            Assert.False(meterProvider.ForceFlush());
            Assert.True((exporter as IPullMetricExporter).Collect(-1));
            break;
        case ExportModes.Pull | ExportModes.Push:
            Assert.True(reader.Collect());
            Assert.True(meterProvider.ForceFlush());
            break;
    }
}
public void DuplicateInstrumentNamesFromDifferentMetersAreAllowed(AggregationTemporality temporality, bool hasView)
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    using var meter1 = new Meter($"{Utils.GetCurrentMethodName()}.1.{temporality}");
    using var meter2 = new Meter($"{Utils.GetCurrentMethodName()}.2.{temporality}");

    var meterProviderBuilder = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter1.Name)
        .AddMeter(meter2.Name)
        .AddReader(metricReader);

    if (hasView)
    {
        meterProviderBuilder.AddView("name1", new MetricStreamConfiguration() { Description = "description" });
    }

    using var meterProvider = meterProviderBuilder.Build();

    // Expecting one metric stream.
    var counterLong = meter1.CreateCounter<long>("name1");
    counterLong.Add(10);
    metricReader.Collect();
    Assert.Single(metricItems);

    // The following will not be ignored
    // as it is the same metric name but different meter.
    var anotherCounterSameNameDiffMeter = meter2.CreateCounter<long>("name1");
    anotherCounterSameNameDiffMeter.Add(10);
    counterLong.Add(10);
    metricItems.Clear();
    metricReader.Collect();
    Assert.Equal(2, metricItems.Count);
}
public void DuplicateInstrumentNamesFromSameMeterAreNotAllowed(AggregationTemporality temporality, bool hasView)
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{temporality}");

    var meterProviderBuilder = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader);

    if (hasView)
    {
        meterProviderBuilder.AddView("name1", new MetricStreamConfiguration() { Description = "description" });
    }

    using var meterProvider = meterProviderBuilder.Build();

    // Expecting one metric stream.
    var counterLong = meter.CreateCounter<long>("name1");
    counterLong.Add(10);
    metricReader.Collect();
    Assert.Single(metricItems);

    // The following will be ignored as
    // metric of same name exists.
    // Metric stream will remain one.
    var anotherCounterSameName = meter.CreateCounter<long>("name1");
    anotherCounterSameName.Add(10);
    counterLong.Add(10);
    metricItems.Clear();
    metricReader.Collect();
    Assert.Single(metricItems);
}
public void MultithreadedLongCounterTest()
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = AggregationTemporality.Cumulative,
    };

    using var meter = new Meter(Utils.GetCurrentMethodName());
    var counterLong = meter.CreateCounter<long>("mycounter");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    // setup args to threads.
    var mreToBlockUpdateThreads = new ManualResetEvent(false);
    var mreToEnsureAllThreadsStarted = new ManualResetEvent(false);
    var argToThread = new UpdateThreadArguments<long>();
    argToThread.DeltaValueUpdatedByEachCall = deltaLongValueUpdatedByEachCall;
    argToThread.Counter = counterLong;
    argToThread.ThreadsStartedCount = 0;
    argToThread.MreToBlockUpdateThread = mreToBlockUpdateThreads;
    argToThread.MreToEnsureAllThreadsStart = mreToEnsureAllThreadsStarted;

    Thread[] updateThreads = new Thread[numberOfThreads];
    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i] = new Thread(CounterUpdateThread<long>);
        updateThreads[i].Start(argToThread);
    }

    // Block until all threads started.
    mreToEnsureAllThreadsStarted.WaitOne();

    Stopwatch sw = Stopwatch.StartNew();

    // unblock all the threads.
    // (i.e let them start counter.Add)
    mreToBlockUpdateThreads.Set();

    for (int i = 0; i < numberOfThreads; i++)
    {
        // wait for all threads to complete
        updateThreads[i].Join();
    }

    var timeTakenInMilliseconds = sw.ElapsedMilliseconds;
    this.output.WriteLine($"Took {timeTakenInMilliseconds} msecs. Total threads: {numberOfThreads}, each thread doing {numberOfMetricUpdateByEachThread} recordings.");

    metricReader.Collect();

    var sumReceived = GetLongSum(metricItems);
    var expectedSum = deltaLongValueUpdatedByEachCall * numberOfMetricUpdateByEachThread * numberOfThreads;
    Assert.Equal(expectedSum, sumReceived);
}
public void TestHistogramToOltpMetric(string name, string description, string unit, long? longValue, double? doubleValue, AggregationTemporality aggregationTemporality, params object[] keysValues)
{
    var metrics = new List<Metric>();
    var metricReader = new BaseExportingMetricReader(new InMemoryExporter<Metric>(metrics));
    metricReader.Temporality = aggregationTemporality;

    using var meter = new Meter(Utils.GetCurrentMethodName());
    using var provider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    var attributes = ToAttributes(keysValues).ToArray();

    // Record exactly one measurement, typed per the supplied value.
    if (longValue.HasValue)
    {
        var histogram = meter.CreateHistogram<long>(name, unit, description);
        histogram.Record(longValue.Value, attributes);
    }
    else
    {
        var histogram = meter.CreateHistogram<double>(name, unit, description);
        histogram.Record(doubleValue.Value, attributes);
    }

    provider.ForceFlush();

    // Convert the collected metrics into an OTLP export request.
    var batch = new Batch<Metric>(metrics.ToArray(), metrics.Count);
    var request = new OtlpCollector.ExportMetricsServiceRequest();
    request.AddMetrics(ResourceBuilder.CreateEmpty().Build().ToOtlpResource(), batch);

    var resourceMetric = request.ResourceMetrics.Single();
    var instrumentationLibraryMetrics = resourceMetric.InstrumentationLibraryMetrics.Single();
    var actual = instrumentationLibraryMetrics.Metrics.Single();

    Assert.Equal(name, actual.Name);
    Assert.Equal(description ?? string.Empty, actual.Description);
    Assert.Equal(unit ?? string.Empty, actual.Unit);

    Assert.Equal(OtlpMetrics.Metric.DataOneofCase.Histogram, actual.DataCase);
    Assert.Null(actual.Gauge);
    Assert.Null(actual.Sum);
    Assert.NotNull(actual.Histogram);
    Assert.Null(actual.ExponentialHistogram);
    Assert.Null(actual.Summary);

    var otlpAggregationTemporality = aggregationTemporality == AggregationTemporality.Cumulative
        ? OtlpMetrics.AggregationTemporality.Cumulative
        : OtlpMetrics.AggregationTemporality.Delta;
    Assert.Equal(otlpAggregationTemporality, actual.Histogram.AggregationTemporality);

    Assert.Single(actual.Histogram.DataPoints);
    var dataPoint = actual.Histogram.DataPoints.First();
    Assert.True(dataPoint.StartTimeUnixNano > 0);
    Assert.True(dataPoint.TimeUnixNano > 0);

    Assert.Equal(1UL, dataPoint.Count);
    if (longValue.HasValue)
    {
        Assert.Equal((double)longValue, dataPoint.Sum);
    }
    else
    {
        Assert.Equal(doubleValue, dataPoint.Sum);
    }

    // Walk the explicit bounds: every bucket below the one containing the
    // value must be empty, and the containing bucket must hold the single count.
    int bucketIndex;
    for (bucketIndex = 0; bucketIndex < dataPoint.ExplicitBounds.Count; ++bucketIndex)
    {
        if (dataPoint.Sum <= dataPoint.ExplicitBounds[bucketIndex])
        {
            break;
        }

        Assert.Equal(0UL, dataPoint.BucketCounts[bucketIndex]);
    }

    Assert.Equal(1UL, dataPoint.BucketCounts[bucketIndex]);

    if (attributes.Length > 0)
    {
        OtlpTestHelpers.AssertOtlpAttributes(attributes, dataPoint.Attributes);
    }
    else
    {
        Assert.Empty(dataPoint.Attributes);
    }

    Assert.Empty(dataPoint.Exemplars);

#pragma warning disable CS0612 // Type or member is obsolete
    Assert.Null(actual.IntGauge);
    Assert.Null(actual.IntSum);
    Assert.Null(actual.IntHistogram);
    Assert.Empty(dataPoint.Labels);
#pragma warning restore CS0612 // Type or member is obsolete
}
public void SdkSupportsMultipleReaders(AggregationTemporality aggregationTemporality, bool hasViews)
{
    // Reader 1 is always delta; reader 2 uses the parameterized temporality.
    var exportedItems1 = new List<Metric>();
    using var deltaExporter1 = new InMemoryExporter<Metric>(exportedItems1);
    using var deltaReader1 = new BaseExportingMetricReader(deltaExporter1)
    {
        Temporality = AggregationTemporality.Delta,
    };

    var exportedItems2 = new List<Metric>();
    using var deltaExporter2 = new InMemoryExporter<Metric>(exportedItems2);
    using var deltaReader2 = new BaseExportingMetricReader(deltaExporter2)
    {
        Temporality = aggregationTemporality,
    };

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{aggregationTemporality}.{hasViews}");

    var counter = meter.CreateCounter<long>("counter");

    // The observable gauge returns successive values on each observation;
    // each reader observes independently, so readers see different values.
    int index = 0;
    var values = new long[] { 100, 200, 300, 400 };
    long GetValue() => values[index++];
    var gauge = meter.CreateObservableGauge("gauge", () => GetValue());

    var meterProviderBuilder = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(deltaReader1)
        .AddReader(deltaReader2);

    if (hasViews)
    {
        meterProviderBuilder.AddView("counter", "renamedCounter");
    }

    using var meterProvider = meterProviderBuilder.Build();

    counter.Add(10, new KeyValuePair<string, object>("key", "value"));

    meterProvider.ForceFlush();

    Assert.Equal(2, exportedItems1.Count);
    Assert.Equal(2, exportedItems2.Count);

    // Check value exported for Counter
    this.AssertLongSumValueForMetric(exportedItems1[0], 10);
    this.AssertLongSumValueForMetric(exportedItems2[0], 10);

    // Check value exported for Gauge
    this.AssertLongSumValueForMetric(exportedItems1[1], 100);
    this.AssertLongSumValueForMetric(exportedItems2[1], 200);

    exportedItems1.Clear();
    exportedItems2.Clear();

    counter.Add(15, new KeyValuePair<string, object>("key", "value"));

    meterProvider.ForceFlush();

    Assert.Equal(2, exportedItems1.Count);
    Assert.Equal(2, exportedItems2.Count);

    // Check value exported for Counter
    this.AssertLongSumValueForMetric(exportedItems1[0], 15);
    if (aggregationTemporality == AggregationTemporality.Delta)
    {
        this.AssertLongSumValueForMetric(exportedItems2[0], 15);
    }
    else
    {
        this.AssertLongSumValueForMetric(exportedItems2[0], 25);
    }

    // Check value exported for Gauge
    this.AssertLongSumValueForMetric(exportedItems1[1], 300);
    this.AssertLongSumValueForMetric(exportedItems2[1], 400);
}
public void MultithreadedDoubleCounterTest()
{
    var metricItems = new List<Metric>();
    var metricExporter = new TestExporter<Metric>(ProcessExport);
    void ProcessExport(Batch<Metric> batch)
    {
        foreach (var metricItem in batch)
        {
            metricItems.Add(metricItem);
        }
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = AggregationTemporality.Cumulative,
    };

    using var meter = new Meter("TestDoubleCounterMeter");
    var counterDouble = meter.CreateCounter<double>("mycounter");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter("TestDoubleCounterMeter")
        .AddReader(metricReader)
        .Build();

    // setup args to threads.
    var mreToBlockUpdateThreads = new ManualResetEvent(false);
    var mreToEnsureAllThreadsStarted = new ManualResetEvent(false);
    var argToThread = new UpdateThreadArguments<double>();
    argToThread.DeltaValueUpdatedByEachCall = deltaDoubleValueUpdatedByEachCall;
    argToThread.Counter = counterDouble;
    argToThread.ThreadsStartedCount = 0;
    argToThread.MreToBlockUpdateThread = mreToBlockUpdateThreads;
    argToThread.MreToEnsureAllThreadsStart = mreToEnsureAllThreadsStarted;

    Thread[] updateThreads = new Thread[numberOfThreads];
    for (int i = 0; i < numberOfThreads; i++)
    {
        updateThreads[i] = new Thread(CounterUpdateThread<double>);
        updateThreads[i].Start(argToThread);
    }

    // Block until all threads started.
    mreToEnsureAllThreadsStarted.WaitOne();

    Stopwatch sw = Stopwatch.StartNew();

    // unblock all the threads.
    // (i.e let them start counter.Add)
    mreToBlockUpdateThreads.Set();

    for (int i = 0; i < numberOfThreads; i++)
    {
        // wait for all threads to complete
        updateThreads[i].Join();
    }

    var timeTakenInMilliseconds = sw.ElapsedMilliseconds;
    this.output.WriteLine($"Took {timeTakenInMilliseconds} msecs. Total threads: {numberOfThreads}, each thread doing {numberOfMetricUpdateByEachThread} recordings.");

    metricReader.Collect();

    // Compare with a small tolerance: double addition across threads can
    // accumulate floating-point error.
    var sumReceived = GetDoubleSum(metricItems);
    var expectedSum = deltaDoubleValueUpdatedByEachCall * numberOfMetricUpdateByEachThread * numberOfThreads;
    var difference = Math.Abs(sumReceived - expectedSum);
    Assert.True(difference <= 0.0001);
}
public void ObservableCounterAggregationTest(bool exportDelta)
{
    var meterName = "TestMeter" + exportDelta;
    var metricItems = new List<Metric>();
    var metricExporter = new TestExporter<Metric>(ProcessExport);
    void ProcessExport(Batch<Metric> batch)
    {
        foreach (var metricItem in batch)
        {
            metricItems.Add(metricItem);
        }
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = exportDelta
            ? AggregationTemporality.Delta
            : AggregationTemporality.Cumulative,
    };

    using var meter = new Meter(meterName);

    // Each observation reports 10, 20, 30, ... on successive collects.
    int i = 1;
    var counterLong = meter.CreateObservableCounter<long>(
        "observable-counter",
        () =>
        {
            return new List<Measurement<long>>()
            {
                new Measurement<long>(i++ * 10),
            };
        });

    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meterName)
        .AddReader(metricReader)
        .Build();

    metricReader.Collect();
    long sumReceived = GetLongSum(metricItems);
    Assert.Equal(10, sumReceived);

    metricItems.Clear();
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(10, sumReceived);
    }
    else
    {
        Assert.Equal(20, sumReceived);
    }

    metricItems.Clear();
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(10, sumReceived);
    }
    else
    {
        Assert.Equal(30, sumReceived);
    }
}
public void CounterAggregationTest(bool exportDelta)
{
    var metricItems = new List<Metric>();
    var metricExporter = new TestExporter<Metric>(ProcessExport);
    void ProcessExport(Batch<Metric> batch)
    {
        foreach (var metricItem in batch)
        {
            metricItems.Add(metricItem);
        }
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = exportDelta
            ? AggregationTemporality.Delta
            : AggregationTemporality.Cumulative,
    };

    using var meter = new Meter("TestMeter");
    var counterLong = meter.CreateCounter<long>("mycounter");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter("TestMeter")
        .AddReader(metricReader)
        .Build();

    // Cycle 1: both temporalities see 20.
    counterLong.Add(10);
    counterLong.Add(10);
    metricReader.Collect();
    long sumReceived = GetLongSum(metricItems);
    Assert.Equal(20, sumReceived);

    // Cycle 2: delta sees only the new 20; cumulative sees 40.
    metricItems.Clear();
    counterLong.Add(10);
    counterLong.Add(10);
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(20, sumReceived);
    }
    else
    {
        Assert.Equal(40, sumReceived);
    }

    // Cycle 3 (no new measurements): delta reports 0; cumulative holds 40.
    metricItems.Clear();
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(0, sumReceived);
    }
    else
    {
        Assert.Equal(40, sumReceived);
    }

    // Cycle 4: 60 new; delta reports 60, cumulative 100.
    metricItems.Clear();
    counterLong.Add(40);
    counterLong.Add(20);
    metricReader.Collect();
    sumReceived = GetLongSum(metricItems);
    if (exportDelta)
    {
        Assert.Equal(60, sumReceived);
    }
    else
    {
        Assert.Equal(100, sumReceived);
    }
}
public async Task HttpOutCallsAreCollectedSuccessfullyAsync(HttpTestData.HttpOutTestCase tc)
{
    // Local test server returning the status code requested by the test case.
    var serverLifeTime = TestHttpServer.RunServer(
        (ctx) =>
        {
            ctx.Response.StatusCode = tc.ResponseCode == 0 ? 200 : tc.ResponseCode;
            ctx.Response.OutputStream.Close();
        },
        out var host,
        out var port);

    var processor = new Mock<BaseProcessor<Activity>>();
    tc.Url = HttpTestData.NormalizeValues(tc.Url, host, port);

    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);
    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        Temporality = AggregationTemporality.Cumulative,
    };

    var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddHttpClientInstrumentation()
        .AddReader(metricReader)
        .Build();

    using (serverLifeTime)
    using (Sdk.CreateTracerProviderBuilder()
        .AddHttpClientInstrumentation((opt) =>
        {
            opt.SetHttpFlavor = tc.SetHttpFlavor;
            opt.Enrich = ActivityEnrichment;
            opt.RecordException = tc.RecordException.HasValue ? tc.RecordException.Value : false;
        })
        .AddProcessor(processor.Object)
        .Build())
    {
        try
        {
            using var c = new HttpClient();
            var request = new HttpRequestMessage
            {
                RequestUri = new Uri(tc.Url),
                Method = new HttpMethod(tc.Method),
                Version = new Version(2, 0),
            };

            if (tc.Headers != null)
            {
                foreach (var header in tc.Headers)
                {
                    request.Headers.Add(header.Key, header.Value);
                }
            }

            await c.SendAsync(request);
        }
        catch (Exception)
        {
            // test case can intentionally send request that will result in exception
        }
    }

    // Dispose to flush the instrumentation's pending metric data.
    meterProvider.Dispose();

    var requestMetrics = metricItems
        .Where(metric => metric.Name == "http.client.duration")
        .ToArray();

    Assert.Equal(5, processor.Invocations.Count); // SetParentProvider/OnStart/OnEnd/OnShutdown/Dispose called.
    var activity = (Activity)processor.Invocations[2].Arguments[0];

    Assert.Equal(ActivityKind.Client, activity.Kind);
    Assert.Equal(tc.SpanName, activity.DisplayName);

    // Assert.Equal(tc.SpanStatus, d[span.Status.CanonicalCode]);
    Assert.Equal(
        tc.SpanStatus,
        activity.GetTagValue(SpanAttributeConstants.StatusCodeKey) as string);

    if (tc.SpanStatusHasDescription.HasValue)
    {
        var desc = activity.GetTagValue(SpanAttributeConstants.StatusDescriptionKey) as string;
        Assert.Equal(tc.SpanStatusHasDescription.Value, !string.IsNullOrEmpty(desc));
    }

    // Compare span attributes (ignoring otel.* tags) against the expected set.
    var normalizedAttributes = activity.TagObjects.Where(kv => !kv.Key.StartsWith("otel.")).ToImmutableSortedDictionary(x => x.Key, x => x.Value.ToString());
    var normalizedAttributesTestCase = tc.SpanAttributes.ToDictionary(x => x.Key, x => HttpTestData.NormalizeValues(x.Value, host, port));
    Assert.Equal(normalizedAttributesTestCase.Count, normalizedAttributes.Count);
    foreach (var kv in normalizedAttributesTestCase)
    {
        Assert.Contains(activity.TagObjects, i => i.Key == kv.Key && i.Value.ToString().Equals(kv.Value, StringComparison.InvariantCultureIgnoreCase));
    }

    if (tc.RecordException.HasValue && tc.RecordException.Value)
    {
        Assert.Single(activity.Events.Where(evt => evt.Name.Equals("exception")));
    }

    if (tc.ResponseExpected)
    {
        // Exactly one histogram point matching the request duration is expected.
        Assert.Single(requestMetrics);

        var metric = requestMetrics[0];
        Assert.NotNull(metric);
        Assert.True(metric.MetricType == MetricType.Histogram);

        var metricPoints = new List<MetricPoint>();
        foreach (var p in metric.GetMetricPoints())
        {
            metricPoints.Add(p);
        }

        Assert.Single(metricPoints);
        var metricPoint = metricPoints[0];

        var count = metricPoint.GetHistogramCount();
        var sum = metricPoint.GetHistogramSum();

        Assert.Equal(1L, count);
        Assert.Equal(activity.Duration.TotalMilliseconds, sum);

        var attributes = new KeyValuePair<string, object>[metricPoint.Tags.Count];
        int i = 0;
        foreach (var tag in metricPoint.Tags)
        {
            attributes[i++] = tag;
        }

        var method = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpMethod, tc.Method);
        var scheme = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpScheme, "http");
        var statusCode = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpStatusCode, tc.ResponseCode == 0 ? 200 : tc.ResponseCode);
        var flavor = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpFlavor, "2.0");
        Assert.Contains(method, attributes);
        Assert.Contains(scheme, attributes);
        Assert.Contains(statusCode, attributes);
        Assert.Contains(flavor, attributes);
        Assert.Equal(4, attributes.Length);
    }
    else
    {
        Assert.Empty(requestMetrics);
    }
}
public void TestMetricPointCap(AggregationTemporality temporality)
{
    var metricItems = new List<Metric>();
    var metricExporter = new InMemoryExporter<Metric>(metricItems);

    // Tallies the metric points across all currently exported metrics.
    int MetricPointCount()
    {
        var count = 0;
        foreach (var metric in metricItems)
        {
            foreach (ref var metricPoint in metric.GetMetricPoints())
            {
                count++;
            }
        }

        return count;
    }

    var metricReader = new BaseExportingMetricReader(metricExporter)
    {
        PreferredAggregationTemporality = temporality,
    };

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{temporality}");
    var counterLong = meter.CreateCounter<long>("mycounterCapTest");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(metricReader)
        .Build();

    // Make one Add with no tags.
    // as currently we reserve 0th index
    // for no tag point!
    // This may be changed later.
    counterLong.Add(10);
    for (int i = 0; i < AggregatorStore.MaxMetricPoints + 1; i++)
    {
        counterLong.Add(10, new KeyValuePair<string, object>("key", "value" + i));
    }

    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, MetricPointCount());

    metricItems.Clear();
    counterLong.Add(10);
    for (int i = 0; i < AggregatorStore.MaxMetricPoints + 1; i++)
    {
        counterLong.Add(10, new KeyValuePair<string, object>("key", "value" + i));
    }

    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, MetricPointCount());

    counterLong.Add(10);
    for (int i = 0; i < AggregatorStore.MaxMetricPoints + 1; i++)
    {
        counterLong.Add(10, new KeyValuePair<string, object>("key", "value" + i));
    }

    // These updates would be dropped.
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueA"));
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueB"));
    counterLong.Add(10, new KeyValuePair<string, object>("key", "valueC"));
    metricItems.Clear();
    metricReader.Collect();
    Assert.Equal(AggregatorStore.MaxMetricPoints, MetricPointCount());
}
public void CounterAggregationTest(bool exportDelta)
{
    // Verifies counter sums under both Delta and Cumulative temporality:
    // Delta resets after each collect; Cumulative keeps a running total.
    var exportedItems = new List<Metric>();
    var exporter = new InMemoryExporter<Metric>(exportedItems);
    var reader = new BaseExportingMetricReader(exporter)
    {
        PreferredAggregationTemporality = exportDelta
            ? AggregationTemporality.Delta
            : AggregationTemporality.Cumulative,
    };

    using var meter = new Meter($"{Utils.GetCurrentMethodName()}.{exportDelta}");
    var counterLong = meter.CreateCounter<long>("mycounter");
    using var meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddMeter(meter.Name)
        .AddReader(reader)
        .Build();

    // First cycle: 10 + 10 = 20 regardless of temporality.
    counterLong.Add(10);
    counterLong.Add(10);
    reader.Collect();
    var sumReceived = GetLongSum(exportedItems);
    Assert.Equal(20, sumReceived);

    // Second cycle: Delta reports only the new 20; Cumulative reports 40 total.
    exportedItems.Clear();
    counterLong.Add(10);
    counterLong.Add(10);
    reader.Collect();
    sumReceived = GetLongSum(exportedItems);
    Assert.Equal(exportDelta ? 20 : 40, sumReceived);

    // Third cycle with no new measurements: Delta is 0; Cumulative stays at 40.
    exportedItems.Clear();
    reader.Collect();
    sumReceived = GetLongSum(exportedItems);
    Assert.Equal(exportDelta ? 0 : 40, sumReceived);

    // Fourth cycle: 40 + 20 new; Delta reports 60, Cumulative reports 100.
    exportedItems.Clear();
    counterLong.Add(40);
    counterLong.Add(20);
    reader.Collect();
    sumReceived = GetLongSum(exportedItems);
    Assert.Equal(exportDelta ? 60 : 100, sumReceived);
}
public async Task RequestMetricIsCaptured()
{
    // Verifies that the ASP.NET Core instrumentation records exactly one
    // "http.server.duration" histogram point, with the expected HTTP tags.
    var collectedMetrics = new List<Metric>();
    var exporter = new TestExporter<Metric>(OnExport);

    void OnExport(Batch<Metric> batch)
    {
        foreach (var exported in batch)
        {
            collectedMetrics.Add(exported);
        }
    }

    var reader = new BaseExportingMetricReader(exporter)
    {
        PreferredAggregationTemporality = AggregationTemporality.Cumulative,
    };

    this.meterProvider = Sdk.CreateMeterProviderBuilder()
        .AddAspNetCoreInstrumentation()
        .AddReader(reader)
        .Build();

    using (var client = this.factory.CreateClient())
    {
        var response = await client.GetAsync("/api/values");
        response.EnsureSuccessStatusCode();
    }

    // We need to let End callback execute as it is executed AFTER response was returned.
    // In unit tests environment there may be a lot of parallel unit tests executed, so
    // giving some breezing room for the End callback to complete
    await Task.Delay(TimeSpan.FromSeconds(1));

    this.meterProvider.Dispose();

    var requestMetrics = collectedMetrics
        .Where(item => item.Name == "http.server.duration")
        .ToArray();

    Assert.True(requestMetrics.Length == 1);

    var metric = requestMetrics[0];
    Assert.NotNull(metric);
    Assert.True(metric.MetricType == MetricType.Histogram);

    var metricPoints = new List<MetricPoint>();
    foreach (var point in metric.GetMetricPoints())
    {
        metricPoints.Add(point);
    }

    Assert.Single(metricPoints);

    var metricPoint = metricPoints[0];
    // LongValue carries the histogram count; DoubleValue carries the sum.
    Assert.Equal(1L, metricPoint.LongValue);
    Assert.True(metricPoint.DoubleValue > 0);

    /*
     * var bucket = metric.Buckets
     *  .Where(b =>
     *      metric.PopulationSum > b.LowBoundary &&
     *      metric.PopulationSum <= b.HighBoundary)
     *  .FirstOrDefault();
     * Assert.NotEqual(default, bucket);
     * Assert.Equal(1, bucket.Count);
     */

    // Re-assemble the point's tag set for containment checks.
    var attributes = new KeyValuePair<string, object>[metricPoint.Keys.Length];
    for (var i = 0; i < attributes.Length; i++)
    {
        attributes[i] = new KeyValuePair<string, object>(metricPoint.Keys[i], metricPoint.Values[i]);
    }

    var method = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpMethod, "GET");
    var scheme = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpScheme, "http");
    var statusCode = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpStatusCode, 200);
    var flavor = new KeyValuePair<string, object>(SemanticConventions.AttributeHttpFlavor, "HTTP/1.1");
    Assert.Contains(method, attributes);
    Assert.Contains(scheme, attributes);
    Assert.Contains(statusCode, attributes);
    Assert.Contains(flavor, attributes);
    Assert.Equal(4, attributes.Length);
}
public void ToOtlpResourceMetricsTest(bool includeServiceNameInResource)
{
    // Verifies the OTLP translation of an exported counter: resource
    // attributes, instrumentation library identity, and the data point itself.
    var resourceBuilder = ResourceBuilder.CreateEmpty();
    if (includeServiceNameInResource)
    {
        resourceBuilder.AddAttributes(
            new List<KeyValuePair<string, object>>
            {
                new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceName, "service-name"),
                new KeyValuePair<string, object>(ResourceSemanticConventions.AttributeServiceNamespace, "ns1"),
            });
    }

    var tags = new KeyValuePair<string, object>[]
    {
        new KeyValuePair<string, object>("key1", "value1"),
        new KeyValuePair<string, object>("key2", "value2"),
    };

    var reader = new BaseExportingMetricReader(new TestExporter<Metric>(RunTest))
    {
        PreferredAggregationTemporality = AggregationTemporality.Delta,
    };

    using var provider = Sdk.CreateMeterProviderBuilder()
        .SetResourceBuilder(resourceBuilder)
        .AddMeter("TestMeter")
        .AddReader(reader)
        .Build();

    using var meter = new Meter("TestMeter", "0.0.1");
    var counter = meter.CreateCounter<int>("counter");
    counter.Add(100, tags);

    var testCompleted = false;

    // Invokes the TestExporter which will invoke RunTest
    reader.Collect();

    Assert.True(testCompleted);

    void RunTest(Batch<Metric> metrics)
    {
        var request = new OtlpCollector.ExportMetricsServiceRequest();
        request.AddMetrics(resourceBuilder.Build().ToOtlpResource(), metrics);

        Assert.Single(request.ResourceMetrics);
        var resourceMetric = request.ResourceMetrics.First();

        var otlpResource = resourceMetric.Resource;
        if (includeServiceNameInResource)
        {
            Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.StringValue == "service-name");
            Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceNamespace && kvp.Value.StringValue == "ns1");
        }
        else
        {
            // Without explicit attributes the SDK synthesizes a fallback
            // "unknown_service:" service name.
            Assert.Contains(otlpResource.Attributes, (kvp) => kvp.Key == ResourceSemanticConventions.AttributeServiceName && kvp.Value.ToString().Contains("unknown_service:"));
        }

        Assert.Single(resourceMetric.InstrumentationLibraryMetrics);
        var instrumentationLibraryMetrics = resourceMetric.InstrumentationLibraryMetrics.First();
        Assert.Equal(string.Empty, instrumentationLibraryMetrics.SchemaUrl);
        Assert.Equal("TestMeter", instrumentationLibraryMetrics.InstrumentationLibrary.Name);
        Assert.Equal("0.0.1", instrumentationLibraryMetrics.InstrumentationLibrary.Version);

        Assert.Single(instrumentationLibraryMetrics.Metrics);
        foreach (var otlpMetric in instrumentationLibraryMetrics.Metrics)
        {
            Assert.Equal(string.Empty, otlpMetric.Description);
            Assert.Equal(string.Empty, otlpMetric.Unit);
            Assert.Equal("counter", otlpMetric.Name);

            // A monotonic int counter translates to an OTLP Sum.
            Assert.Equal(OtlpMetrics.Metric.DataOneofCase.Sum, otlpMetric.DataCase);
            Assert.True(otlpMetric.Sum.IsMonotonic);
            Assert.Equal(OtlpMetrics.AggregationTemporality.Delta, otlpMetric.Sum.AggregationTemporality);

            Assert.Single(otlpMetric.Sum.DataPoints);
            var dataPoint = otlpMetric.Sum.DataPoints.First();
            Assert.True(dataPoint.StartTimeUnixNano > 0);
            Assert.True(dataPoint.TimeUnixNano > 0);

            Assert.Equal(OtlpMetrics.NumberDataPoint.ValueOneofCase.AsInt, dataPoint.ValueCase);
            Assert.Equal(100, dataPoint.AsInt);

#pragma warning disable CS0612 // Type or member is obsolete
            Assert.Empty(dataPoint.Labels);
#pragma warning restore CS0612 // Type or member is obsolete
            OtlpTestHelpers.AssertOtlpAttributes(tags.ToList(), dataPoint.Attributes);

            Assert.Empty(dataPoint.Exemplars);
        }

        testCompleted = true;
    }
}