public void Clean_IgnoresReportsThatHaveNotReportedAnyEvents()
{
    ResetCleaner();

    // Arrange: one registry in the default (empty) context, driven by a mock timer.
    const string context = "";
    var contextRegistry = new DefaultMetricsRegistry();
    EventMetricsCleaner.ContextRegistries.Add(context, contextRegistry);

    var mockTimer = new MockTimer();
    EventMetricsCleaner.EnableTestTimer(mockTimer);

    // Two reports, both on a 60-second interval.
    var firstReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));
    var secondReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));

    // Record one event, then let only the first report claim it.
    var eventMetric = new EventMetric();
    contextRegistry.Event(MetricName, () => eventMetric, MetricTags.None);
    eventMetric.Record();
    EventMetricsCleaner.UpdateTotalReportedEvents(firstReport, contextRegistry.DataProvider.Events);

    GetReportsReportedEventDetailCount(firstReport, metricNameTypeTags).Should().Be(1);
    GetReportsReportedEventDetailCount(secondReport, metricNameTypeTags).Should().Be(0);

    // Act: trigger a clean cycle.
    mockTimer.OnTimerCallback();

    // Assert: the second report never reported anything, so it is ignored and the event is cleaned.
    var registryCounts = GetRegistryEventDetailCounts(MetricNameType);
    registryCounts[context].Should().Be(0);
}
/// <summary>
/// Performance/range test: writes 32,000 reflection-based event metric samples
/// and logs the total elapsed time.
/// </summary>
public void RecordEventMetricReflectionDataRangeTest()
{
    UserEventObject myDataObject = new UserEventObject("Data Range Test");

    //warm up the object just to get rid of first hit performance
    EventMetric.Register(myDataObject);
    EventMetric.Write(myDataObject);

    //and we're going to write out a BUNCH of samples
    Trace.TraceInformation("Starting reflection data range test");

    // FIX: use Stopwatch instead of DateTime.Now subtraction - Stopwatch is monotonic
    // and immune to wall-clock adjustments, so the elapsed measurement is reliable.
    Stopwatch stopwatch = Stopwatch.StartNew();

    //We have to limit ourselves to 32000 samples to stay within short.
    for (short curSample = 0; curSample < 32000; curSample++)
    {
        //we have a LOT of numbers we need to set to increment this object.
        myDataObject.SetValues(curSample, 32000); //sets all of the numerics

        //and write it out again just for kicks
        EventMetric.Write(myDataObject);
    }

    stopwatch.Stop();
    TimeSpan duration = stopwatch.Elapsed;
    Trace.TraceInformation("Completed reflection data range test in {0} milliseconds for 32,000 samples",
                           duration.TotalMilliseconds);
    Log.Verbose(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.EventMetric.Attributes",
                "Event Metrics performance test flush", null);
}
public void Clean_WithNoReportsRegistered_RemovesAllEvents()
{
    ResetCleaner();

    // Arrange: a registry in the default context and a mock timer, but NO reports registered.
    const string context = "";
    var contextRegistry = new DefaultMetricsRegistry();
    EventMetricsCleaner.ContextRegistries.Add(context, contextRegistry);

    var mockTimer = new MockTimer();
    EventMetricsCleaner.EnableTestTimer(mockTimer);

    // Record three events against a single metric.
    var eventMetric = new EventMetric();
    contextRegistry.Event(MetricName, () => eventMetric, MetricTags.None);
    eventMetric.Record();
    eventMetric.Record();
    eventMetric.Record();

    var registryCounts = GetRegistryEventDetailCounts(MetricNameType);
    registryCounts[context].Should().Be(3);

    // Act: trigger a clean cycle.
    mockTimer.OnTimerCallback();

    // Assert: with no reports to wait on, everything is cleaned.
    registryCounts = GetRegistryEventDetailCounts(MetricNameType);
    registryCounts[context].Should().Be(0);
}
/// <summary>
/// Thread body for the synchronized-registration race test: every thread registers
/// the same event metric definition (only one registration can win), then records
/// a sample tagged with its managed thread id.
/// </summary>
private void SynchronizedMetricRegistration()
{
    string name = Thread.CurrentThread.Name;
    Trace.TraceInformation("{0} started", name);
    EventMetricDefinition newDefinition = new EventMetricDefinition("EventMetricTests", "Gibraltar.Monitor.Test", "Sync");
    newDefinition.AddValue("delta", typeof(double), SummaryFunction.RunningSum, null, "Delta", "The applied delta");
    try
    {
        Interlocked.Increment(ref m_ThreadCounter);
        lock (m_SyncLock)
        {
            // Do nothing, just release it immediately.
        }

        // Register by ref: if another thread won the registration race we get the winner's definition back.
        EventMetricDefinition.Register(ref newDefinition);
        EventMetric metric = EventMetric.Register(newDefinition, name);
        Trace.TraceInformation("{0} completed registration of event metric", name);

        EventMetricSample sample = metric.CreateSample();
        sample.SetValue("delta", Thread.CurrentThread.ManagedThreadId);
        sample.Write();
    }
    catch (Exception ex)
    {
        m_ThreadFailed = true;
        // FIX: the format string had only three placeholders but four arguments were
        // passed, so the exception object (with its stack trace) was silently dropped.
        Trace.TraceError("{0} got {1}: {2}\n{3}", name, ex.GetType().Name, ex.Message, ex);
    }
    Interlocked.Decrement(ref m_ThreadCounter);
}
/// <summary>
/// Record an event metric using a programmatic declaration
/// </summary>
/// <param name="pagesLoaded">The number of pages currently loaded in the cache.</param>
public static void RecordCacheMetric(int pagesLoaded)
{
    EventMetricDefinition cacheMetric;

    //so we can be called multiple times we want to see if the definition already exists.
    if (EventMetricDefinition.TryGetValue("GibraltarSample", "Database.Engine", "Cache", out cacheMetric) == false)
    {
        cacheMetric = new EventMetricDefinition("GibraltarSample", "Database.Engine", "Cache");

        //add the values (that are part of the definition)
        cacheMetric.AddValue("pages", typeof(int), SummaryFunction.Average, "Pages", "Pages in Cache", "Total number of pages in cache");
        cacheMetric.AddValue("size", typeof(int), SummaryFunction.Average, "Bytes", "Cache Size", "Total number of bytes used by pages in cache");

        //and now that we're done, we need to register this definition.  This locks the definition
        //and makes it go live.  Note that it's passed by ref because if another thread registered the same metric, we'll get the
        //registered object (whoever won the race), not necessarily the one we've just created to pass in.
        EventMetricDefinition.Register(ref cacheMetric);
    }

    //Now we can get the specific metric we want to record samples under (this is an instance of the definition)
    EventMetric cacheEventMetric = EventMetric.Register(cacheMetric, null);

    //now go ahead and write that sample.
    EventMetricSample newSample = cacheEventMetric.CreateSample();
    newSample.SetValue("pages", pagesLoaded);
    // NOTE(review): 8196 looks like a typo for the usual 8192-byte page size - confirm intended value.
    newSample.SetValue("size", pagesLoaded * 8196);
    newSample.Write();
}
/// <summary>
/// Completes this metric from a command-completed event and writes it to the log.
/// </summary>
/// <param name="eventData">The event payload supplying the command duration.</param>
public void Record(CommandEndEventData eventData)
{
    OnStop();
    Duration = eventData.Duration;
    EventMetric.Write(this);
}
public void Clean_WithAllReportsFilteringOutEvents_RemovesAllEvents()
{
    ResetCleaner();

    // Arrange: one registry in the default context driven by a mock timer.
    const string context = "";
    var contextRegistry = new DefaultMetricsRegistry();
    EventMetricsCleaner.ContextRegistries.Add(context, contextRegistry);

    var mockTimer = new MockTimer();
    EventMetricsCleaner.EnableTestTimer(mockTimer);

    // Two reports on the same 60-second interval.
    var firstReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));
    var secondReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));

    // Record three events; both reports report an EMPTY event list (they filtered everything out).
    var eventMetric = new EventMetric();
    contextRegistry.Event(MetricName, () => eventMetric, MetricTags.None);
    eventMetric.Record();
    eventMetric.Record();
    eventMetric.Record();
    EventMetricsCleaner.UpdateTotalReportedEvents(firstReport, new List<EventValueSource>());
    EventMetricsCleaner.UpdateTotalReportedEvents(secondReport, new List<EventValueSource>());

    GetReportsReportedEventDetailCount(firstReport, MetricNameType).Should().Be(0);
    GetReportsReportedEventDetailCount(secondReport, MetricNameType).Should().Be(0);

    var registryCounts = GetRegistryEventDetailCounts(MetricNameType);
    registryCounts[context].Should().Be(3);

    // Act: trigger a clean cycle.
    mockTimer.OnTimerCallback();

    // Assert: since every report has filtered the events out, all of them are removed.
    registryCounts = GetRegistryEventDetailCounts(MetricNameType);
    registryCounts[context].Should().Be(0);
}
/// <summary>
/// Verifies reflection-based event metric registration: a direct class registration,
/// then a class whose metrics are declared only on inherited interfaces.
/// </summary>
public void RecordEventMetricReflection()
{
    UserEventObject myDataObject = new UserEventObject(null);
    EventMetric.Register(myDataObject);

    EventMetricDefinition metricDefinition;
    // Registration above should have created a definition keyed to the concrete type.
    Assert.IsTrue(EventMetricDefinition.TryGetValue(typeof(UserEventObject), out metricDefinition));
    EventMetricDefinition.Write(myDataObject);

    // Now try it with inheritance and interfaces in the mix.
    UserMultipleEventObject bigDataObject = new UserMultipleEventObject(null);
    EventMetric.Register(bigDataObject);

    // There's no event at the top level, so this lookup should fail.
    Assert.IsFalse(EventMetricDefinition.TryGetValue(typeof(UserMultipleEventObject), out metricDefinition));

    // Now check for interfaces... each metric-bearing interface gets its own definition.
    Assert.IsTrue(EventMetricDefinition.TryGetValue(typeof(IEventMetricOne), out metricDefinition));
    Assert.IsTrue(EventMetricDefinition.TryGetValue(typeof(IEventMetricTwo), out metricDefinition));
    Assert.IsTrue(EventMetricDefinition.TryGetValue(typeof(IEventMetricThree), out metricDefinition));
    Assert.IsTrue(EventMetricDefinition.TryGetValue(typeof(IEventMetricFour), out metricDefinition));

    // And sample all of them on the big object with a single call...
    EventMetric.Write(bigDataObject);
}
/// <summary>
/// Thread body for the attribute-based synchronized-registration race test: every
/// thread registers the same user object's event metric, then applies a delta
/// tagged with its managed thread id.
/// </summary>
private void SynchronizedMetricRegistration()
{
    string name = Thread.CurrentThread.Name;
    Trace.TraceInformation("{0} started", name);
    UserEventCollisionClass userObject = new UserEventCollisionClass(name);
    try
    {
        Interlocked.Increment(ref m_ThreadCounter);
        lock (m_SyncLock)
        {
            // Do nothing, just release it immediately.
        }

        EventMetric.Register(userObject);
        Trace.TraceInformation("{0} completed registration of event metric", name);
        userObject.ApplyDelta(Thread.CurrentThread.ManagedThreadId);
    }
    catch (Exception ex)
    {
        m_ThreadFailed = true;
        // FIX: the format string had only three placeholders but four arguments were
        // passed, so the exception object (with its stack trace) was silently dropped.
        Trace.TraceError("{0} got {1}: {2}\n{3}", name, ex.GetType().Name, ex.Message, ex);
    }
    Interlocked.Decrement(ref m_ThreadCounter);
}
/// <summary>
/// Stops the operation timer (if any) and records an event metric sample with the
/// operation name and duration, lazily registering the metric definition and
/// metric instance on first use.
/// </summary>
private void StopAndRecordMetric()
{
    //record our end time
    if (m_Timer == null)
    {
        // No timer was started; record a zero duration rather than failing.
        m_Duration = new TimeSpan(0);
    }
    else
    {
        m_Timer.Stop();
        m_Duration = m_Timer.Elapsed;
    }

    //Get the METRIC DEFINITION
    IMetricDefinition metricDefinition;
    EventMetricDefinition eventDefinition;
    if (Log.Metrics.TryGetValue(MetricTypeName, m_Category, MetricCounterName, out metricDefinition) == false)
    {
        //it doesn't exist yet - add it
        eventDefinition = new EventMetricDefinition(MetricTypeName, m_Category, MetricCounterName);
        eventDefinition.Description = MetricDefinitionDescription;
        EventMetricValueDefinitionCollection valueDefinitionCollection = (EventMetricValueDefinitionCollection)eventDefinition.Values;
        valueDefinitionCollection.Add("operationname", typeof(string), "Operation Name", "The operation that was executed.");
        valueDefinitionCollection.Add("duration", typeof(TimeSpan), "Duration", "The duration the operation executed.");
        ((EventMetricValueDefinition)eventDefinition.Values["duration"]).UnitCaption = "Milliseconds";
        eventDefinition.DefaultValue = eventDefinition.Values["duration"];

        //and don't forget to register it!
        eventDefinition = eventDefinition.Register();
    }
    else
    {
        eventDefinition = (EventMetricDefinition)metricDefinition;
    }

    //Get the METRIC (the default, null-named instance of the definition)
    IMetric metric;
    EventMetric eventMetric;
    if (eventDefinition.Metrics.TryGetValue(null, out metric) == false)
    {
        eventMetric = new EventMetric(eventDefinition, (string)null);
    }
    else
    {
        eventMetric = (EventMetric)metric;
    }

    //and finally we can RECORD THE SAMPLE.
    EventMetricSample metricSample = eventMetric.CreateSample();
    metricSample.SetValue("operationname", OperationName);
    metricSample.SetValue("duration", Duration);
    metricSample.Write();
}
/// <summary>
/// Performance test for event metrics declared and sampled through the method-based
/// (programmatic) API: registers a definition once, then writes LoopsPerEventTest
/// samples in a timed loop and reports the throughput.
/// </summary>
public void EventMetricsByMethodsPerformanceTest()
{
    EventMetricDefinition eventDefinition;
    if (false == EventMetricDefinition.TryGetValue("PerformanceTestsMetrics", "Performance.EventMetrics.Methods", "UserEvent", out eventDefinition))
    {
        eventDefinition = new EventMetricDefinition("PerformanceTestsMetrics", "Performance.EventMetrics.Methods", "UserEvent");
        eventDefinition.Caption = "User Event";
        eventDefinition.Description = "Unit test event metric with typical data.";
        eventDefinition.AddValue("fileName", typeof(string), SummaryFunction.Count, null, "File name", "The name of the file");
        eventDefinition.AddValue("operation", typeof(UserFileOperation), SummaryFunction.Count, null, "Operation", "The type of file operation being performed.");
        eventDefinition.AddValue("duration", typeof(TimeSpan), SummaryFunction.Average, "ms", "Duration", "The duration for this file operation.");
        // Register with "duration" as the default value column.
        EventMetricDefinition.Register(ref eventDefinition, "duration");
    }

    Assert.IsNotNull(eventDefinition);
    Assert.IsTrue(eventDefinition.IsReadOnly); // registration locks the definition
    Trace.TraceInformation("Event metric definition registered by methods.");

    EventMetric eventMetric = EventMetric.Register(eventDefinition, "MethodsPerformanceTest");
    Assert.IsNotNull(eventMetric);

    string fileName = @"C:\Dummy\File\Name.txt";
    DateTimeOffset operationStart = DateTimeOffset.UtcNow;
    DateTimeOffset operationEnd = operationStart.AddMilliseconds(1234);

    //first, lets get everything to flush so we have our best initial state.
    Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Preparing for Test", "Flushing queue");

    //now that we know it's flushed everything, lets do our timed loop.
    DateTimeOffset startTime = DateTimeOffset.UtcNow;
    for (int curMessage = 0; curMessage < LoopsPerEventTest; curMessage++)
    {
        EventMetricSample eventSample = eventMetric.CreateSample();
        eventSample.SetValue("fileName", fileName);
        eventSample.SetValue("operation", UserFileOperation.Write);
        eventSample.SetValue("duration", operationEnd - operationStart);
        eventSample.Write();
    }
    DateTimeOffset messageEndTime = DateTimeOffset.UtcNow;

    //one wait for commit message to force the buffer to flush.
    Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Waiting for Samples to Commit", null);

    //and store off our time
    DateTimeOffset endTime = DateTimeOffset.UtcNow;
    TimeSpan testDuration = endTime - startTime;
    TimeSpan loopDuration = messageEndTime - startTime;
    // NOTE(review): the loop writes one sample per iteration, but this total multiplies by
    // MessagesPerEventLoop - confirm the per-message averages below are computed as intended.
    const int messagesPerTest = LoopsPerEventTest * MessagesPerEventLoop;
    Trace.TraceInformation("Event Metrics by Methods Test committed {0:N0} events in {1:F3} ms (average {2:F4} ms per message).  Average loop time {3:F4} ms ({4} values per message) and final flush time {5:F3} ms.",
                           messagesPerTest, testDuration.TotalMilliseconds, (testDuration.TotalMilliseconds / messagesPerTest),
                           (loopDuration.TotalMilliseconds / LoopsPerEventTest), ValuesPerEventMessage, (endTime - messageEndTime).TotalMilliseconds);
}
/// <summary>
/// Completes this metric from a data-reader-disposed event (capturing duration and
/// row count) and writes it to the log.
/// </summary>
/// <param name="eventData">The event payload supplying the duration and rows read.</param>
public void Record(DataReaderDisposingEventData eventData)
{
    OnStop();
    Duration = eventData.Duration;
    Rows = eventData.ReadCount;
    EventMetric.Write(this);
}
/// <summary>
/// Completes this metric from a command-error event, recording the exception type
/// name as the result, and writes it to the log.
/// </summary>
/// <param name="eventData">The event payload supplying the duration and exception.</param>
public void Record(CommandErrorEventData eventData)
{
    OnStop();
    Duration = eventData.Duration;
    // Null-safe: Result stays null if no exception was attached to the event.
    Result = eventData.Exception?.GetType().Name;
    EventMetric.Write(this);
}
/// <summary>
/// Test fixture setup: ensures the "Manual" event metric definition exists (creating
/// it the hard way on first run), then ensures the default (null-named) metric
/// instance exists for it.
/// </summary>
public void Setup()
{
    IMetricDefinition newMetricDefinition;
    EventMetricDefinition newEventMetricDefinition;

    // See if we already created the event metric we need
    if (Log.Metrics.TryGetValue("GeneralMetricCollectionTests", "Gibraltar.Monitor.Test", "Manual", out newMetricDefinition) == false)
    {
        // Didn't find it, so define an event metric manually (the hard way)
        newEventMetricDefinition = new EventMetricDefinition("GeneralMetricCollectionTests", "Gibraltar.Monitor.Test", "Manual");
        newMetricDefinition = newEventMetricDefinition; // cast it as the base type, too

        // we now have a minimal definition, but we probably want to add a few attributes to make it useful
        // NOTE: This is designed to exactly match UserDataObject for convenience in analyzing results.
        EventMetricValueDefinitionCollection valueDefinitions = (EventMetricValueDefinitionCollection)newEventMetricDefinition.Values;
        valueDefinitions.Add("short_average", typeof(short), "Short Average", "Data of type Short").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("short_sum", typeof(short), "Short Sum", "Data of type Short").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("ushort_average", typeof(ushort), "UShort Average", "Data of type UShort").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("ushort_sum", typeof(ushort), "UShort Sum", "Data of type UShort").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("int_average", typeof(int), "Int Average", "Data of type Int").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("int_sum", typeof(int), "Int Sum", "Data of type Int").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("uint_average", typeof(uint), "UInt Average", "Data of type UInt").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("uint_sum", typeof(uint), "UInt Sum", "Data of type UInt").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("long_average", typeof(long), "Long Average", "Data of type Long").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("long_sum", typeof(long), "Long Sum", "Data of type Long").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("ulong_average", typeof(ulong), "ULong Average", "Data of type ULong").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("ulong_sum", typeof(ulong), "ULong Sum", "Data of type ULong").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("decimal_average", typeof(decimal), "Decimal Average", "Data of type Decimal").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("decimal_sum", typeof(decimal), "Decimal Sum", "Data of type Decimal").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("double_average", typeof(double), "Double Average", "Data of type Double").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("double_sum", typeof(double), "Double Sum", "Data of type Double").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("float_average", typeof(float), "Float Average", "Data of type Float").DefaultTrend = EventMetricValueTrend.Average;
        valueDefinitions.Add("float_sum", typeof(float), "Float Sum", "Data of type Float").DefaultTrend = EventMetricValueTrend.Sum;
        valueDefinitions.Add("string", typeof(string), "String", "Data of type String");
        valueDefinitions.Add("system.enum", typeof(System.Enum), "System.Enum", "Data of type System.Enum");

        newEventMetricDefinition.DefaultValue = newEventMetricDefinition.Values["int_average"];
        newEventMetricDefinition = newEventMetricDefinition.Register(); // Register it with the collection.
    }
    else
    {
        // Found one, try to cast it to the expected EventMetricDefinition type (raise exception if fails to match)
        newEventMetricDefinition = (EventMetricDefinition)newMetricDefinition;
    }

    IMetric newMetric;
    if (newMetricDefinition.Metrics.TryGetValue(null, out newMetric) == false)
    {
        // go ahead and add this new metric
        newMetric = new EventMetric(newEventMetricDefinition, (string)null); //add the default metric.
    }

    Assert.IsNotNull(newMetricDefinition);
    Assert.IsNotNull(newEventMetricDefinition);
    Assert.IsNotNull(newMetric);
}
/// <summary>
/// Records the metrics for this request
/// </summary>
/// <remarks>Stops the request timer; the event metric is only written when the
/// request has not been suppressed.</remarks>
public void Record()
{
    _timer.Stop();

    // Guard clause: suppressed requests stop the timer but write nothing.
    if (Suppress)
    {
        return;
    }

    EventMetric.Write(this);
}
/// <summary>
/// Performance test for the method-based sampling API: writes 32,000 fully-populated
/// samples against the pre-registered "Manual" definition and logs the elapsed time.
/// </summary>
public void RecordEventMetricPerformanceTest()
{
    // Internally we want to make this comparable to the reflection test, just varying the part that uses reflection.
    EventMetricDefinition metricDefinition;
    Assert.IsTrue(EventMetricDefinition.TryGetValue("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", out metricDefinition));
    Assert.IsNotNull(metricDefinition);

    EventMetric thisExperimentMetric = EventMetric.Register(metricDefinition, "RecordEventMetricPerformanceTest");
    Assert.IsNotNull(thisExperimentMetric);

    // We're going to write out a BUNCH of samples...
    Trace.TraceInformation("Starting performance test");
    DateTime curTime = DateTime.Now; //for timing how fast we are
    int curSample;
    for (curSample = 0; curSample < 32000; curSample++)
    {
        EventMetricSample newSample = thisExperimentMetric.CreateSample();
        // Populate every value column defined on the "Manual" definition.
        newSample.SetValue("short_average", curSample);
        newSample.SetValue("short_sum", curSample);
        newSample.SetValue("short_runningaverage", curSample);
        newSample.SetValue("short_runningsum", curSample);
        newSample.SetValue("ushort_average", (ushort)curSample);
        newSample.SetValue("ushort_sum", (ushort)curSample);
        newSample.SetValue("int_average", curSample);
        newSample.SetValue("int_sum", curSample);
        newSample.SetValue("uint_average", (uint)curSample);
        newSample.SetValue("uint_sum", (uint)curSample);
        newSample.SetValue("long_average", curSample);
        newSample.SetValue("long_sum", curSample);
        newSample.SetValue("ulong_average", (ulong)curSample);
        newSample.SetValue("ulong_sum", (ulong)curSample);
        newSample.SetValue("decimal_average", curSample);
        newSample.SetValue("decimal_sum", curSample);
        newSample.SetValue("double_average", curSample);
        newSample.SetValue("double_sum", curSample);
        newSample.SetValue("float_average", curSample);
        newSample.SetValue("float_sum", curSample);
        newSample.SetValue("timespan_average", new TimeSpan(curSample));
        newSample.SetValue("timespan_sum", new TimeSpan(curSample));
        newSample.SetValue("timespan_runningaverage", new TimeSpan(curSample));
        newSample.SetValue("timespan_runningsum", new TimeSpan(curSample));
        newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", curSample));
        newSample.SetValue("system.enum", (UserDataEnumeration)curSample);
        newSample.Write(); //only now does it get written because we had to wait until you populated the metrics
    }
    TimeSpan duration = DateTime.Now - curTime;
    Trace.TraceInformation("Completed performance test in {0} milliseconds for {1} samples",
                           duration.TotalMilliseconds, curSample);
    Log.Verbose(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.EventMetric.Methods", "Event Metrics performance test flush", null);
}
/// <summary>
/// Create an event metric sample packet for live data collection
/// </summary>
/// <param name="metric">The metric this sample is for</param>
public EventMetricSamplePacket(EventMetric metric)
    : base(metric)
{
    //create a new sample values collection the correct size of our metric's values collection
    Values = new object[metric.Definition.Values.Count];
    m_ValueDefinitions = (EventMetricValueDefinitionCollection)metric.Definition.Values;

    //and set our default dynamic type name based on our metric definition.  It isn't clear to me
    //that there's really a contract that it won't be changed by the serializer, so we allow it to be
    DynamicTypeName = metric.Definition.Name;
}
public void Clean_WithMultipleRegistriesInSeparateContexts_RemovesEvents()
{
    ResetCleaner();

    // Arrange: two independent registries, each in its own context.
    const string ctx1 = "ctx1";
    var firstRegistry = new DefaultMetricsRegistry();
    EventMetricsCleaner.ContextRegistries.Add(ctx1, firstRegistry);

    const string ctx2 = "ctx2";
    var secondRegistry = new DefaultMetricsRegistry();
    EventMetricsCleaner.ContextRegistries.Add(ctx2, secondRegistry);

    var mockTimer = new MockTimer();
    EventMetricsCleaner.EnableTestTimer(mockTimer);

    // First context: three events, all reported by report 1.
    var firstReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));
    var firstMetric = new EventMetric();
    firstRegistry.Event("test1", () => firstMetric, MetricTags.None);
    firstMetric.Record();
    firstMetric.Record();
    firstMetric.Record();
    EventMetricsCleaner.UpdateTotalReportedEvents(firstReport, firstRegistry.DataProvider.Events);

    // Second context: two events, all reported by report 2.
    var secondReport = EventMetricsCleaner.RegisterReport(new TimeSpan(0, 0, 0, 60));
    var secondMetric = new EventMetric();
    secondRegistry.Event("test2", () => secondMetric, MetricTags.None);
    secondMetric.Record();
    secondMetric.Record();
    EventMetricsCleaner.UpdateTotalReportedEvents(secondReport, secondRegistry.DataProvider.Events);

    // Sanity check: each metric's events live only in its own context.
    var registryCounts = GetRegistryEventDetailCounts("test1.event");
    registryCounts[ctx1].Should().Be(3);
    registryCounts[ctx2].Should().Be(0);
    registryCounts = GetRegistryEventDetailCounts("test2.event");
    registryCounts[ctx1].Should().Be(0);
    registryCounts[ctx2].Should().Be(2);

    // Act: trigger a clean cycle.
    mockTimer.OnTimerCallback();

    // Assert: reported events are removed from both contexts.
    registryCounts = GetRegistryEventDetailCounts("test1.event");
    registryCounts[ctx1].Should().Be(0);
    registryCounts[ctx2].Should().Be(0);
    registryCounts = GetRegistryEventDetailCounts("test2.event");
    registryCounts[ctx1].Should().Be(0);
    registryCounts[ctx2].Should().Be(0);
}
/// <summary>
/// Perform the one-time work to create a metric
/// </summary>
/// <param name="name">Dot-delimited display name for this metric</param>
private void InitializeMetric(string name)
{
    // Handle edge cases
    if (string.IsNullOrEmpty(name))
    {
        name = DefaultInstance;
    }
    name = name.Trim();
    // Strip any leading dots (and the whitespace they may hide) one at a time.
    while (!string.IsNullOrEmpty(name) && name[0] == '.')
    {
        name = name.Substring(1).Trim();
    }

    // Set up category and instance as Loupe wants
    string category;
    string instance;
    var pos = name.LastIndexOf('.'); // check for delimited name
    if (pos <= 0)
    {
        // If not delimited, just use base category
        category = RootCategory;
        instance = name;
    }
    else
    {
        // If delimited, just use the last part as instance name
        // and combine the rest with category
        category = RootCategory + '.' + name.Substring(0, pos);
        instance = name.Substring(pos + 1);
    }
    _logCategory = category + "." + instance; // Initialize category for logging

    EventMetricDefinition metricDefinition;
    // Create the metric on first call then use cached copy thereafter
    if (!EventMetricDefinition.TryGetValue(MetricSystem, category, instance, out metricDefinition))
    {
        metricDefinition = new EventMetricDefinition(MetricSystem, category, instance);
        metricDefinition.AddValue(DurationCaption, typeof(TimeSpan), SummaryFunction.Average, null, DurationCaption, null);
        EventMetricDefinition.Register(ref metricDefinition);
    }

    // Grab the metric from cache
    Metric = EventMetric.Register(metricDefinition, null);
}
public void MetricStringKeyTrimming()
{
    // Register (or fetch) a metric instance we can look up again by key.
    MetricDefinition testMetricDefinition = GetTestMetricDefinition();
    Metric lookupMetric = EventMetric.AddOrGet((EventMetricDefinition)testMetricDefinition, "MetricStringKeyTrimming");

    //this test we already passed, so now we use it to do the rest of our tests.
    //Now try to get it using each key element with extra white space.

    // Lookup by instance name padded with whitespace should still resolve to the same object.
    string paddedInstanceName = string.Format(CultureInfo.InvariantCulture, "  {0}  ", lookupMetric.InstanceName);
    IMetric resolvedMetric = testMetricDefinition.Metrics[paddedInstanceName];
    Assert.IsNotNull(resolvedMetric);
    Assert.AreSame(lookupMetric, resolvedMetric);

    // Lookup by full metric name padded with whitespace should also resolve to the same object.
    string paddedMetricName = string.Format(CultureInfo.InvariantCulture, "  {0}  ", lookupMetric.Name);
    resolvedMetric = testMetricDefinition.Metrics[paddedMetricName];
    Assert.IsNotNull(resolvedMetric);
    Assert.AreSame(lookupMetric, resolvedMetric);
}
/// <summary>
/// Constructs and instance of <see cref="LoupeMiddleware"/>.
/// </summary>
/// <param name="next">The next delegate in the request pipeline.</param>
/// <param name="agent">The Loupe agent used to name and define the request metric.</param>
/// <param name="applicationLifetime">Lifetime used to hook session start/stop.</param>
/// <exception cref="ArgumentNullException">Thrown when any argument is null.</exception>
public LoupeMiddleware(RequestDelegate next, LoupeAgent agent, IApplicationLifetime applicationLifetime)
{
    _next = next ?? throw new ArgumentNullException(nameof(next));
    _agent = agent ?? throw new ArgumentNullException(nameof(agent));
    if (applicationLifetime == null)
    {
        throw new ArgumentNullException(nameof(applicationLifetime));
    }

    applicationLifetime.ApplicationStarted.Register(StartSession);
    // NOTE(review): OnApplicationStopping is registered on the ApplicationStopped token
    // (fires after shutdown completes) - confirm ApplicationStopping wasn't intended.
    applicationLifetime.ApplicationStopped.Register(OnApplicationStopping);

    // Define and register the per-request event metric once, up front.
    var requestMetricDefinition = DefineRequestMetric(agent.ApplicationName);
    _requestMetric = EventMetric.Register(requestMetricDefinition, null);
}
public void PrettySampleDataOverTimeTest()
{
    // Register a reflection-driven event metric source.
    UserEventObject myDataObject = new UserEventObject("Pretty Data");
    EventMetric.Register(myDataObject);

    //do a set of 20 samples with a gap between each.
    for (short sampleIndex = 0; sampleIndex < 20; sampleIndex++)
    {
        myDataObject.SetValues(sampleIndex);
        EventMetric.Write(myDataObject);

        //now sleep for a little to make a nice gap.  This has to be >> 16 ms to show a reasonable gap.
        Thread.Sleep(200);
    }
}
/// <summary>
/// Writes one fully-populated sample against the pre-registered "Manual" event
/// metric definition using the method-based API.
/// </summary>
public void RecordEventMetric()
{
    // Internally we want to make this comparable to the reflection test, just varying the part that uses reflection.
    EventMetricDefinition metricDefinition;
    Assert.IsTrue(EventMetricDefinition.TryGetValue("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", out metricDefinition));
    Assert.IsNotNull(metricDefinition);

    EventMetric thisExperimentMetric = EventMetric.Register(metricDefinition, "RecordEventMetric");
    Assert.IsNotNull(thisExperimentMetric);

    // To write a sample manually, we must first create an empty sample for this event metric instance.
    EventMetricSample newSample = thisExperimentMetric.CreateSample();

    // Then we set the values.
    newSample.SetValue("short_average", 1);
    newSample.SetValue("short_sum", 1);
    newSample.SetValue("short_runningaverage", 1);
    newSample.SetValue("short_runningsum", 1);
    newSample.SetValue("ushort_average", (ushort)1);
    newSample.SetValue("ushort_sum", (ushort)1);
    newSample.SetValue("int_average", 1);
    newSample.SetValue("int_sum", 1);
    newSample.SetValue("uint_average", (uint)1);
    newSample.SetValue("uint_sum", (uint)1);
    newSample.SetValue("long_average", 1);
    newSample.SetValue("long_sum", 1);
    newSample.SetValue("ulong_average", (ulong)1);
    newSample.SetValue("ulong_sum", (ulong)1);
    newSample.SetValue("decimal_average", 1);
    newSample.SetValue("decimal_sum", 1);
    newSample.SetValue("double_average", 1);
    newSample.SetValue("double_sum", 1);
    newSample.SetValue("float_average", 1);
    newSample.SetValue("float_sum", 1);
    newSample.SetValue("timespan_average", new TimeSpan(1));
    newSample.SetValue("timespan_sum", new TimeSpan(1));
    newSample.SetValue("timespan_runningaverage", new TimeSpan(1));
    newSample.SetValue("timespan_runningsum", new TimeSpan(1));
    newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", 1));
    newSample.SetValue("system.enum", (UserDataEnumeration)1);

    // And finally, tell the sample to write itself to the Gibraltar log.
    newSample.Write();
}
/// <summary>
/// Builds and writes connection open/close metrics from EF relational connection
/// events, correlating close events back to the instance name cached at open time.
/// </summary>
/// <param name="name">The diagnostic event name being handled.</param>
/// <param name="eventData">The connection event payload.</param>
private void HandleConnection(string name, ConnectionEndEventData eventData)
{
    ConnectionMetric metric;
    if (name == RelationalEventId.ConnectionOpened.Name)
    {
        metric = new ConnectionMetric(eventData)
        {
            // NOTE(review): actions are "Open" here vs "Closed" below - mixed tense; confirm intended labels.
            Action = "Open",
            ConnectionDelta = 1,
            Duration = eventData.Duration
        };
        // Cache the instance name so the matching close event can reuse it.
        _connectionNames.TryAdd(eventData.ConnectionId, metric.InstanceName);
    }
    else if (name == RelationalEventId.ConnectionClosed.Name)
    {
        // *sigh*.  We've found the server names don't always match between open and close
        //so look up our cached value..
        if (_connectionNames.TryRemove(eventData.ConnectionId, out string instanceName))
        {
            metric = new ConnectionMetric(eventData, instanceName)
            {
                Action = "Closed",
                ConnectionDelta = -1,
                Duration = eventData.Duration
            };
        }
        else
        {
            // we ignore the else clause because it's not a "matching" event.
            return;
        }
    }
    else
    {
        // Not a connection open/close event; nothing to record.
        return;
    }

    EventMetric.Write(metric);
    SampledMetric.Write(metric);
}
/// <summary>
/// Record the request completion information
/// </summary>
/// <param name="context">The HTTP context whose response status is captured.</param>
/// <remarks>Writes an event metric for the request if metrics are enabled</remarks>
public void Record(HttpContext context)
{
    ResponseCode = context.Response.StatusCode;

    // Nothing to do when the feature is switched off entirely.
    if (Options.Enabled == false)
    {
        return;
    }

    if (Options.LogRequests)
    {
        OnLogRequestCompletion();
    }

    if (Options.LogRequestMetrics)
    {
        EventMetric.Write(this);
    }
}
/// <summary>
/// Records connectivity metrics for an endpoint: a sampled latency metric on every
/// check, plus an event metric whenever accessibility changes state.
/// </summary>
/// <param name="ipAddress">The endpoint address used as the metric instance name.</param>
/// <param name="isAccessible">Whether the endpoint responded.</param>
/// <param name="accessibleChanged">Whether accessibility flipped since the last check.</param>
/// <param name="latency">Measured (or attempted) latency in milliseconds.</param>
private void RecordStatusMetric(string ipAddress, bool isAccessible, bool accessibleChanged, long latency)
{
    //first our sampled metric for latency...
    var metric = SampledMetric.Register("Loupe", MetricCategory, "latency", SamplingType.RawCount, "ms",
                                        "Latency", "The latency of the connection to this endpoint (if available)", ipAddress);

    if (isAccessible)
    {
        metric.WriteSample(latency);
    }
    else
    {
        //write a zero latency sample, but credit it to the timestamp where we initiated not now.
        metric.WriteSample(0, DateTimeOffset.Now.AddMilliseconds(-1 * latency));
    }

    // Only emit the connectivity event when the up/down state actually changed.
    if (accessibleChanged)
    {
        EventMetric.Write(new ConnectivityEventMetric(ipAddress, isAccessible));
    }
}
/// <summary>
/// Performs application-defined tasks associated with freeing, releasing, or resetting unmanaged resources.
/// </summary>
/// <filterpriority>2</filterpriority>
public void Dispose()
{
    try
    {
        //Close out our duration and log the final result.  All other timers should have been stopped symmetrically.
        TotalDuration = StopAndRecordDuration(m_RequestTimer);

        //and now you can log us.
        if (m_Suppressed == false)
        {
            EventMetric.Write(this);
        }
    }
    catch (Exception ex)
    {
        // Deliberately swallow: Dispose must not throw.  KeepAlive just references
        // ex so release builds don't flag it as unused.
        GC.KeepAlive(ex);
#if DEBUG
        Log.RecordException(ex, "System", true);
#endif
    }
}
/// <summary>
/// Performance test for event metrics declared via attributes: registers the metric
/// from the decorated type, then writes LoopsPerEventTest samples in a timed loop
/// and reports the throughput.
/// </summary>
public void EventMetricsByAttributesPerformanceTest()
{
    EventMetric.Register(typeof(UserPerformanceObject));
    Trace.TraceInformation("Event metrics registered by attributes.");

    UserPerformanceObject eventObject = new UserPerformanceObject("AttributesPerformanceTest");
    DateTimeOffset operationStart = DateTimeOffset.UtcNow;
    DateTimeOffset operationEnd = operationStart.AddMilliseconds(1234);
    eventObject.SetEventData(@"C:\Dummy\File\Name.txt", UserFileOperation.Write, operationStart, operationEnd);

    //first, lets get everything to flush so we have our best initial state.
    Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Preparing for Test", "Flushing queue");

    //now that we know it's flushed everything, lets do our timed loop.
    DateTimeOffset startTime = DateTimeOffset.UtcNow;
    for (int curMessage = 0; curMessage < LoopsPerEventTest; curMessage++)
    {
        EventMetricDefinition.Write(eventObject);
    }
    DateTimeOffset messageEndTime = DateTimeOffset.UtcNow;

    //one wait for commit message to force the buffer to flush.
    Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Waiting for Samples to Commit", null);

    //and store off our time
    DateTimeOffset endTime = DateTimeOffset.UtcNow;
    TimeSpan testDuration = endTime - startTime;
    TimeSpan loopDuration = messageEndTime - startTime;
    // NOTE(review): the loop writes one sample per iteration, but this total multiplies by
    // MessagesPerEventLoop - confirm the per-message averages below are computed as intended.
    const int messagesPerTest = LoopsPerEventTest * MessagesPerEventLoop;
    Trace.TraceInformation("Event Metrics by Attributes Test committed {0:N0} events in {1:F3} ms (average {2:F4} ms per message).  Average loop time {3:F4} ms ({4} values per message) and final flush time {5:F3} ms.",
                           messagesPerTest, testDuration.TotalMilliseconds, (testDuration.TotalMilliseconds / messagesPerTest),
                           (loopDuration.TotalMilliseconds / LoopsPerEventTest), ValuesPerEventMessage, (endTime - messageEndTime).TotalMilliseconds);
}
/// <summary>
/// Writes one fully-populated sample against the "Manual" event metric obtained
/// via the AddOrGet convenience API.
/// </summary>
public void RecordEventMetric()
{
    //Internally we want to make this comparable to the reflection test, just varying the part that use reflection.
    EventMetric thisExperimentMetric = EventMetric.AddOrGet("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", "RecordEventMetric");

    //write out one sample
    EventMetricSample newSample = thisExperimentMetric.CreateSample();
    newSample.SetValue("short_average", 1);
    newSample.SetValue("short_sum", 1);
    newSample.SetValue("short_runningaverage", 1);
    newSample.SetValue("short_runningsum", 1);
    newSample.SetValue("ushort_average", (ushort)1);
    newSample.SetValue("ushort_sum", (ushort)1);
    newSample.SetValue("int_average", 1);
    newSample.SetValue("int_sum", 1);
    newSample.SetValue("uint_average", (uint)1);
    newSample.SetValue("uint_sum", (uint)1);
    newSample.SetValue("long_average", 1);
    newSample.SetValue("long_sum", 1);
    newSample.SetValue("ulong_average", (ulong)1);
    newSample.SetValue("ulong_sum", (ulong)1);
    newSample.SetValue("decimal_average", 1);
    newSample.SetValue("decimal_sum", 1);
    newSample.SetValue("double_average", 1);
    newSample.SetValue("double_sum", 1);
    newSample.SetValue("float_average", 1);
    newSample.SetValue("float_sum", 1);
    newSample.SetValue("timespan_average", new TimeSpan(1));
    newSample.SetValue("timespan_sum", new TimeSpan(1));
    newSample.SetValue("timespan_runningaverage", new TimeSpan(1));
    newSample.SetValue("timespan_runningsum", new TimeSpan(1));
    newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", 1));
    newSample.SetValue("system.enum", (UserDataEnumeration)1);
    newSample.Write(); //only now does it get written because we had to wait until you populated the metrics
}
/// <summary>
/// Helper method to store the metric data
/// </summary>
/// <param name="metric">The event metric instance to sample.</param>
/// <param name="duration">How long the method took to execute.</param>
private void CreateSample(EventMetric metric, TimeSpan duration)
{
    // Populate one sample with the timing plus the identifying method coordinates.
    EventMetricSample methodSample = metric.CreateSample();
    methodSample.SetValue("duration", duration);
    methodSample.SetValue("namespace", _namespace);
    methodSample.SetValue("class", ClassName);
    methodSample.SetValue("method", MethodName);
    methodSample.SetValue("fullname", _namespace + "." + QualifiedMethodName);
    methodSample.Write();
}
/// <summary>
/// Records the metrics for this request
/// </summary>
/// <remarks>Stops the request timer and writes this object as an event metric.</remarks>
public void Record()
{
    _timer.Stop();
    EventMetric.Write(this);
}