Example No. 1
        /// <summary>
        /// Record an event metric using a programmatic declaration
        /// </summary>
        /// <param name="pagesLoaded">The number of pages currently loaded in the cache</param>
        public static void RecordCacheMetric(int pagesLoaded)
        {
            EventMetricDefinition cacheMetric;

            //So we can be called multiple times, check whether the definition already exists.
            if (EventMetricDefinition.TryGetValue("GibraltarSample", "Database.Engine", "Cache", out cacheMetric) == false)
            {
                cacheMetric = new EventMetricDefinition("GibraltarSample", "Database.Engine", "Cache");

                //add the values (that are part of the definition)
                cacheMetric.AddValue("pages", typeof(int), SummaryFunction.Average, "Pages", "Pages in Cache", "Total number of pages in cache");
                cacheMetric.AddValue("size", typeof(int), SummaryFunction.Average, "Bytes", "Cache Size", "Total number of bytes used by pages in cache");

                //And now that we're done, we need to register this definition.  This locks the definition
                //and makes it go live.  Note that it's passed by ref: if another thread registered the same metric,
                //we'll get back the registered object (whoever won the race), not necessarily the one we just created.
                EventMetricDefinition.Register(ref cacheMetric);
            }

            //Now we can get the specific metric we want to record samples under (this is an instance of the definition)
            EventMetric cacheEventMetric = EventMetric.Register(cacheMetric, null);

            //Now create a sample, populate its values, and write it.
            EventMetricSample newSample = cacheEventMetric.CreateSample();

            newSample.SetValue("pages", pagesLoaded);
            newSample.SetValue("size", pagesLoaded * 8196);
            newSample.Write();
        }
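
A minimal call site for the helper above, with a hypothetical page count:

        //Hypothetical usage: record a cache snapshot after loading 100 pages.
        RecordCacheMetric(100);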
Example No. 2
        public void EventMetricsByMethodsPerformanceTest()
        {
            EventMetricDefinition eventDefinition;

            if (EventMetricDefinition.TryGetValue("PerformanceTestsMetrics", "Performance.EventMetrics.Methods", "UserEvent", out eventDefinition) == false)
            {
                eventDefinition             = new EventMetricDefinition("PerformanceTestsMetrics", "Performance.EventMetrics.Methods", "UserEvent");
                eventDefinition.Caption     = "User Event";
                eventDefinition.Description = "Unit test event metric with typical data.";
                eventDefinition.AddValue("fileName", typeof(string), SummaryFunction.Count, null, "File name", "The name of the file");
                eventDefinition.AddValue("operation", typeof(UserFileOperation), SummaryFunction.Count, null, "Operation", "The type of file operation being performed.");
                eventDefinition.AddValue("duration", typeof(TimeSpan), SummaryFunction.Average, "ms", "Duration", "The duration for this file operation.");
                EventMetricDefinition.Register(ref eventDefinition, "duration");
            }

            Assert.IsNotNull(eventDefinition);
            Assert.IsTrue(eventDefinition.IsReadOnly);

            Trace.TraceInformation("Event metric definition registered by methods.");

            EventMetric eventMetric = EventMetric.Register(eventDefinition, "MethodsPerformanceTest");

            Assert.IsNotNull(eventMetric);

            string         fileName       = @"C:\Dummy\File\Name.txt";
            DateTimeOffset operationStart = DateTimeOffset.UtcNow;
            DateTimeOffset operationEnd   = operationStart.AddMilliseconds(1234);

            //First, let's get everything to flush so we have our best initial state.
            Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Preparing for Test", "Flushing queue");

            //Now that we know everything has flushed, let's do our timed loop.
            DateTimeOffset startTime = DateTimeOffset.UtcNow;

            for (int curMessage = 0; curMessage < LoopsPerEventTest; curMessage++)
            {
                EventMetricSample eventSample = eventMetric.CreateSample();
                eventSample.SetValue("fileName", fileName);
                eventSample.SetValue("operation", UserFileOperation.Write);
                eventSample.SetValue("duration", operationEnd - operationStart);
                eventSample.Write();
            }
            DateTimeOffset messageEndTime = DateTimeOffset.UtcNow;

            //One WaitForCommit message to force the buffer to flush.
            Log.Information(LogWriteMode.WaitForCommit, "Test.Agent.Metrics.Performance", "Waiting for Samples to Commit", null);

            //and store off our time
            DateTimeOffset endTime = DateTimeOffset.UtcNow;

            TimeSpan  testDuration    = endTime - startTime;
            TimeSpan  loopDuration    = messageEndTime - startTime;
            const int messagesPerTest = LoopsPerEventTest * MessagesPerEventLoop;

            Trace.TraceInformation("Event Metrics by Methods Test committed {0:N0} events in {1:F3} ms (average {2:F4} ms per message).  Average loop time {3:F4} ms ({4} values per message) and final flush time {5:F3} ms.",
                                   messagesPerTest, testDuration.TotalMilliseconds, (testDuration.TotalMilliseconds / messagesPerTest),
                                   (loopDuration.TotalMilliseconds / LoopsPerEventTest), ValuesPerEventMessage,
                                   (endTime - messageEndTime).TotalMilliseconds);
        }
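
The loop and message counts above come from constants on the test class; one hypothetical set of values consistent with the arithmetic in the trace message:

        //Hypothetical test constants assumed by the method above.
        private const int LoopsPerEventTest = 10000;   //iterations of the timed sample loop
        private const int MessagesPerEventLoop = 1;    //samples written per loop iteration
        private const int ValuesPerEventMessage = 3;   //values set on each sample (fileName, operation, duration)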
Example No. 3
        private void StopAndRecordMetric()
        {
            //Stop the timer and record our duration.
            if (m_Timer == null)
            {
                m_Duration = new TimeSpan(0);
            }
            else
            {
                m_Timer.Stop();
                m_Duration = m_Timer.Elapsed;
            }

            //Get the METRIC DEFINITION
            IMetricDefinition     metricDefinition;
            EventMetricDefinition eventDefinition;

            if (Log.Metrics.TryGetValue(MetricTypeName, m_Category, MetricCounterName, out metricDefinition) == false)
            {
                //it doesn't exist yet - add it
                eventDefinition             = new EventMetricDefinition(MetricTypeName, m_Category, MetricCounterName);
                eventDefinition.Description = MetricDefinitionDescription;

                EventMetricValueDefinitionCollection valueDefinitionCollection = (EventMetricValueDefinitionCollection)eventDefinition.Values;
                valueDefinitionCollection.Add("operationname", typeof(string), "Operation Name", "The operation that was executed.");

                valueDefinitionCollection.Add("duration", typeof(TimeSpan), "Duration", "The duration the operation executed.");
                ((EventMetricValueDefinition)eventDefinition.Values["duration"]).UnitCaption = "Milliseconds";
                eventDefinition.DefaultValue = eventDefinition.Values["duration"];

                //and don't forget to register it!
                eventDefinition = eventDefinition.Register();
            }
            else
            {
                eventDefinition = (EventMetricDefinition)metricDefinition;
            }

            //Get the METRIC
            IMetric     metric;
            EventMetric eventMetric;

            if (eventDefinition.Metrics.TryGetValue(null, out metric) == false)
            {
                eventMetric = new EventMetric(eventDefinition, (string)null);
            }
            else
            {
                eventMetric = (EventMetric)metric;
            }


            //and finally we can RECORD THE SAMPLE.
            EventMetricSample metricSample = eventMetric.CreateSample();

            metricSample.SetValue("operationname", OperationName);
            metricSample.SetValue("duration", Duration);
            metricSample.Write();
        }
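
StopAndRecordMetric assumes an enclosing class that owns m_Timer (a Stopwatch), the metric naming constants, and the OperationName and Duration properties. A hypothetical start/stop pairing:

        //Hypothetical usage: start the timer when the operation begins...
        m_Timer = Stopwatch.StartNew();
        PerformOperation(); //hypothetical operation being measured

        //...then stop and record once it completes.
        StopAndRecordMetric();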
Example No. 4
        private void SynchronizedMetricRegistration()
        {
            string name = Thread.CurrentThread.Name;

            Trace.TraceInformation("{0} started", name);
            EventMetricDefinition newDefinition = new EventMetricDefinition("EventMetricTests", "Gibraltar.Monitor.Test", "Sync");

            newDefinition.AddValue("delta", typeof(double), SummaryFunction.RunningSum, null, "Delta", "The applied delta");

            try
            {
                Interlocked.Increment(ref m_ThreadCounter);
                lock (m_SyncLock)
                {
                    // Do nothing, just release it immediately.
                }

                EventMetricDefinition.Register(ref newDefinition);

                EventMetric metric = EventMetric.Register(newDefinition, name);

                Trace.TraceInformation("{0} completed registration of event metric", name);

                EventMetricSample sample = metric.CreateSample();
                sample.SetValue("delta", Thread.CurrentThread.ManagedThreadId);
                sample.Write();
            }
            catch (Exception ex)
            {
                m_ThreadFailed = true;
                Trace.TraceError("{0} got {1}: {2}", name, ex.GetType().Name, ex.Message);
            }

            Interlocked.Decrement(ref m_ThreadCounter);
        }
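
The synchronization fields referenced above belong to the test fixture; a minimal sketch with assumed types and initial values:

        //Hypothetical fixture state assumed by SynchronizedMetricRegistration.
        private static int m_ThreadCounter;                        //threads currently running
        private static readonly object m_SyncLock = new object();  //held by the test to gate thread start
        private static volatile bool m_ThreadFailed;               //set when any thread throws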
Example No. 5
        /// <summary>
        /// Write a metric sample
        /// </summary>
        private void WriteMetric()
        {
            EventMetricSample sample = Metric.CreateSample();
            var elapsed = Timer.Elapsed;

            sample.SetValue(DurationCaption, elapsed);
            sample.Write();

            if (WarningTimeSpan.HasValue && WarningTimeSpan.Value > TimeSpan.Zero && WarningTimeSpan.Value < elapsed)
            {
                var threshold   = WarningTimeSpan.Value.TotalSeconds;
                var caption     = _logCategory + " exceeds " + Math.Round(threshold, MaxDecimalDigits) + " seconds";
                var description = "Elapsed time = " + Math.Round(elapsed.TotalSeconds, MaxDecimalDigits) + " seconds";
                Log.Warning(null, _logCategory, caption, description);
            }
        }
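
WriteMetric depends on state held by its enclosing timer class; a minimal sketch of those members, with assumed types and values:

        //Hypothetical members assumed by WriteMetric above.
        private EventMetric Metric;                          //the registered event metric instance
        private readonly Stopwatch Timer = Stopwatch.StartNew();
        private const string DurationCaption = "duration";   //value name on the metric definition
        private TimeSpan? WarningTimeSpan;                    //optional threshold for warning messages
        private string _logCategory;                          //category used for the warning log message
        private const int MaxDecimalDigits = 3;               //rounding applied to seconds in log output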
        public void RecordEventMetric()
        {
            //Internally we want to make this comparable to the reflection test, just varying the part that uses reflection.
            EventMetric thisExperimentMetric = EventMetric.AddOrGet("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", "RecordEventMetric");

            //write out one sample
            EventMetricSample newSample = thisExperimentMetric.CreateSample();

            newSample.SetValue("short_average", 1);
            newSample.SetValue("short_sum", 1);
            newSample.SetValue("short_runningaverage", 1);
            newSample.SetValue("short_runningsum", 1);
            newSample.SetValue("ushort_average", (ushort)1);
            newSample.SetValue("ushort_sum", (ushort)1);
            newSample.SetValue("int_average", 1);
            newSample.SetValue("int_sum", 1);
            newSample.SetValue("uint_average", (uint)1);
            newSample.SetValue("uint_sum", (uint)1);
            newSample.SetValue("long_average", 1);
            newSample.SetValue("long_sum", 1);
            newSample.SetValue("ulong_average", (ulong)1);
            newSample.SetValue("ulong_sum", (ulong)1);
            newSample.SetValue("decimal_average", 1);
            newSample.SetValue("decimal_sum", 1);
            newSample.SetValue("double_average", 1);
            newSample.SetValue("double_sum", 1);
            newSample.SetValue("float_average", 1);
            newSample.SetValue("float_sum", 1);
            newSample.SetValue("timespan_average", new TimeSpan(1));
            newSample.SetValue("timespan_sum", new TimeSpan(1));
            newSample.SetValue("timespan_runningaverage", new TimeSpan(1));
            newSample.SetValue("timespan_runningsum", new TimeSpan(1));
            newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", 1));
            newSample.SetValue("system.enum", (UserDataEnumeration)1);
            newSample.Write(); //the sample is only written now, once all of its values have been populated
        }
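
The value names set above must match an event metric definition registered elsewhere in the test fixture; a hypothetical, abbreviated fragment of that registration:

            //Hypothetical: the fixture registers the "Manual" definition whose value
            //names match the SetValue calls above (abbreviated to two entries).
            EventMetricDefinition manualDefinition = new EventMetricDefinition("EventMetricTests", "Gibraltar.Monitor.Test", "Manual");
            manualDefinition.AddValue("short_average", typeof(short), SummaryFunction.Average, null, "Short Average", "Short value averaged per sample");
            manualDefinition.AddValue("short_sum", typeof(short), SummaryFunction.Sum, null, "Short Sum", "Short value summed per sample");
            //...the remaining value names follow the same pattern...
            EventMetricDefinition.Register(ref manualDefinition);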
        public void RecordEventMetricPerformanceTest()
        {
            //Internally we want to make this comparable to the reflection test, just varying the part that uses reflection.
            EventMetric thisExperimentMetric =
                EventMetric.AddOrGet("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", "RecordEventMetricPerformanceTest");

            //and we're going to write out a BUNCH of samples
            Trace.TraceInformation("Starting performance test");
            DateTime curTime = DateTime.Now; //for timing how fast we are

            for (int curSample = 0; curSample < 32000; curSample++)
            {
                EventMetricSample newSample = thisExperimentMetric.CreateSample();
                newSample.SetValue("short_average", curSample);
                newSample.SetValue("short_sum", curSample);
                newSample.SetValue("short_runningaverage", curSample);
                newSample.SetValue("short_runningsum", curSample);
                newSample.SetValue("ushort_average", (ushort)curSample);
                newSample.SetValue("ushort_sum", (ushort)curSample);
                newSample.SetValue("int_average", curSample);
                newSample.SetValue("int_sum", curSample);
                newSample.SetValue("uint_average", (uint)curSample);
                newSample.SetValue("uint_sum", (uint)curSample);
                newSample.SetValue("long_average", curSample);
                newSample.SetValue("long_sum", curSample);
                newSample.SetValue("ulong_average", (ulong)curSample);
                newSample.SetValue("ulong_sum", (ulong)curSample);
                newSample.SetValue("decimal_average", curSample);
                newSample.SetValue("decimal_sum", curSample);
                newSample.SetValue("double_average", curSample);
                newSample.SetValue("double_sum", curSample);
                newSample.SetValue("float_average", curSample);
                newSample.SetValue("float_sum", curSample);
                newSample.SetValue("timespan_average", new TimeSpan(curSample));
                newSample.SetValue("timespan_sum", new TimeSpan(curSample));
                newSample.SetValue("timespan_runningaverage", new TimeSpan(curSample));
                newSample.SetValue("timespan_runningsum", new TimeSpan(curSample));
                newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", curSample));
                newSample.SetValue("system.enum", (UserDataEnumeration)curSample);

                newSample.Write(); //the sample is only written now, once all of its values have been populated
            }
            TimeSpan duration = DateTime.Now - curTime;

            Trace.TraceInformation("Completed performance test in {0} milliseconds for 32,000 samples", duration.TotalMilliseconds);

            Log.Write(LogMessageSeverity.Verbose, LogWriteMode.WaitForCommit, null, "Unit Tests", "Event Metrics performance test flush", null);
        }
        public void RecordEventMetric()
        {
            // Internally we want to make this comparable to the reflection test, just varying the part that uses reflection.
            EventMetricDefinition metricDefinition;

            Assert.IsTrue(EventMetricDefinition.TryGetValue("EventMetricTests", "Gibraltar.Monitor.Test", "Manual", out metricDefinition));
            Assert.IsNotNull(metricDefinition);

            EventMetric thisExperimentMetric = EventMetric.Register(metricDefinition, "RecordEventMetric");

            Assert.IsNotNull(thisExperimentMetric);

            // To write a sample manually, we must first create an empty sample for this event metric instance.
            EventMetricSample newSample = thisExperimentMetric.CreateSample();

            // Then we set the values.
            newSample.SetValue("short_average", 1);
            newSample.SetValue("short_sum", 1);
            newSample.SetValue("short_runningaverage", 1);
            newSample.SetValue("short_runningsum", 1);
            newSample.SetValue("ushort_average", (ushort)1);
            newSample.SetValue("ushort_sum", (ushort)1);
            newSample.SetValue("int_average", 1);
            newSample.SetValue("int_sum", 1);
            newSample.SetValue("uint_average", (uint)1);
            newSample.SetValue("uint_sum", (uint)1);
            newSample.SetValue("long_average", 1);
            newSample.SetValue("long_sum", 1);
            newSample.SetValue("ulong_average", (ulong)1);
            newSample.SetValue("ulong_sum", (ulong)1);
            newSample.SetValue("decimal_average", 1);
            newSample.SetValue("decimal_sum", 1);
            newSample.SetValue("double_average", 1);
            newSample.SetValue("double_sum", 1);
            newSample.SetValue("float_average", 1);
            newSample.SetValue("float_sum", 1);
            newSample.SetValue("timespan_average", new TimeSpan(1));
            newSample.SetValue("timespan_sum", new TimeSpan(1));
            newSample.SetValue("timespan_runningaverage", new TimeSpan(1));
            newSample.SetValue("timespan_runningsum", new TimeSpan(1));
            newSample.SetValue("string", string.Format(CultureInfo.CurrentCulture, "The current manual sample is {0}", 1));
            newSample.SetValue("system.enum", (UserDataEnumeration)1);

            // And finally, tell the sample to write itself to the Gibraltar log.
            newSample.Write();
        }