private void Dynamic_All(TraceEvent obj)
{
    // If we are paused, ignore the event.
    // There's a potential race here between the two tasks but not a huge deal if we miss by one event.
    if (pauseCmdSet)
    {
        return;
    }

    if (obj.EventName.Equals("EventCounters"))
    {
        IDictionary<string, object> payloadVal = (IDictionary<string, object>)(obj.PayloadValue(0));
        IDictionary<string, object> payloadFields = (IDictionary<string, object>)(payloadVal["Payload"]);

        // If it's not a counter we asked for, ignore it.
        if (!filter.Filter(obj.ProviderName, payloadFields["Name"].ToString()))
        {
            return;
        }

        // There really isn't a great way to tell whether an EventCounter payload is an instance of
        // IncrementingCounterPayload or CounterPayload, so here we check the number of fields
        // to distinguish the two.
        ICounterPayload payload = payloadFields.Count == 6
            ? (ICounterPayload)new IncrementingCounterPayload(payloadFields)
            : (ICounterPayload)new CounterPayload(payloadFields);

        writer.Update(obj.ProviderName, payload);
    }
}
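// Note (added): a later revision of this handler, shown below as DynamicAllMonitor, avoids the
// field-count heuristic and distinguishes the two payload kinds by the "CounterType" field instead
// (where _interval is the requested refresh interval used by that variant):
//
//     ICounterPayload payload = payloadFields["CounterType"].Equals("Sum")
//         ? (ICounterPayload)new IncrementingCounterPayload(payloadFields, _interval)
//         : (ICounterPayload)new CounterPayload(payloadFields);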
public async Task SnapshotMetrics(Stream outputStream, CancellationToken token)
{
    Dictionary<MetricKey, Queue<ICounterPayload>> copy = null;
    lock (_allMetrics)
    {
        copy = new Dictionary<MetricKey, Queue<ICounterPayload>>();
        foreach (var metricGroup in _allMetrics)
        {
            copy.Add(metricGroup.Key, new Queue<ICounterPayload>(metricGroup.Value));
        }
    }

    using var writer = new StreamWriter(outputStream, new UTF8Encoding(encoderShouldEmitUTF8Identifier: false), bufferSize: 1024, leaveOpen: true);
    writer.NewLine = "\n";

    foreach (var metricGroup in copy)
    {
        ICounterPayload metricInfo = metricGroup.Value.First();
        string metricName = GetPrometheusMetric(metricInfo, out string metricValue);
        string metricType = "gauge";

        //TODO Some clr metrics claim to be incrementing, but are really gauges.

        await writer.WriteLineAsync(FormattableString.Invariant($"# HELP {metricName} {metricInfo.DisplayName}"));
        await writer.WriteLineAsync(FormattableString.Invariant($"# TYPE {metricName} {metricType}"));

        foreach (var metric in metricGroup.Value)
        {
            await WriteMetricDetails(writer, metric, metricName, metricValue);
        }
    }
}
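// Illustrative output (added): for a System.Runtime "cpu-usage" counter the snapshot emits
// Prometheus exposition-format text roughly like the following; the exact metric-name suffix
// depends on the KnownUnits table used by GetPrometheusMetric, which is not shown here.
//
//     # HELP systemruntime_cpu_usage CPU Usage
//     # TYPE systemruntime_cpu_usage gauge
//     systemruntime_cpu_usage 2.5 1616178072000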
public void Log(ICounterPayload counter)
{
    //CONSIDER
    //Ideally this would be an asynchronous api, but making this async would extend the lifetime of writing to the stream
    //beyond the lifetime of the Counters pipeline.
    SerializeCounter(_stream, counter);
}
private static async Task WriteMetricDetails(
    StreamWriter writer,
    ICounterPayload metric,
    string metricName,
    string metricValue)
{
    await writer.WriteAsync(metricName);
    await writer.WriteLineAsync(FormattableString.Invariant($" {metricValue} {new DateTimeOffset(metric.Timestamp).ToUnixTimeMilliseconds()}"));
}
private static void Dynamic_All(TraceEvent obj)
{
    if (obj.EventName.Equals("EventCounters"))
    {
        IDictionary<string, object> payloadVal = (IDictionary<string, object>)(obj.PayloadValue(0));
        IDictionary<string, object> payloadFields = (IDictionary<string, object>)(payloadVal["Payload"]);

        ICounterPayload payload = payloadFields.Count == 6
            ? (ICounterPayload)new IncrementingCounterPayload(payloadFields)
            : (ICounterPayload)new CounterPayload(payloadFields);

        string displayName = payload.GetDisplay();
        if (string.IsNullOrEmpty(displayName))
        {
            displayName = payload.GetName();
        }

        if (string.Compare(displayName, "GC Heap Size") == 0 && Convert.ToInt32(payload.GetValue()) > threshold)
        {
            Console.WriteLine("Memory threshold has been breached....");

            System.Diagnostics.Process process = System.Diagnostics.Process.GetProcessById(pid);
            System.Diagnostics.ProcessModule coreclr = process.Modules.Cast<System.Diagnostics.ProcessModule>().FirstOrDefault(m => string.Equals(m.ModuleName, "libcoreclr.so"));
            if (coreclr == null)
            {
                Console.WriteLine("Unable to locate .NET runtime associated with this process!");
                Environment.Exit(1);
            }
            else
            {
                string runtimeDirectory = Path.GetDirectoryName(coreclr.FileName);
                string createDumpPath = Path.Combine(runtimeDirectory, "createdump");
                if (!File.Exists(createDumpPath))
                {
                    // Use string interpolation so the directory actually appears in the message.
                    Console.WriteLine($"Unable to locate 'createdump' tool in '{runtimeDirectory}'");
                    Environment.Exit(1);
                }

                var createdump = new System.Diagnostics.Process()
                {
                    StartInfo = new System.Diagnostics.ProcessStartInfo()
                    {
                        FileName = createDumpPath,
                        Arguments = $"--name coredump --withheap {pid}",
                    },
                    EnableRaisingEvents = true,
                };

                createdump.Start();
                createdump.WaitForExit();

                Environment.Exit(0);
            }
        }
    }
}
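// Note (added): the lookup above assumes a Linux target. The runtime's native module is named
// "libcoreclr.so" only on Linux; on Windows it is "coreclr.dll" and on macOS "libcoreclr.dylib".
// The sample also assumes the 'createdump' helper that ships with the runtime is present in the
// same directory as that module.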
public void Log(ICounterPayload metric)
{
    string key = CreateKey(metric);

    _metrics[key] = metric;

    // Complete the task source if the last expected key was removed.
    if (_expectedCounters.Remove(key) && _expectedCounters.Count == 0)
    {
        _foundExpectedCountersSource.TrySetResult(null);
    }
}
public static string GetDisplay(this ICounterPayload counterPayload)
{
    if (counterPayload.CounterType == CounterType.Rate)
    {
        return $"{counterPayload.DisplayName} ({counterPayload.Unit} / {counterPayload.Interval} sec)";
    }
    if (!string.IsNullOrEmpty(counterPayload.Unit))
    {
        return $"{counterPayload.DisplayName} ({counterPayload.Unit})";
    }
    return $"{counterPayload.DisplayName}";
}
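// Illustrative results of the formatting above (payload values are hypothetical):
//   CounterType.Metric, DisplayName = "CPU Usage",       Unit = "%"               -> "CPU Usage (%)"
//   CounterType.Rate,   DisplayName = "Allocation Rate", Unit = "B", Interval = 1 -> "Allocation Rate (B / 1 sec)"
//   CounterType.Metric, DisplayName = "Working Set",     Unit = ""                -> "Working Set"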
public void CounterPayloadReceived(string providerName, ICounterPayload payload, bool _)
{
    if (builder.Length > flushLength)
    {
        File.AppendAllText(_output, builder.ToString());
        builder.Clear();
    }

    builder.Append($"{{ \"timestamp\": \"{DateTime.Now.ToString("u")}\", ");
    builder.Append($" \"provider\": \"{providerName}\", ");
    builder.Append($" \"name\": \"{payload.GetDisplay()}\", ");
    builder.Append($" \"counterType\": \"{payload.GetCounterType()}\", ");
    builder.Append($" \"value\": {payload.GetValue()} }},");
}
public void CounterPayloadReceived(string providerName, ICounterPayload payload, bool _)
{
    if (builder.Length > flushLength)
    {
        File.AppendAllText(_output, builder.ToString());
        builder.Clear();
    }

    builder.Append(DateTime.UtcNow.ToString() + ",");
    builder.Append(providerName + ",");
    builder.Append(payload.GetDisplay() + ",");
    builder.Append(payload.GetCounterType() + ",");
    builder.Append(payload.GetValue() + "\n");
}
public void CounterPayloadReceived(string providerName, ICounterPayload payload, bool _)
{
    if (builder.Length > flushLength)
    {
        File.AppendAllText(_output, builder.ToString());
        builder.Clear();
    }

    builder
        .Append(DateTime.UtcNow.ToString()).Append(',')
        .Append(providerName).Append(',')
        .Append(payload.GetDisplay()).Append(',')
        .Append(payload.GetCounterType()).Append(',')
        .Append(payload.GetValue().ToString(CultureInfo.InvariantCulture)).Append('\n');
}
public void CounterPayloadReceived(string providerName, ICounterPayload payload, bool _)
{
    if (builder.Length > flushLength)
    {
        File.AppendAllText(_output, builder.ToString());
        builder.Clear();
    }

    builder
        .Append("{ \"timestamp\": \"").Append(DateTime.Now.ToString("u")).Append("\", ")
        .Append(" \"provider\": \"").Append(providerName).Append("\", ")
        .Append(" \"name\": \"").Append(payload.GetDisplay()).Append("\", ")
        .Append(" \"counterType\": \"").Append(payload.GetCounterType()).Append("\", ")
        .Append(" \"value\": ").Append(payload.GetValue().ToString(CultureInfo.InvariantCulture)).Append(" },");
}
public void AddMetric(ICounterPayload metric)
{
    lock (_allMetrics)
    {
        var metricKey = new MetricKey(metric);
        if (!_allMetrics.TryGetValue(metricKey, out Queue<ICounterPayload> metrics))
        {
            metrics = new Queue<ICounterPayload>();
            _allMetrics.Add(metricKey, metrics);
        }
        metrics.Enqueue(metric);
        if (metrics.Count > _maxMetricCount)
        {
            metrics.Dequeue();
        }
    }
}
private static string GetPrometheusMetric(ICounterPayload metric, out string metricValue)
{
    string unitSuffix = string.Empty;
    if ((metric.Unit != null) && (!KnownUnits.TryGetValue(metric.Unit, out unitSuffix)))
    {
        //TODO The prometheus data model does not allow certain characters. Units we are not expecting could cause a scrape failure.
        unitSuffix = "_" + metric.Unit;
    }

    double value = metric.Value;
    if (string.Equals(metric.Unit, "MB", StringComparison.OrdinalIgnoreCase))
    {
        value *= 1_000_000; //Note that the metric uses MB not MiB
    }

    metricValue = value.ToString(CultureInfo.InvariantCulture);
    return FormattableString.Invariant($"{metric.Provider.Replace(".", string.Empty).ToLowerInvariant()}_{metric.Name.Replace('-', '_')}{unitSuffix}");
}
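// Illustrative name mappings produced by the expression above (the per-unit suffix comes from the
// KnownUnits table, which is not shown here, so suffixes are omitted):
//   Provider "System.Runtime",               Name "cpu-usage"           -> "systemruntime_cpu_usage..."
//   Provider "Microsoft.AspNetCore.Hosting", Name "requests-per-second" -> "microsoftaspnetcorehosting_requests_per_second..."
// A metric reported in MB, e.g. gc-heap-size = 12 MB, is emitted with the value 12000000.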
protected override void SerializeCounter(Stream stream, ICounterPayload counter)
{
    stream.WriteByte(StreamingLogger.JsonSequenceRecordSeparator);
    using (var writer = new Utf8JsonWriter(stream, new JsonWriterOptions { Indented = false }))
    {
        writer.WriteStartObject();
        writer.WriteString("timestamp", counter.Timestamp);
        writer.WriteString("provider", counter.Provider);
        writer.WriteString("name", counter.Name);
        writer.WriteString("displayName", counter.DisplayName);
        writer.WriteString("unit", counter.Unit);
        writer.WriteString("counterType", counter.CounterType.ToString());

        //Some versions of .Net return invalid metric numbers. See https://github.com/dotnet/runtime/pull/46938
        writer.WriteNumber("value", double.IsNaN(counter.Value) ? 0.0 : counter.Value);

        writer.WriteEndObject();
    }
    stream.WriteByte((byte)'\n');
}
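// Wire-format note (added): each counter is written as one JSON Text Sequence record, i.e. a record
// separator byte (assumed here to be 0x1E per RFC 7464, the usual value of a JsonSequenceRecordSeparator
// constant) followed by a single-line JSON object and a trailing '\n'. An illustrative record:
//
//     <RS>{"timestamp":"2021-03-19T18:21:12.0000000+00:00","provider":"System.Runtime","name":"cpu-usage","displayName":"CPU Usage","unit":"%","counterType":"Metric","value":2.5}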
private void DynamicAllMonitor(TraceEvent obj)
{
    // If we are paused, ignore the event.
    // There's a potential race here between the two tasks but not a huge deal if we miss by one event.
    _renderer.ToggleStatus(pauseCmdSet);

    if (obj.EventName.Equals("EventCounters"))
    {
        IDictionary<string, object> payloadVal = (IDictionary<string, object>)(obj.PayloadValue(0));
        IDictionary<string, object> payloadFields = (IDictionary<string, object>)(payloadVal["Payload"]);

        // If it's not a counter we asked for, ignore it.
        if (!filter.Filter(obj.ProviderName, payloadFields["Name"].ToString()))
        {
            return;
        }

        ICounterPayload payload = payloadFields["CounterType"].Equals("Sum")
            ? (ICounterPayload)new IncrementingCounterPayload(payloadFields, _interval)
            : (ICounterPayload)new CounterPayload(payloadFields);

        _renderer.CounterPayloadReceived(obj.ProviderName, payload, pauseCmdSet);
    }
}
public bool HasSatisfiedCondition(ICounterPayload payload)
{
    long payloadTimestampTicks = payload.Timestamp.Ticks;
    long payloadIntervalTicks = (long)(payload.Interval * TimeSpan.TicksPerSecond);

    if (!_valueFilter(payload.Value))
    {
        // Series was broken; reset state.
        _latestTicks = null;
        _targetTicks = null;
        return false;
    }
    else if (!_targetTicks.HasValue)
    {
        // This is the first event in the series. Record latest and target times.
        _latestTicks = payloadTimestampTicks;
        // The target time should be the start of the first passing interval + the requisite time window.
        // The start of the first passing interval is the payload time stamp - the interval time.
        _targetTicks = payloadTimestampTicks - payloadIntervalTicks + _windowTicks;
    }
    else if (_latestTicks.Value + (1.5 * _intervalTicks) < payloadTimestampTicks)
    {
        // Detected that an event was skipped/dropped because the time between the current
        // event and the previous is more than 150% of the requested interval; consecutive
        // counter events should not have that large of an interval. Reset for current
        // event to be first event in series. Record latest and target times.
        _latestTicks = payloadTimestampTicks;
        // The target time should be the start of the first passing interval + the requisite time window.
        // The start of the first passing interval is the payload time stamp - the interval time.
        _targetTicks = payloadTimestampTicks - payloadIntervalTicks + _windowTicks;
    }
    else
    {
        // Update latest time to the current event time.
        _latestTicks = payloadTimestampTicks;
    }

    // Trigger is satisfied when the latest time is larger than the target time.
    return _latestTicks >= _targetTicks;
}
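// Worked example of the window arithmetic above (illustrative numbers, in seconds rather than ticks):
// with a 1-second counter interval and a 5-second sliding window, the first passing payload stamped at
// t = 10.0 sets target = 10.0 - 1.0 + 5.0 = 14.0. Passing payloads at t = 11.0, 12.0 and 13.0 keep
// latest < target, so the trigger is not yet satisfied; the payload at t = 14.0 makes latest >= target
// and the trigger fires. A gap larger than 1.5 * interval (for example the next payload arriving at
// t = 12.6 after one at t = 11.0) resets the series and the window starts over.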
public void Log(ICounterPayload metric)
{
    _metrics[string.Concat(metric.Provider, "_", metric.Name)] = metric;
}
public void Log(ICounterPayload metric)
{
    _store.AddMetric(metric);
}
private static string CreateKey(ICounterPayload payload)
{
    return CreateKey(payload.Provider, payload.Name);
}
private static bool CompareMetrics(ICounterPayload first, ICounterPayload second)
{
    return string.Equals(first.Name, second.Name);
}
public void ProcessEvents(string applicationName, string serviceName, int processId, string replicaName, ReplicaStatus replica, CancellationToken cancellationToken)
{
    var hasEventPipe = false;

    for (int i = 0; i < 10; ++i)
    {
        if (DiagnosticsClient.GetPublishedProcesses().Contains(processId))
        {
            hasEventPipe = true;
            break;
        }

        if (cancellationToken.IsCancellationRequested)
        {
            return;
        }

        Thread.Sleep(500);
    }

    if (!hasEventPipe)
    {
        _logger.LogInformation("Process id {PID} does not support event pipe", processId);
        return;
    }

    _logger.LogInformation("Listening for event pipe events for {ServiceName} on process id {PID}", replicaName, processId);

    // Create the logger factory for this replica
    using var loggerFactory = LoggerFactory.Create(builder => ConfigureLogging(serviceName, replicaName, builder));

    var processor = new SimpleSpanProcessor(CreateSpanExporter(serviceName, replicaName));

    var providers = new List<EventPipeProvider>()
    {
        // Runtime Metrics
        new EventPipeProvider(
            SystemRuntimeEventSourceName,
            EventLevel.Informational,
            (long)ClrTraceEventParser.Keywords.None,
            new Dictionary<string, string>() { { "EventCounterIntervalSec", "1" } }),
        new EventPipeProvider(
            MicrosoftAspNetCoreHostingEventSourceName,
            EventLevel.Informational,
            (long)ClrTraceEventParser.Keywords.None,
            new Dictionary<string, string>() { { "EventCounterIntervalSec", "1" } }),
        new EventPipeProvider(
            GrpcAspNetCoreServer,
            EventLevel.Informational,
            (long)ClrTraceEventParser.Keywords.None,
            new Dictionary<string, string>() { { "EventCounterIntervalSec", "1" } }),

        // Application Metrics
        new EventPipeProvider(
            applicationName,
            EventLevel.Informational,
            (long)ClrTraceEventParser.Keywords.None,
            new Dictionary<string, string>() { { "EventCounterIntervalSec", "1" } }),

        // Logging
        new EventPipeProvider(
            MicrosoftExtensionsLoggingProviderName,
            EventLevel.LogAlways,
            (long)(LoggingEventSource.Keywords.JsonMessage | LoggingEventSource.Keywords.FormattedMessage)),

        // Distributed Tracing

        // Activity correlation
        new EventPipeProvider(TplEventSource, keywords: 0x80, eventLevel: EventLevel.LogAlways),

        // Diagnostic source events
        new EventPipeProvider(DiagnosticSourceEventSource,
            keywords: 0x1 | 0x2,
            eventLevel: EventLevel.Verbose,
            arguments: new Dictionary<string, string>
            {
                { "FilterAndPayloadSpecs", DiagnosticFilterString }
            })
    };

    while (!cancellationToken.IsCancellationRequested)
    {
        EventPipeSession session = null;
        var client = new DiagnosticsClient(processId);

        try
        {
            session = client.StartEventPipeSession(providers);
        }
        catch (EndOfStreamException)
        {
            break;
        }
        catch (Exception ex)
        {
            if (!cancellationToken.IsCancellationRequested)
            {
                _logger.LogDebug(0, ex, "Failed to start the event pipe session");
            }

            // We can't even start the session, wait until the process boots up again to start another metrics thread
            break;
        }

        void StopSession()
        {
            try
            {
                session.Stop();
            }
            catch (EndOfStreamException)
            {
                // If the app we're monitoring exits abruptly, this may throw in which case we just swallow the exception and exit gracefully.
            }
            // We may time out if the process ended before we sent StopTracing command. We can just exit in that case.
            catch (TimeoutException)
            {
            }
            // On Unix platforms, we may actually get a PNSE since the pipe is gone with the process, and Runtime Client Library
            // does not know how to distinguish a situation where there is no pipe to begin with, or where the process has exited
            // before dotnet-counters and got rid of a pipe that once existed.
            // Since we are catching this in StopMonitor() we know that the pipe once existed (otherwise the exception would've
            // been thrown in StartMonitor directly)
            catch (PlatformNotSupportedException)
            {
            }
        }

        using var _ = cancellationToken.Register(() => StopSession());

        try
        {
            var source = new EventPipeEventSource(session.EventStream);
            var activities = new Dictionary<string, ActivityItem>();

            source.Dynamic.All += traceEvent =>
            {
                try
                {
                    // Uncomment to debug the diagnostics source event source
                    //if (traceEvent.EventName == "Message")
                    //{
                    //    _logger.LogTrace("[" + replicaName + "]:" + traceEvent.PayloadValue(0));
                    //}

                    // Distributed tracing
                    if (traceEvent.EventName == "Activity1Start/Start")
                    {
                        var listenerEventName = (string)traceEvent.PayloadByName("EventName");

                        if (traceEvent.PayloadByName("Arguments") is IDictionary<string, object>[] arguments)
                        {
                            string activityId = null;
                            string parentId = null;
                            string operationName = null;
                            string httpMethod = null;
                            string path = null;
                            string spanId = null;
                            string parentSpanId = null;
                            string traceId = null;
                            DateTime startTime = default;
                            ActivityIdFormat idFormat = default;

                            foreach (var arg in arguments)
                            {
                                var key = (string)arg["Key"];
                                var value = (string)arg["Value"];

                                if (key == "ActivityId")
                                {
                                    activityId = value;
                                }
                                else if (key == "ActivityParentId")
                                {
                                    parentId = value;
                                }
                                else if (key == "ActivityOperationName")
                                {
                                    operationName = value;
                                }
                                else if (key == "ActivitySpanId")
                                {
                                    spanId = value;
                                }
                                else if (key == "ActivityTraceId")
                                {
                                    traceId = value;
                                }
                                else if (key == "ActivityParentSpanId")
                                {
                                    parentSpanId = value;
                                }
                                else if (key == "Method")
                                {
                                    httpMethod = value;
                                }
                                else if (key == "Path")
                                {
                                    path = value;
                                }
                                else if (key == "ActivityStartTime")
                                {
                                    startTime = new DateTime(long.Parse(value), DateTimeKind.Utc);
                                }
                                else if (key == "ActivityIdFormat")
                                {
                                    idFormat = Enum.Parse<ActivityIdFormat>(value);
                                }
                            }

                            if (string.IsNullOrEmpty(activityId))
                            {
                                // Not a 3.1 application (we can detect this earlier)
                                return;
                            }

                            if (idFormat == ActivityIdFormat.Hierarchical)
                            {
                                // We need W3C to make it work
                                return;
                            }

                            // This is what open telemetry currently does
                            // https://github.com/open-telemetry/opentelemetry-dotnet/blob/4ba732af062ddc2759c02aebbc91335aaa3f7173/src/OpenTelemetry.Collector.AspNetCore/Implementation/HttpInListener.cs#L65-L92
                            var item = new ActivityItem()
                            {
                                Name = path,
                                SpanId = ActivitySpanId.CreateFromString(spanId),
                                TraceId = ActivityTraceId.CreateFromString(traceId),
                                ParentSpanId = parentSpanId == "0000000000000000" ? default : ActivitySpanId.CreateFromString(parentSpanId),
                                StartTime = startTime,
                            };

                            item.Attributes[SpanAttributeConstants.HttpMethodKey] = httpMethod;
                            item.Attributes[SpanAttributeConstants.HttpPathKey] = path;

                            activities[activityId] = item;
                        }
                    }
                    else if (traceEvent.EventName == "Activity1Stop/Stop")
                    {
                        var listenerEventName = (string)traceEvent.PayloadByName("EventName");

                        if (traceEvent.PayloadByName("Arguments") is IDictionary<string, object>[] arguments)
                        {
                            string activityId = null;
                            TimeSpan duration = default;
                            int statusCode = 0;

                            foreach (var arg in arguments)
                            {
                                var key = (string)arg["Key"];
                                var value = (string)arg["Value"];

                                if (key == "ActivityId")
                                {
                                    activityId = value;
                                }
                                else if (key == "StatusCode")
                                {
                                    statusCode = int.Parse(value);
                                }
                                else if (key == "ActivityDuration")
                                {
                                    duration = new TimeSpan(long.Parse(value));
                                }
                            }

                            if (string.IsNullOrEmpty(activityId))
                            {
                                // Not a 3.1 application (we can detect this earlier)
                                return;
                            }

                            if (activities.TryGetValue(activityId, out var item))
                            {
                                item.Attributes[SpanAttributeConstants.HttpStatusCodeKey] = statusCode;
                                item.EndTime = item.StartTime + duration;

                                var spanData = new SpanData(
                                    item.Name,
                                    new SpanContext(item.TraceId, item.SpanId, ActivityTraceFlags.Recorded),
                                    item.ParentSpanId,
                                    SpanKind.Server,
                                    item.StartTime,
                                    item.Attributes,
                                    Enumerable.Empty<Event>(),
                                    Enumerable.Empty<Link>(),
                                    null,
                                    Status.Ok,
                                    item.EndTime);

                                processor.OnEnd(spanData);

                                activities.Remove(activityId);
                            }
                        }
                    }
                    else if (traceEvent.EventName == "Activity2Start/Start")
                    {
                        var listenerEventName = (string)traceEvent.PayloadByName("EventName");

                        _logger.LogDebug("[" + replicaName + "]: " + listenerEventName + " fired");
                    }
                    else if (traceEvent.EventName == "Activity2Stop/Stop")
                    {
                        var listenerEventName = (string)traceEvent.PayloadByName("EventName");

                        _logger.LogDebug("[" + replicaName + "]: " + listenerEventName + " fired");
                    }
                    // Metrics
                    else if (traceEvent.EventName.Equals("EventCounters"))
                    {
                        var payloadVal = (IDictionary<string, object>)traceEvent.PayloadValue(0);
                        var eventPayload = (IDictionary<string, object>)payloadVal["Payload"];

                        ICounterPayload payload = CounterPayload.FromPayload(eventPayload);

                        replica.Metrics[traceEvent.ProviderName + "/" + payload.Name] = payload.Value;
                    }
                }
                catch (Exception ex)
                {
                    _logger.LogError(ex, "Error processing counter for {ProviderName}:{EventName}", traceEvent.ProviderName, traceEvent.EventName);
                }
            };

            // Logging
            string lastFormattedMessage = "";

            var logActivities = new Dictionary<Guid, LogActivityItem>();
            var stack = new Stack<Guid>();

            source.Dynamic.AddCallbackForProviderEvent(MicrosoftExtensionsLoggingProviderName, "ActivityJsonStart/Start", (traceEvent) =>
            {
                var factoryId = (int)traceEvent.PayloadByName("FactoryID");
                var categoryName = (string)traceEvent.PayloadByName("LoggerName");
                var argsJson = (string)traceEvent.PayloadByName("ArgumentsJson");

                // TODO: Store this information by logger factory id
                var item = new LogActivityItem
                {
                    ActivityID = traceEvent.ActivityID,
                    ScopedObject = new LogObject(JsonDocument.Parse(argsJson).RootElement),
                };

                if (stack.TryPeek(out var parentId) && logActivities.TryGetValue(parentId, out var parentItem))
                {
                    item.Parent = parentItem;
                }

                stack.Push(traceEvent.ActivityID);
                logActivities[traceEvent.ActivityID] = item;
            });

            source.Dynamic.AddCallbackForProviderEvent(MicrosoftExtensionsLoggingProviderName, "ActivityJsonStop/Stop", (traceEvent) =>
            {
                var factoryId = (int)traceEvent.PayloadByName("FactoryID");
                var categoryName = (string)traceEvent.PayloadByName("LoggerName");

                stack.Pop();
                logActivities.Remove(traceEvent.ActivityID);
            });

            source.Dynamic.AddCallbackForProviderEvent(MicrosoftExtensionsLoggingProviderName, "MessageJson", (traceEvent) =>
            {
                // Level, FactoryID, LoggerName, EventID, EventName, ExceptionJson, ArgumentsJson
                var logLevel = (LogLevel)traceEvent.PayloadByName("Level");
                var factoryId = (int)traceEvent.PayloadByName("FactoryID");
                var categoryName = (string)traceEvent.PayloadByName("LoggerName");
                var eventId = (int)traceEvent.PayloadByName("EventId");
                var eventName = (string)traceEvent.PayloadByName("EventName");
                var exceptionJson = (string)traceEvent.PayloadByName("ExceptionJson");
                var argsJson = (string)traceEvent.PayloadByName("ArgumentsJson");

                // There's a bug that causes some of the columns to get mixed up
                if (eventName.StartsWith("{"))
                {
                    argsJson = exceptionJson;
                    exceptionJson = eventName;
                    eventName = null;
                }

                if (string.IsNullOrEmpty(argsJson))
                {
                    return;
                }

                Exception exception = null;

                var logger = loggerFactory.CreateLogger(categoryName);

                var scopes = new List<IDisposable>();

                if (logActivities.TryGetValue(traceEvent.ActivityID, out var logActivityItem))
                {
                    // REVIEW: Does order matter here? We're combining everything anyways.
                    while (logActivityItem != null)
                    {
                        scopes.Add(logger.BeginScope(logActivityItem.ScopedObject));

                        logActivityItem = logActivityItem.Parent;
                    }
                }

                try
                {
                    if (exceptionJson != "{}")
                    {
                        var exceptionMessage = JsonSerializer.Deserialize<JsonElement>(exceptionJson);
                        exception = new LoggerException(exceptionMessage);
                    }

                    var message = JsonSerializer.Deserialize<JsonElement>(argsJson);
                    if (message.TryGetProperty("{OriginalFormat}", out var formatElement))
                    {
                        var formatString = formatElement.GetString();
                        var formatter = new LogValuesFormatter(formatString);
                        object[] args = new object[formatter.ValueNames.Count];
                        for (int i = 0; i < args.Length; i++)
                        {
                            args[i] = message.GetProperty(formatter.ValueNames[i]).GetString();
                        }

                        logger.Log(logLevel, new EventId(eventId, eventName), exception, formatString, args);
                    }
                    else
                    {
                        var obj = new LogObject(message, lastFormattedMessage);
                        logger.Log(logLevel, new EventId(eventId, eventName), obj, exception, LogObject.Callback);
                    }
                }
                catch (Exception ex)
                {
                    _logger.LogDebug(ex, "Error processing log entry for {ServiceName}", replicaName);
                }
                finally
                {
                    scopes.ForEach(d => d.Dispose());
                }
            });

            source.Dynamic.AddCallbackForProviderEvent(MicrosoftExtensionsLoggingProviderName, "FormattedMessage", (traceEvent) =>
            {
                // Level, FactoryID, LoggerName, EventID, EventName, FormattedMessage
                var logLevel = (LogLevel)traceEvent.PayloadByName("Level");
                var factoryId = (int)traceEvent.PayloadByName("FactoryID");
                var categoryName = (string)traceEvent.PayloadByName("LoggerName");
                var eventId = (int)traceEvent.PayloadByName("EventId");
                var eventName = (string)traceEvent.PayloadByName("EventName");
                var formattedMessage = (string)traceEvent.PayloadByName("FormattedMessage");

                if (string.IsNullOrEmpty(formattedMessage))
                {
                    formattedMessage = eventName;
                    eventName = "";
                }

                lastFormattedMessage = formattedMessage;
            });

            source.Process();
        }
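// Note (added): the { "EventCounterIntervalSec", "1" } argument passed to the metrics providers above
// asks each EventSource to aggregate and publish its EventCounters payloads once per second, which is
// the cadence at which the "EventCounters" handlers in this document receive data.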
public static bool TryGetCounterPayload(this TraceEvent traceEvent, CounterFilter filter, out ICounterPayload payload)
{
    payload = null;

    if ("EventCounters".Equals(traceEvent.EventName))
    {
        IDictionary<string, object> payloadVal = (IDictionary<string, object>)(traceEvent.PayloadValue(0));
        IDictionary<string, object> payloadFields = (IDictionary<string, object>)(payloadVal["Payload"]);

        //Make sure we are part of the requested series. If multiple clients request metrics, all of them get the metrics.
        string series = payloadFields["Series"].ToString();
        string counterName = payloadFields["Name"].ToString();

        //CONSIDER
        //Concurrent counter sessions do not each get a separate interval. Instead the payload
        //for _all_ the counters changes the Series to be the lowest specified interval, on a per provider basis.
        //Currently the CounterFilter will remove any data whose Series doesn't match the requested interval.
        if (!filter.IsIncluded(traceEvent.ProviderName, counterName, GetInterval(series)))
        {
            return false;
        }

        float intervalSec = (float)payloadFields["IntervalSec"];
        string displayName = payloadFields["DisplayName"].ToString();
        string displayUnits = payloadFields["DisplayUnits"].ToString();
        double value = 0;
        CounterType counterType = CounterType.Metric;

        if (payloadFields["CounterType"].Equals("Mean"))
        {
            value = (double)payloadFields["Mean"];
        }
        else if (payloadFields["CounterType"].Equals("Sum"))
        {
            counterType = CounterType.Rate;
            value = (double)payloadFields["Increment"];
            if (string.IsNullOrEmpty(displayUnits))
            {
                displayUnits = "count";
            }
            //TODO Should we make these /sec like the dotnet-counters tool?
        }

        // Note that dimensional data such as pod and namespace are automatically added in prometheus and azure monitor scenarios.
        // We no longer add it here.
        payload = new CounterPayload(
            traceEvent.TimeStamp,
            traceEvent.ProviderName,
            counterName,
            displayName,
            displayUnits,
            value,
            counterType,
            intervalSec);
        return true;
    }

    return false;
}
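// Typical call site (illustrative sketch, reusing names that appear elsewhere in this document):
//
//     source.Dynamic.All += traceEvent =>
//     {
//         if (traceEvent.TryGetCounterPayload(filter, out ICounterPayload payload))
//         {
//             _store.AddMetric(payload); // e.g. the AddMetric method shown earlier
//         }
//     };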
public MetricKey(ICounterPayload metric)
{
    _metric = metric;
}
protected abstract void SerializeCounter(Stream stream, ICounterPayload counter);