/// <summary>
/// Performs one flush cycle: snapshots and clears the live metric accumulators
/// under the write lock, computes derived statistics via ProcessMetrics,
/// optionally dumps the snapshot to the console, and raises the flush callbacks.
/// </summary>
public void FlushMetrics()
{
    flushStopwatch.Restart();
    if (BeforeFlush != null)
    {
        BeforeFlush();
    }

    // Flush timestamp in unix seconds.
    var flushTimestamp = (long)Math.Round(DateTimeToUnixTimestamp(DateTime.UtcNow)); // seconds
    if (old_timestamp > 0)
    {
        // Drift of this flush beyond the expected interval since the previous flush.
        gauges["statsd.timestamp_lag_namespace"] = (flushTimestamp - old_timestamp - (FlushInterval / 1000));
    }
    old_timestamp = flushTimestamp;

    // Copy every accumulator out under the write lock so the snapshot is
    // internally consistent, then reset the live state for the next interval.
    Metrics snapshot = null;
    flushMetricsReaderWriterLock.EnterWriteLock();
    try
    {
        snapshot = new Metrics
        {
            counters = new Dictionary<string, long>(counters),
            gauges = new Dictionary<string, long>(gauges),
            timers = new Dictionary<string, List<long>>(timers),
            timer_counters = new Dictionary<string, long>(timer_counters),
            sets = new Dictionary<string, HashSet<string>>(sets),
            counter_rates = new Dictionary<string, long>(),
            timer_data = new Dictionary<string, Dictionary<string, long>>(),
            pctThreshold = PctThreshold,
            statsd_metrics = new Dictionary<string, long>(),
        };
        ClearMetrics();
    }
    finally
    {
        flushMetricsReaderWriterLock.ExitWriteLock();
    }

    // Derive counter rates and timer statistics on the snapshot (outside the lock).
    ProcessMetrics(snapshot, FlushInterval, flushTimestamp);

    if (FlushToConsole)
    {
        Console.Clear();
        Console.WriteLine("Flush=" + flushTimestamp);
        foreach (var counter in snapshot.counters)
        {
            Console.WriteLine("stats.counters.{0}.count = {1}", counter.Key, counter.Value);
            Console.WriteLine("stats.counters.{0}.rate = {1}", counter.Key, snapshot.counter_rates[counter.Key]);
        }
        foreach (var timer in snapshot.timers)
        {
            foreach (var stat in snapshot.timer_data[timer.Key])
            {
                Console.WriteLine("stats.timers.{0}.{1} = {2}", timer.Key, stat.Key, stat.Value);
            }
        }
        foreach (var gauge in snapshot.gauges)
        {
            Console.WriteLine("stats.gauges.{0} = {1}", gauge.Key, gauge.Value);
        }
        foreach (var setEntry in snapshot.sets)
        {
            Console.WriteLine("stats.sets.{0}.count = {1}", setEntry.Key, setEntry.Value.Count);
        }
        Console.WriteLine("Flush End=" + flushTimestamp);
    }

    if (OnFlush != null)
    {
        OnFlush(flushTimestamp, snapshot);
    }

    flushStopwatch.Stop();
    // Record how long this flush took as a gauge for the next flush to report.
    InReadLock(() => { AddToGauge("statsd.flush_duration", (int)Math.Round(flushStopwatch.Elapsed.TotalMilliseconds)); });
}
/// <summary>
/// Computes derived statistics for one flush: per-second counter rates and,
/// for each timer, min/max/mean/median/stddev plus a band of statistics for
/// every configured percentile threshold. Results are written in place into
/// the snapshot's counter_rates, timer_data and statsd_metrics members.
/// </summary>
/// <param name="metrics">Metrics snapshot; its derived-data members are populated here.</param>
/// <param name="flushInterval">Flush interval in milliseconds (divided by 1000 for per-second rates).</param>
/// <param name="ts">Flush timestamp (unix seconds); not used by this computation but part of the flush signature.</param>
public static void ProcessMetrics(Metrics metrics, double flushInterval, long ts)
{
    var sw = Stopwatch.StartNew();
    var counter_rates = (Dictionary<string, long>)metrics.counter_rates;
    var timer_data = (Dictionary<string, Dictionary<string, long>>)metrics.timer_data;
    var statsd_metrics = (Dictionary<string, long>)metrics.statsd_metrics;
    var counters = (Dictionary<string, long>)metrics.counters;
    var timers = (Dictionary<string, List<long>>)metrics.timers;
    var timer_counters = (Dictionary<string, long>)metrics.timer_counters;
    var pctThreshold = (int[])metrics.pctThreshold;
    //var histogram = metrics.histogram;

    foreach (var key in counters.Keys)
    {
        var value = (double)counters[key];
        // calculate "per second" rate
        counter_rates[key] = (long)Math.Round(value / (flushInterval / 1000d));
    }

    foreach (var key in timers.Keys)
    {
        var current_timer_data = new Dictionary<string, long>();
        timer_data[key] = current_timer_data;
        if (timers[key].Count > 0)
        {
            var values = timers[key];
            values.Sort();
            var count = values.Count;
            var min = values[0];
            var max = values[count - 1];

            // Prefix sums (and sums of squares) make each percentile band O(1)
            // instead of re-summing a slice per threshold.
            var cumulativeValues = new List<long>() { min };
            var cumulSumSquaresValues = new List<long>() { min * min };
            for (var i = 1; i < count; i++)
            {
                cumulativeValues.Add(values[i] + cumulativeValues[i - 1]);
                cumulSumSquaresValues.Add((values[i] * values[i]) + cumulSumSquaresValues[i - 1]);
            }

            var sum = min;
            var sumSquares = min * min;
            var mean = min;
            var thresholdBoundary = max;

            foreach (var pct in pctThreshold)
            {
                var numInThreshold = count;
                if (count > 1)
                {
                    numInThreshold = (int)Math.Round(((double)Math.Abs(pct) / 100d) * (double)count);
                    if (numInThreshold == 0)
                    {
                        continue;
                    }
                    if (pct > 0)
                    {
                        // Positive percentile: keep the BOTTOM numInThreshold values.
                        thresholdBoundary = values[numInThreshold - 1];
                        sum = cumulativeValues[numInThreshold - 1];
                        sumSquares = cumulSumSquaresValues[numInThreshold - 1];
                    }
                    else
                    {
                        // Negative percentile: keep the TOP numInThreshold values.
                        thresholdBoundary = values[count - numInThreshold];
                        // BUG FIX: when numInThreshold == count (e.g. pct == -100, or
                        // Math.Round reaching count) the old code indexed
                        // cumulativeValues[-1] and threw ArgumentOutOfRangeException.
                        // Treat the excluded prefix as empty (sum 0) in that case.
                        var below = count - numInThreshold; // values excluded from the band
                        sum = cumulativeValues[count - 1] - (below > 0 ? cumulativeValues[below - 1] : 0);
                        sumSquares = cumulSumSquaresValues[count - 1] - (below > 0 ? cumulSumSquaresValues[below - 1] : 0);
                    }
                    mean = (long)Math.Round((double)sum / (double)numInThreshold);
                }
                var clean_pct = "" + pct;
                clean_pct = clean_pct.Replace(".", "_").Replace("-", "top");
                current_timer_data["count_" + clean_pct] = numInThreshold;
                current_timer_data["mean_" + clean_pct] = mean;
                current_timer_data[(pct > 0 ? "upper_" : "lower_") + clean_pct] = thresholdBoundary;
                current_timer_data["sum_" + clean_pct] = sum;
                current_timer_data["sum_squares_" + clean_pct] = sumSquares;
            }

            // Whole-population statistics.
            sum = cumulativeValues[count - 1];
            sumSquares = cumulSumSquaresValues[count - 1];
            mean = (long)Math.Round((double)sum / (double)count);
            long sumOfDiffs = 0;
            for (var i = 0; i < count; i++)
            {
                sumOfDiffs += (values[i] - mean) * (values[i] - mean);
            }
            var mid = (int)Math.Floor((double)count / 2d);
            // Even count: integer average of the two middle values.
            var median = (count % 2) > 0 ? values[mid] : (values[mid - 1] + values[mid]) / 2;
            var stddev = Math.Sqrt(Math.Round((double)sumOfDiffs / (double)count));
            current_timer_data["std"] = (long)stddev;
            current_timer_data["upper"] = max;
            current_timer_data["lower"] = min;
            current_timer_data["count"] = timer_counters[key];
            current_timer_data["count_ps"] = (long)Math.Round((double)timer_counters[key] / (flushInterval / 1000d));
            current_timer_data["sum"] = sum;
            current_timer_data["sum_squares"] = sumSquares;
            current_timer_data["mean"] = mean;
            current_timer_data["median"] = median;
        }
        else
        {
            current_timer_data["count"] = 0;
            current_timer_data["count_ps"] = 0;
        }
    }
    sw.Stop();
    // NOTE(review): upstream statsd reports processing_time in milliseconds;
    // TotalSeconds rounded to int is almost always 0 here — confirm intended units.
    statsd_metrics["processing_time"] = (int)Math.Round(sw.Elapsed.TotalSeconds);
}
/// <summary>
/// Writes the flushed snapshot to the database: one traffic-summary row per
/// host/route combination, one load-balancer-statistics row per
/// frontend/backend/server combination, and one row of statsd self-statistics.
/// No-op unless FlushToDatabase is set and host data is present.
/// </summary>
/// <param name="time_stamp">Flush timestamp (unix seconds) stamped onto every row.</param>
/// <param name="metrics">Flushed metrics snapshot (already processed by ProcessMetrics).</param>
public static void Flush(long time_stamp, Metrics metrics)
{
    if (!FlushToDatabase) { return; }
    if (!metrics.sets.ContainsKey("haproxy.logs.host")) { return; }
    if (sw == null) { sw = new Stopwatch(); }
    sw.Restart();
    using (var c = GetOpenSqlConnection())
    {
        var applications = metrics.sets.GetValueOrDefault("haproxy.logs.applications", EmptySet);
        // Guard the route set like the applications set so a snapshot without
        // routes does not throw (consistent with the EmptySet pattern used above).
        var routes = metrics.sets.GetValueOrDefault("haproxy.logs.routes", EmptySet);
        foreach (var host in metrics.sets["haproxy.logs.host"].OrderBy(x => x))
        {
            var hostClean = host.Replace('.', '_');
            foreach (var routeName in routes.OrderBy(x => x))
            {
                // Routes may be prefixed "<applicationId>.<route>"; recognize the
                // prefix only when it is a known application.
                var applicationId = routeName.IndexOf(".") > 0 && applications.Contains(routeName.Substring(0, routeName.IndexOf(".")))
                    ? routeName.Substring(0, routeName.IndexOf("."))
                    : "";
                var routeNameClean = routeName.Replace('.', '_');
                // All metric keys for this host/route share one prefix; build it once
                // instead of re-concatenating it for every lookup.
                var prefix = "haproxy.logs." + hostClean + ".route." + routeNameClean;
                if (!metrics.counters.ContainsKey(prefix + ".hits"))
                {
                    continue; // invalid route/host combo
                }
                // Hoist the per-route timer dictionaries; the AspNet/Sql timers are
                // optional, so fall back to EmptyTimerData as before.
                var tr = metrics.timer_data[prefix + ".tr"];
                var aspNet = metrics.timer_data.GetValueOrDefault(prefix + ".AspNetDurationMs", EmptyTimerData);
                var sqlCount = metrics.timer_data.GetValueOrDefault(prefix + ".SqlCount", EmptyTimerData);
                var sqlDuration = metrics.timer_data.GetValueOrDefault(prefix + ".SqlDurationMs", EmptyTimerData);
                var row = new
                {
                    Timestamp = time_stamp,
                    Host = host,
                    ApplicationId = applicationId,
                    RouteName = routeName,
                    Hits = metrics.counters[prefix + ".hits"],
                    BytesRead = metrics.counters[prefix + ".bytes_read"],
                    Tr_median = tr["median"],
                    Tr_mean = tr["mean"],
                    Tr_sum = tr["sum"],
                    // count_90 is only meaningful when the 90th-percentile band is non-empty.
                    Tr_count_90 = tr["sum_90"] > 0 ? tr["count_90"] : 0,
                    Tr_mean_90 = tr["mean_90"],
                    Tr_sum_90 = tr["sum_90"],
                    AspNetDurationMs_median = aspNet["median"],
                    AspNetDurationMs_mean = aspNet["mean"],
                    AspNetDurationMs_sum = aspNet["sum"],
                    AspNetDurationMs_count_90 = aspNet["sum_90"] > 0 ? aspNet["count_90"] : 0,
                    AspNetDurationMs_mean_90 = aspNet["mean_90"],
                    AspNetDurationMs_sum_90 = aspNet["sum_90"],
                    SqlCount_median = sqlCount["median"],
                    SqlCount_mean = sqlCount["mean"],
                    SqlCount_sum = sqlCount["sum"],
                    SqlCount_count_90 = sqlCount["sum_90"] > 0 ? sqlCount["count_90"] : 0,
                    SqlCount_mean_90 = sqlCount["mean_90"],
                    SqlCount_sum_90 = sqlCount["sum_90"],
                    SqlDurationMs_median = sqlDuration["median"],
                    SqlDurationMs_mean = sqlDuration["mean"],
                    SqlDurationMs_sum = sqlDuration["sum"],
                    SqlDurationMs_count_90 = sqlDuration["sum_90"] > 0 ? sqlDuration["count_90"] : 0,
                    SqlDurationMs_mean_90 = sqlDuration["mean_90"],
                    SqlDurationMs_sum_90 = sqlDuration["sum_90"],
                };
                InsertTrafficSummaryRow(row, c);
            }
        }

        // Load-balancer connection gauges: one row per fe/be/srv combination
        // for which a srv_conn gauge exists.
        if (metrics.gauges.ContainsKey("haproxy.logs.actconn"))
        {
            var actconn = metrics.gauges["haproxy.logs.actconn"];
            foreach (var frontend_name in metrics.sets.GetValueOrDefault("haproxy.logs.fe", EmptySet).OrderBy(x => x))
            {
                if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".feconn")) { continue; }
                var feconn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".feconn"];
                foreach (var backend_name in metrics.sets.GetValueOrDefault("haproxy.logs.be", EmptySet).OrderBy(x => x))
                {
                    if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".beconn")) { continue; }
                    var beconn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".beconn"];
                    foreach (var server_name in metrics.sets.GetValueOrDefault("haproxy.logs.srv", EmptySet).OrderBy(x => x))
                    {
                        if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".srv." + server_name + ".srv_conn")) { continue; }
                        var srv_conn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".srv." + server_name + ".srv_conn"];
                        var row = new
                        {
                            Timestamp = time_stamp,
                            Frontend = frontend_name,
                            Backend = backend_name,
                            Server = server_name,
                            actconn = actconn,
                            feconn = feconn,
                            beconn = beconn,
                            srv_conn = srv_conn,
                        };
                        InsertLoadBalancerStatisticsRow(row, c);
                    }
                }
            }
        }

        // statsd self-statistics row; every value defaults to 0 when absent.
        long packets_received = 0;
        long metrics_received = 0;
        long databasewriter_duration = 0;
        long flush_duration = 0;
        long timestamp_lag_namespace = 0;
        long queue_length = 0;
        // BUG FIX: the queue gauge was previously read unguarded and threw
        // KeyNotFoundException when absent; guard it like every other read here.
        if (metrics.gauges.ContainsKey("haproxy.logs.queue")) { queue_length = metrics.gauges["haproxy.logs.queue"]; }
        if (metrics.counters.ContainsKey("haproxy.logs.packets_received")) { packets_received = metrics.counters["haproxy.logs.packets_received"]; }
        if (metrics.counters.ContainsKey("statsd.metrics_received")) { metrics_received = metrics.counters["statsd.metrics_received"]; }
        if (metrics.gauges.ContainsKey("statsd.haproxy.databasewriter_duration")) { databasewriter_duration = metrics.gauges["statsd.haproxy.databasewriter_duration"]; }
        if (metrics.gauges.ContainsKey("statsd.flush_duration")) { flush_duration = metrics.gauges["statsd.flush_duration"]; }
        if (metrics.gauges.ContainsKey("statsd.timestamp_lag_namespace")) { timestamp_lag_namespace = metrics.gauges["statsd.timestamp_lag_namespace"]; }
        var stats_row = new
        {
            Timestamp = time_stamp,
            QueueLength = queue_length,
            PacketsReceived = packets_received,
            MetricsReceived = metrics_received,
            DatabaseWriterDurationMS = databasewriter_duration,
            FlushDurationMS = flush_duration,
            TimestampLagNamespace = timestamp_lag_namespace,
        };
        InsertHAProxyTrafficLoggerStatisticsRow(stats_row, c);
    }
    sw.Stop();
    // Publish how long the database write took as a gauge for the next flush.
    TrafficLog.collector.InReadLock(() => { TrafficLog.collector.SetGauge("statsd.haproxy.databasewriter_duration", (int)Math.Round(sw.Elapsed.TotalMilliseconds)); });
}
/// <summary>
/// Dumps the flushed snapshot to the console: a per host/route traffic table,
/// a frontend/backend/server connection table, and statsd self-statistics.
/// No-op unless FlushToConsole is set.
/// </summary>
/// <param name="time_stamp">Flush timestamp (unix seconds) shown in the header.</param>
/// <param name="metrics">Flushed metrics snapshot (already processed by ProcessMetrics).</param>
public static void Flush(long time_stamp, Metrics metrics)
{
    if (!FlushToConsole) { return; }
    Console.Clear();
    Console.WriteLine("statsd haproxy.logs: " + ExtensionMethods.UnixTimeStampToDateTime(time_stamp).ToString("O"));
    Console.WriteLine();
    if (!metrics.sets.ContainsKey("haproxy.logs.host")) { return; }
    Console.WriteLine("{0,10} {1,5} {2,15} {3,7} {4,7:F0} {5,4} {6,5} {7,5} {8,5}"
        , "host"
        , "appid"
        , "route"
        , "hits"
        , "kb/sum"
        , "tr"
        , "asp_d"
        , "sql_c"
        , "sql_d"
    );
    var applications = metrics.sets.GetValueOrDefault("haproxy.logs.applications", EmptySet);
    // Guard the route set like the applications set so a snapshot without
    // routes does not throw (consistent with the EmptySet pattern used above).
    var routes = metrics.sets.GetValueOrDefault("haproxy.logs.routes", EmptySet);
    foreach (var host in metrics.sets["haproxy.logs.host"].OrderBy(x => x))
    {
        var hostClean = host.Replace('.', '_');
        foreach (var routeName in routes.OrderBy(x => x))
        {
            // Routes may be prefixed "<applicationId>.<route>"; recognize the
            // prefix only when it is a known application.
            var applicationId = routeName.IndexOf(".") > 0 && applications.Contains(routeName.Substring(0, routeName.IndexOf(".")))
                ? routeName.Substring(0, routeName.IndexOf("."))
                : "";
            var routeNameClean = routeName.Replace('.', '_');
            // All metric keys for this host/route share one prefix; build it once.
            var prefix = "haproxy.logs." + hostClean + ".route." + routeNameClean;
            if (!metrics.counters.ContainsKey(prefix + ".hits"))
            {
                continue; // invalid route/host combo
            }
            Console.WriteLine("{0,10} {1,5} {2,15} {3,7} {4,7:F0} {5,4} {6,5} {7,5} {8,5}"
                , TrimAndPad(host, 10)
                , TrimAndPad(applicationId, 5)
                , TrimAndPad(routeName.Replace(applicationId + ".", ""), 15)
                , metrics.counters[prefix + ".hits"]
                , (double)metrics.counters[prefix + ".bytes_read"] / 1024d
                , metrics.timer_data[prefix + ".tr"]["mean"]
                , metrics.timer_data.GetValueOrDefault(prefix + ".AspNetDurationMs", EmptyTimerData)["mean"]
                , metrics.timer_data.GetValueOrDefault(prefix + ".SqlCount", EmptyTimerData)["mean"]
                , metrics.timer_data.GetValueOrDefault(prefix + ".SqlDurationMs", EmptyTimerData)["mean"]
            );
        }
    }
    Console.WriteLine();
    Console.WriteLine("{0,10} {1,10} {2,10} {3,10} {4,10} {5,10} {6,10}"
        , "actconn"
        , "fe_name"
        , "feconn"
        , "be_name"
        , "beconn"
        , "srv_name"
        , "srv_conn"
    );
    // Load-balancer connection gauges: one line per fe/be/srv combination
    // for which a srv_conn gauge exists.
    if (metrics.gauges.ContainsKey("haproxy.logs.actconn"))
    {
        var actconn = metrics.gauges["haproxy.logs.actconn"];
        foreach (var frontend_name in metrics.sets.GetValueOrDefault("haproxy.logs.fe", EmptySet).OrderBy(x => x))
        {
            if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".feconn")) { continue; }
            var feconn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".feconn"];
            foreach (var backend_name in metrics.sets.GetValueOrDefault("haproxy.logs.be", EmptySet).OrderBy(x => x))
            {
                if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".beconn")) { continue; }
                var beconn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".beconn"];
                foreach (var server_name in metrics.sets.GetValueOrDefault("haproxy.logs.srv", EmptySet).OrderBy(x => x))
                {
                    if (!metrics.gauges.ContainsKey("haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".srv." + server_name + ".srv_conn")) { continue; }
                    var srv_conn = metrics.gauges["haproxy.logs.fe." + frontend_name + ".be." + backend_name + ".srv." + server_name + ".srv_conn"];
                    Console.WriteLine("{0,10} {1,10} {2,10} {3,10} {4,10} {5,10} {6,10}"
                        , actconn
                        , TrimAndPad(frontend_name, 10)
                        , feconn
                        , TrimAndPad(backend_name, 10)
                        , beconn
                        , TrimAndPad(server_name, 10)
                        , srv_conn
                    );
                }
            }
        }
    }
    Console.WriteLine();
    // BUG FIX: the queue gauge was previously read unguarded and threw
    // KeyNotFoundException when absent; guard it like every other stat below.
    if (metrics.gauges.ContainsKey("haproxy.logs.queue")) { Console.WriteLine("haproxy.logs.queue=" + metrics.gauges["haproxy.logs.queue"]); }
    if (metrics.counters.ContainsKey("haproxy.logs.packets_received")) { Console.WriteLine("haproxy.logs.packets_received=" + metrics.counters["haproxy.logs.packets_received"]); }
    if (metrics.counters.ContainsKey("statsd.metrics_received")) { Console.WriteLine("statsd.metrics_received=" + metrics.counters["statsd.metrics_received"]); }
    if (metrics.gauges.ContainsKey("statsd.haproxy.databasewriter_duration")) { Console.WriteLine("statsd.haproxy.databasewriter_duration=" + metrics.gauges["statsd.haproxy.databasewriter_duration"]); }
    if (metrics.gauges.ContainsKey("statsd.flush_duration")) { Console.WriteLine("statsd.flush_duration=" + metrics.gauges["statsd.flush_duration"]); }
    if (metrics.gauges.ContainsKey("statsd.timestamp_lag_namespace")) { Console.WriteLine("statsd.timestamp_lag_namespace=" + metrics.gauges["statsd.timestamp_lag_namespace"]); }
}
/// <summary>
/// Computes derived statistics for one flush: per-second counter rates and,
/// for each timer, min/max/mean/median/stddev plus a band of statistics for
/// every configured percentile threshold. Results are written in place into
/// the snapshot's counter_rates, timer_data and statsd_metrics members.
/// </summary>
/// <param name="metrics">Metrics snapshot; its derived-data members are populated here.</param>
/// <param name="flushInterval">Flush interval in milliseconds (divided by 1000 for per-second rates).</param>
/// <param name="ts">Flush timestamp (unix seconds); not used by this computation but part of the flush signature.</param>
public static void ProcessMetrics(Metrics metrics, double flushInterval, long ts)
{
    var sw = Stopwatch.StartNew();
    var counter_rates = (Dictionary<string, long>)metrics.counter_rates;
    var timer_data = (Dictionary<string, Dictionary<string, long>>)metrics.timer_data;
    var statsd_metrics = (Dictionary<string, long>)metrics.statsd_metrics;
    var counters = (Dictionary<string, long>)metrics.counters;
    var timers = (Dictionary<string, List<long>>)metrics.timers;
    var timer_counters = (Dictionary<string, long>)metrics.timer_counters;
    var pctThreshold = (int[])metrics.pctThreshold;
    //var histogram = metrics.histogram;

    foreach (var key in counters.Keys)
    {
        var value = (double)counters[key];
        // calculate "per second" rate
        counter_rates[key] = (long)Math.Round(value / (flushInterval / 1000d));
    }

    foreach (var key in timers.Keys)
    {
        var current_timer_data = new Dictionary<string, long>();
        timer_data[key] = current_timer_data;
        if (timers[key].Count > 0)
        {
            var values = timers[key];
            values.Sort();
            var count = values.Count;
            var min = values[0];
            var max = values[count - 1];

            // Prefix sums (and sums of squares) make each percentile band O(1)
            // instead of re-summing a slice per threshold.
            var cumulativeValues = new List<long>() { min };
            var cumulSumSquaresValues = new List<long>() { min * min };
            for (var i = 1; i < count; i++)
            {
                cumulativeValues.Add(values[i] + cumulativeValues[i - 1]);
                cumulSumSquaresValues.Add((values[i] * values[i]) + cumulSumSquaresValues[i - 1]);
            }

            var sum = min;
            var sumSquares = min * min;
            var mean = min;
            var thresholdBoundary = max;

            foreach (var pct in pctThreshold)
            {
                var numInThreshold = count;
                if (count > 1)
                {
                    numInThreshold = (int)Math.Round(((double)Math.Abs(pct) / 100d) * (double)count);
                    if (numInThreshold == 0)
                    {
                        continue;
                    }
                    if (pct > 0)
                    {
                        // Positive percentile: keep the BOTTOM numInThreshold values.
                        thresholdBoundary = values[numInThreshold - 1];
                        sum = cumulativeValues[numInThreshold - 1];
                        sumSquares = cumulSumSquaresValues[numInThreshold - 1];
                    }
                    else
                    {
                        // Negative percentile: keep the TOP numInThreshold values.
                        thresholdBoundary = values[count - numInThreshold];
                        // BUG FIX: when numInThreshold == count (e.g. pct == -100, or
                        // Math.Round reaching count) the old code indexed
                        // cumulativeValues[-1] and threw ArgumentOutOfRangeException.
                        // Treat the excluded prefix as empty (sum 0) in that case.
                        var below = count - numInThreshold; // values excluded from the band
                        sum = cumulativeValues[count - 1] - (below > 0 ? cumulativeValues[below - 1] : 0);
                        sumSquares = cumulSumSquaresValues[count - 1] - (below > 0 ? cumulSumSquaresValues[below - 1] : 0);
                    }
                    mean = (long)Math.Round((double)sum / (double)numInThreshold);
                }
                var clean_pct = "" + pct;
                clean_pct = clean_pct.Replace(".", "_").Replace("-", "top");
                current_timer_data["count_" + clean_pct] = numInThreshold;
                current_timer_data["mean_" + clean_pct] = mean;
                current_timer_data[(pct > 0 ? "upper_" : "lower_") + clean_pct] = thresholdBoundary;
                current_timer_data["sum_" + clean_pct] = sum;
                current_timer_data["sum_squares_" + clean_pct] = sumSquares;
            }

            // Whole-population statistics.
            sum = cumulativeValues[count - 1];
            sumSquares = cumulSumSquaresValues[count - 1];
            mean = (long)Math.Round((double)sum / (double)count);
            long sumOfDiffs = 0;
            for (var i = 0; i < count; i++)
            {
                sumOfDiffs += (values[i] - mean) * (values[i] - mean);
            }
            var mid = (int)Math.Floor((double)count / 2d);
            // Even count: integer average of the two middle values.
            var median = (count % 2) > 0 ? values[mid] : (values[mid - 1] + values[mid]) / 2;
            var stddev = Math.Sqrt(Math.Round((double)sumOfDiffs / (double)count));
            current_timer_data["std"] = (long)stddev;
            current_timer_data["upper"] = max;
            current_timer_data["lower"] = min;
            current_timer_data["count"] = timer_counters[key];
            current_timer_data["count_ps"] = (long)Math.Round((double)timer_counters[key] / (flushInterval / 1000d));
            current_timer_data["sum"] = sum;
            current_timer_data["sum_squares"] = sumSquares;
            current_timer_data["mean"] = mean;
            current_timer_data["median"] = median;
        }
        else
        {
            current_timer_data["count"] = 0;
            current_timer_data["count_ps"] = 0;
        }
    }
    sw.Stop();
    // NOTE(review): upstream statsd reports processing_time in milliseconds;
    // TotalSeconds rounded to int is almost always 0 here — confirm intended units.
    statsd_metrics["processing_time"] = (int)Math.Round(sw.Elapsed.TotalSeconds);
}
/// <summary>
/// Performs one flush cycle: snapshots and clears the live metric accumulators
/// under the write lock, computes derived statistics via ProcessMetrics,
/// optionally dumps the snapshot to the console, and raises the flush callbacks.
/// </summary>
public void FlushMetrics()
{
    flushStopwatch.Restart();
    if (BeforeFlush != null) { BeforeFlush(); }
    // Flush timestamp in unix seconds.
    var time_stamp = (long)Math.Round(DateTimeToUnixTimestamp(DateTime.UtcNow)); // seconds
    if (old_timestamp > 0)
    {
        // Drift of this flush beyond the expected interval since the previous flush.
        gauges["statsd.timestamp_lag_namespace"] = (time_stamp - old_timestamp - (FlushInterval / 1000));
    }
    old_timestamp = time_stamp;
    // Copy every accumulator out under the write lock so the snapshot is
    // internally consistent, then reset the live state for the next interval.
    Metrics metrics = null;
    flushMetricsReaderWriterLock.EnterWriteLock();
    try
    {
        metrics = new Metrics
        {
            counters = new Dictionary <string, long>(counters),
            gauges = new Dictionary <string, long>(gauges),
            timers = new Dictionary <string, List <long> >(timers),
            timer_counters = new Dictionary <string, long>(timer_counters),
            sets = new Dictionary <string, HashSet <string> >(sets),
            counter_rates = new Dictionary <string, long>(),
            timer_data = new Dictionary <string, Dictionary <string, long> >(),
            pctThreshold = PctThreshold,
            statsd_metrics = new Dictionary <string, long>(),
        };
        ClearMetrics();
    }
    finally
    {
        flushMetricsReaderWriterLock.ExitWriteLock();
    }
    // Derive counter rates and timer statistics on the snapshot (outside the lock).
    ProcessMetrics(metrics, FlushInterval, time_stamp);
    if (FlushToConsole)
    {
        Console.Clear();
        Console.WriteLine("Flush=" + time_stamp);
        foreach (var item in metrics.counters)
        {
            Console.WriteLine("stats.counters.{0}.count = {1}", item.Key, item.Value);
            Console.WriteLine("stats.counters.{0}.rate = {1}", item.Key, metrics.counter_rates[item.Key]);
        }
        foreach (var item in metrics.timers)
        {
            foreach (var data in metrics.timer_data[item.Key])
            {
                Console.WriteLine("stats.timers.{0}.{1} = {2}", item.Key, data.Key, data.Value);
            }
        }
        foreach (var item in metrics.gauges)
        {
            Console.WriteLine("stats.gauges.{0} = {1}", item.Key, item.Value);
        }
        foreach (var item in metrics.sets)
        {
            Console.WriteLine("stats.sets.{0}.count = {1}", item.Key, item.Value.Count);
        }
        Console.WriteLine("Flush End=" + time_stamp);
    }
    if (OnFlush != null) { OnFlush(time_stamp, metrics); }
    flushStopwatch.Stop();
    // Record how long this flush took as a gauge for the next flush to report.
    InReadLock(() => { AddToGauge("statsd.flush_duration", (int)Math.Round(flushStopwatch.Elapsed.TotalMilliseconds)); });
}