public virtual void TestContainerMetricsLimit()
{
    string Err = "Error in number of records";
    MetricsSystem system = Org.Mockito.Mockito.Mock<MetricsSystem>();
    Org.Mockito.Mockito.DoReturn(this).When(system).Register(Matchers.AnyString(),
        Matchers.AnyString(), Matchers.Any());
    MetricsCollectorImpl collector = new MetricsCollectorImpl();
    ContainerId containerId = Org.Mockito.Mockito.Mock<ContainerId>();
    ContainerMetrics metrics = ContainerMetrics.ForContainer(containerId, 100, 1);
    int anyPmemLimit = 1024;
    int anyVmemLimit = 2048;
    int anyVcores = 10;
    string anyProcessId = "1234";
    metrics.RecordResourceLimit(anyVmemLimit, anyPmemLimit, anyVcores);
    metrics.RecordProcessId(anyProcessId);
    Sharpen.Thread.Sleep(110);
    metrics.GetMetrics(collector, true);
    NUnit.Framework.Assert.AreEqual(Err, 1, collector.GetRecords().Count);
    MetricsRecord record = collector.GetRecords()[0];
    MetricsRecords.AssertTag(record, ContainerMetrics.ProcessidInfo.Name(), anyProcessId);
    MetricsRecords.AssertMetric(record, ContainerMetrics.PmemLimitMetricName, anyPmemLimit);
    MetricsRecords.AssertMetric(record, ContainerMetrics.VmemLimitMetricName, anyVmemLimit);
    MetricsRecords.AssertMetric(record, ContainerMetrics.VcoreLimitMetricName, anyVcores);
    collector.Clear();
}
/// <summary>Asserts that filters with the given configuration reject the given record.</summary>
/// <param name="conf">SubsetConfiguration containing filter configuration</param>
/// <param name="record">MetricsRecord to check</param>
internal static void ShouldReject(SubsetConfiguration conf, MetricsRecord record)
{
    Assert.True("rejects " + record, !NewGlobFilter(conf).Accepts(record));
    Assert.True("rejects " + record, !NewRegexFilter(conf).Accepts(record));
}
public void TestMetricsRecord()
{
    const string name = "recname";
    const string desc = "rec desc";
    const long timeStamp = 1000;
    const string context = "context";
    const string counterName = "counter";
    const long counterValue = 2;
    const string gaugeName = "gauge";
    const double gaugeValue = 3.0;
    const string tagName = "tagName";
    const string tagValue = "tagValue";
    IList<IImmutableMetric> metrics = new List<IImmutableMetric>();
    metrics.Add(new ImmutableCounter(new MetricsInfoImpl(counterName, counterName), counterValue));
    metrics.Add(new ImmutableDoubleGauge(new MetricsInfoImpl(gaugeName, gaugeName), gaugeValue));
    IList<MetricsTag> tags = new List<MetricsTag>();
    tags.Add(new MetricsTag(new MetricsInfoImpl(tagName, tagName), tagValue));
    MetricsInfoImpl info = new MetricsInfoImpl(name, desc);
    MetricsRecord record = new MetricsRecord(info, timeStamp, metrics, tags, context);
    Assert.Equal(name, record.Name);
    Assert.Equal(desc, record.Description);
    Assert.Equal(context, record.Context);
    Assert.Equal(timeStamp, record.Timestamp);
    Assert.Equal(metrics, record.Metrics);
    Assert.Equal(tags, record.Tags);
}
public virtual void PutMetrics(MetricsRecord record)
{
    while (!closed)
    {
        collectingLatch.CountDown();
    }
}
/// <summary>Update the cache and return the current cached record</summary>
/// <param name="mr">the update record</param>
/// <param name="includingTags">cache tag values (for later lookup by name) if true</param>
/// <returns>the updated cache record</returns>
public virtual MetricsCache.Record Update(MetricsRecord mr, bool includingTags)
{
    string name = mr.Name();
    MetricsCache.RecordCache recordCache = map[name];
    if (recordCache == null)
    {
        recordCache = new MetricsCache.RecordCache(this);
        map[name] = recordCache;
    }
    ICollection<MetricsTag> tags = mr.Tags();
    MetricsCache.Record record = recordCache[tags];
    if (record == null)
    {
        record = new MetricsCache.Record();
        recordCache[tags] = record;
    }
    foreach (AbstractMetric m in mr.Metrics())
    {
        record.metrics[m.Name()] = m;
    }
    if (includingTags)
    {
        // mostly for some sinks that include tags as part of a dense schema
        foreach (MetricsTag t in mr.Tags())
        {
            record.tags[t.Name()] = t.Value();
        }
    }
    return record;
}
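// A minimal usage sketch (hypothetical caller; the record "mr" and the names "m" and "t"
// are assumed for illustration, mirroring the cache tests below). Updating with
// includingTags = true also caches tag values so they can be read back by name:
//
//   MetricsCache cache = new MetricsCache();
//   MetricsCache.Record cached = cache.Update(mr, true);
//   object metricValue = cached.GetMetric("m"); // merged metric value, looked up by name
//   string tagValue = cached.GetTag("t");       // tag lookup enabled by includingTags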
public virtual void PutMetrics(MetricsRecord record)
{
    writer.Write(record.Timestamp());
    writer.Write(" ");
    writer.Write(record.Context());
    writer.Write(".");
    writer.Write(record.Name());
    string separator = ": ";
    foreach (MetricsTag tag in record.Tags())
    {
        writer.Write(separator);
        separator = ", ";
        writer.Write(tag.Name());
        writer.Write("=");
        writer.Write(tag.Value());
    }
    foreach (AbstractMetric metric in record.Metrics())
    {
        writer.Write(separator);
        separator = ", ";
        writer.Write(metric.Name());
        writer.Write("=");
        writer.Write(metric.Value());
    }
    writer.WriteLine();
}
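// Illustrative output (not captured from a real run): for a record named "recname"
// in context "context" at timestamp 1000, with one tag and two metrics, the sink
// above would write a single line shaped like:
//
//   1000 context.recname: tagName=tagValue, counter=2, gauge=3.0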
// List containing nc1 and nc2.
/// <summary>
/// Initializes, for testing, two NoEmitMetricsContext instances, and adds one record
/// with tags and metrics to the first of them.
/// </summary>
/// <exception cref="System.IO.IOException"/>
protected override void SetUp()
{
    nc1 = new NoEmitMetricsContext();
    nc1.Init("test1", ContextFactory.GetFactory());
    nc2 = new NoEmitMetricsContext();
    nc2.Init("test2", ContextFactory.GetFactory());
    contexts = new AList<MetricsContext>();
    contexts.AddItem(nc1);
    contexts.AddItem(nc2);
    MetricsRecord r = nc1.CreateRecord("testRecord");
    r.SetTag("testTag1", "testTagValue1");
    r.SetTag("testTag2", "testTagValue2");
    r.SetMetric("testMetric1", 1);
    r.SetMetric("testMetric2", 33);
    r.Update();
    IDictionary<string, ICollection<OutputRecord>> m = nc1.GetAllRecords();
    Assert.Equal(1, m.Count);
    Assert.Equal(1, m.Values.Count);
    ICollection<OutputRecord> outputRecords = m.Values.GetEnumerator().Next();
    Assert.Equal(1, outputRecords.Count);
    outputRecord = outputRecords.GetEnumerator().Next();
}
public static void AssertMetric(MetricsRecord record, string metricName, Number expectedValue)
{
    AbstractMetric resourceLimitMetric = GetFirstMetricByName(record, metricName);
    NUnit.Framework.Assert.IsNotNull(resourceLimitMetric);
    Assert.Equal(expectedValue, resourceLimitMetric.Value());
}
/// <summary>
/// Utility method to create and return a new metrics record instance within the
/// given context.
/// </summary>
/// <remarks>
/// Utility method to create and return a new metrics record instance within the
/// given context. This record is tagged with the host name.
/// </remarks>
/// <param name="context">the context</param>
/// <param name="recordName">name of the record</param>
/// <returns>newly created metrics record</returns>
public static MetricsRecord CreateRecord(MetricsContext context, string recordName)
{
    MetricsRecord metricsRecord = context.CreateRecord(recordName);
    metricsRecord.SetTag("hostName", GetHostName());
    return metricsRecord;
}
public static void AssertTag(MetricsRecord record, string tagName, string expectedValue)
{
    MetricsTag processIdTag = GetFirstTagByName(record, tagName);
    NUnit.Framework.Assert.IsNotNull(processIdTag);
    Assert.Equal(expectedValue, processIdTag.Value());
}
public virtual void TestNullTag()
{
    MetricsCache cache = new MetricsCache();
    MetricsRecord mr = MakeRecord("r", Arrays.AsList(MakeTag("t", null)),
        Arrays.AsList(MakeMetric("m", 0), MakeMetric("m1", 1)));
    MetricsCache.Record cr = cache.Update(mr);
    Assert.True("t value should be null", null == cr.GetTag("t"));
}
/// <summary>Creates a mock MetricsRecord with the given name and tags.</summary>
/// <param name="name">String name</param>
/// <param name="tags">List<MetricsTag> tags</param>
/// <returns>MetricsRecord newly created mock</returns>
private static MetricsRecord MockMetricsRecord(string name, IList<MetricsTag> tags)
{
    MetricsRecord record = Org.Mockito.Mockito.Mock<MetricsRecord>();
    Org.Mockito.Mockito.When(record.Name()).ThenReturn(name);
    Org.Mockito.Mockito.When(record.Tags()).ThenReturn(tags);
    return record;
}
/// <summary>Creates a new instance of JvmMetrics</summary>
private JvmMetrics(string processName, string sessionId, string recordName)
{
    MetricsContext context = MetricsUtil.GetContext("jvm");
    metrics = MetricsUtil.CreateRecord(context, recordName);
    metrics.SetTag("processName", processName);
    metrics.SetTag("sessionId", sessionId);
    context.RegisterUpdater(this);
}
private MetricsRecord MakeRecord(string name, ICollection<MetricsTag> tags,
    ICollection<AbstractMetric> metrics)
{
    MetricsRecord mr = Org.Mockito.Mockito.Mock<MetricsRecord>();
    Org.Mockito.Mockito.When(mr.Name()).ThenReturn(name);
    Org.Mockito.Mockito.When(mr.Tags()).ThenReturn(tags);
    Org.Mockito.Mockito.When(mr.Metrics()).ThenReturn(metrics);
    return mr;
}
/// <summary>Push the metric to the mr.</summary>
/// <remarks>
/// Push the metric to the mr.
/// The metric is pushed only if it was updated since the last push.
/// Note this does NOT push to JMX
/// (JMX gets the info via
/// <see cref="Get()"/>).
/// </remarks>
/// <param name="mr"/>
public override void PushMetric(MetricsRecord mr)
{
    lock (this)
    {
        if (changed)
        {
            mr.SetMetric(GetName(), value);
        }
        changed = false;
    }
}
public override bool Equals(object obj)
{
    if (obj is MetricsRecord)
    {
        MetricsRecord other = (MetricsRecord)obj;
        return Objects.Equal(Timestamp(), other.Timestamp())
            && Objects.Equal(Name(), other.Name())
            && Objects.Equal(Description(), other.Description())
            && Objects.Equal(Tags(), other.Tags())
            && Iterables.ElementsEqual(Metrics(), other.Metrics());
    }
    return false;
}
internal ShuffleClientMetrics(TaskAttemptID reduceId, JobConf jobConf)
{
    this.numCopiers = jobConf.GetInt(MRJobConfig.ShuffleParallelCopies, 5);
    MetricsContext metricsContext = MetricsUtil.GetContext("mapred");
    this.shuffleMetrics = MetricsUtil.CreateRecord(metricsContext, "shuffleInput");
    this.shuffleMetrics.SetTag("user", jobConf.GetUser());
    this.shuffleMetrics.SetTag("jobName", jobConf.GetJobName());
    this.shuffleMetrics.SetTag("jobId", reduceId.GetJobID().ToString());
    this.shuffleMetrics.SetTag("taskId", reduceId.ToString());
    this.shuffleMetrics.SetTag("sessionId", jobConf.GetSessionId());
    metricsContext.RegisterUpdater(this);
}
public LocalJobRunnerMetrics(JobConf conf)
{
    string sessionId = conf.GetSessionId();
    // Initialize JVM metrics
    JvmMetrics.Init("JobTracker", sessionId);
    // Create a record for map-reduce metrics
    MetricsContext context = MetricsUtil.GetContext("mapred");
    // record name is jobtracker for compatibility
    metricsRecord = MetricsUtil.CreateRecord(context, "jobtracker");
    metricsRecord.SetTag("sessionId", sessionId);
    context.RegisterUpdater(this);
}
/// <summary>Push the delta metrics to the mr.</summary>
/// <remarks>
/// Push the delta metrics to the mr.
/// The delta is since the last push/interval.
/// Note this does NOT push to JMX
/// (JMX gets the info via
/// <see cref="previousIntervalValue"/>).
/// </remarks>
/// <param name="mr"/>
public override void PushMetric(MetricsRecord mr)
{
    lock (this)
    {
        IntervalHeartBeat();
        try
        {
            mr.IncrMetric(GetName(), GetPreviousIntervalValue());
        }
        catch (Exception e)
        {
            Log.Info("pushMetric failed for " + GetName() + "\n", e);
        }
    }
}
/// <summary>Push the delta metrics to the mr.</summary>
/// <remarks>
/// Push the delta metrics to the mr.
/// The delta is since the last push/interval.
/// Note this does NOT push to JMX
/// (JMX gets the info via
/// <see cref="GetPreviousIntervalAverageTime()"/>
/// and
/// <see cref="GetPreviousIntervalNumOps()"/>).
/// </remarks>
/// <param name="mr"/>
public override void PushMetric(MetricsRecord mr)
{
    lock (this)
    {
        IntervalHeartBeat();
        try
        {
            mr.IncrMetric(GetName() + "_num_ops", GetPreviousIntervalNumOps());
            mr.SetMetric(GetName() + "_avg_time", GetPreviousIntervalAverageTime());
        }
        catch (Exception e)
        {
            Log.Info("pushMetric failed for " + GetName() + "\n", e);
        }
    }
}
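// Illustrative effect (metric name assumed for illustration): a rate metric named
// "rpcQueueTime" would push two derived values per interval into the record:
//
//   mr.IncrMetric("rpcQueueTime_num_ops", /* ops in previous interval */);
//   mr.SetMetric("rpcQueueTime_avg_time", /* average time in previous interval */);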
public virtual void TestGet()
{
    MetricsCache cache = new MetricsCache();
    NUnit.Framework.Assert.IsNull("empty", cache.Get("r", Arrays.AsList(MakeTag("t", "t"))));
    MetricsRecord mr = MakeRecord("r", Arrays.AsList(MakeTag("t", "t")),
        Arrays.AsList(MakeMetric("m", 1)));
    cache.Update(mr);
    MetricsCache.Record cr = cache.Get("r", mr.Tags());
    Log.Debug("tags=" + mr.Tags() + " cr=" + cr);
    NUnit.Framework.Assert.IsNotNull("Got record", cr);
    Assert.Equal("contains 1 metric", 1, cr.Metrics().Count);
    CheckMetricValue("new metric value", cr, "m", 1);
}
private void CheckMetricsRecords(IList<MetricsRecord> recs)
{
    Log.Debug(recs);
    MetricsRecord r = recs[0];
    Assert.Equal("name", "s1rec", r.Name());
    MoreAsserts.AssertEquals("tags", new MetricsTag[] { Interns.Tag(MsInfo.Context, "test"),
        Interns.Tag(MsInfo.Hostname, hostname) }, r.Tags());
    MoreAsserts.AssertEquals("metrics", ((MetricsRecordBuilderImpl)((MetricsRecordBuilderImpl)
        ((MetricsRecordBuilderImpl)((MetricsRecordBuilderImpl)MetricsLists.Builder(string.Empty)
        .AddCounter(Interns.Info("C1", "C1 desc"), 1L))
        .AddGauge(Interns.Info("G1", "G1 desc"), 2L))
        .AddCounter(Interns.Info("S1NumOps", "Number of ops for s1"), 1L))
        .AddGauge(Interns.Info("S1AvgTime", "Average time for s1"), 0.0)).Metrics(), r.Metrics());
    r = recs[1];
    Assert.True("NumActiveSinks should be 3",
        Iterables.Contains(r.Metrics(), new MetricGaugeInt(MsInfo.NumActiveSinks, 3)));
}
public virtual void PutMetrics(MetricsRecord record)
{
    // No need to hang every time, just the first record.
    if (!firstTime)
    {
        gotCalledSecondTime = true;
        return;
    }
    firstTime = false;
    try
    {
        Thread.Sleep(10 * 1000);
    }
    catch (Exception)
    {
        interrupted = true;
    }
}
/// <summary>Push the metric to the mr.</summary>
/// <remarks>
/// Push the metric to the mr.
/// The metric is pushed only if it was updated since the last push.
/// Note this does NOT push to JMX
/// (JMX gets the info via
/// <see cref="Get()"/>).
/// </remarks>
/// <param name="mr"/>
public override void PushMetric(MetricsRecord mr)
{
    lock (this)
    {
        if (changed)
        {
            try
            {
                mr.SetMetric(GetName(), value);
            }
            catch (Exception e)
            {
                Log.Info("pushMetric failed for " + GetName() + "\n", e);
            }
        }
        changed = false;
    }
}
public virtual void PutMetrics(MetricsRecord record)
{
    StringBuilder lines = new StringBuilder();
    StringBuilder metricsPathPrefix = new StringBuilder();
    // Configure the hierarchical place to display the graph.
    metricsPathPrefix.Append(metricsPrefix).Append(".").Append(record.Context())
        .Append(".").Append(record.Name());
    foreach (MetricsTag tag in record.Tags())
    {
        if (tag.Value() != null)
        {
            metricsPathPrefix.Append(".");
            metricsPathPrefix.Append(tag.Name());
            metricsPathPrefix.Append("=");
            metricsPathPrefix.Append(tag.Value());
        }
    }
    // The record timestamp is in milliseconds while Graphite expects an epoch time in seconds.
    long timestamp = record.Timestamp() / 1000L;
    // Collect datapoints.
    foreach (AbstractMetric metric in record.Metrics())
    {
        lines.Append(metricsPathPrefix.ToString() + "." + metric.Name().Replace(' ', '.'))
            .Append(" ").Append(metric.Value()).Append(" ").Append(timestamp).Append("\n");
    }
    try
    {
        graphite.Write(lines.ToString());
    }
    catch (Exception e)
    {
        Log.Warn("Error sending metrics to Graphite", e);
        try
        {
            graphite.Close();
        }
        catch (Exception e1)
        {
            throw new MetricsException("Error closing connection to Graphite", e1);
        }
    }
}
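// Illustrative Graphite plaintext line (values assumed): with metricsPrefix "hadoop",
// context "context", record "recname", a tag tagName=tagValue, and a metric "counter"
// with value 2 at a record timestamp of 1000 ms, the sink above would emit:
//
//   hadoop.context.recname.tagName=tagValue.counter 2 1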
public virtual void PutMetrics(MetricsRecord record)
{
    string prefix = "threadSourceRec";
    if (record.Name().StartsWith(prefix))
    {
        int recordNumber = System.Convert.ToInt32(Runtime.Substring(record.Name(), prefix.Length));
        AList<string> names = new AList<string>();
        foreach (AbstractMetric m in record.Metrics())
        {
            if (Runtime.EqualsIgnoreCase(m.Name(), "g1"))
            {
                collected[recordNumber].Set(m.Value());
                return;
            }
            names.AddItem(m.Name());
        }
    }
}
public virtual void AppendPrefix(MetricsRecord record, StringBuilder sb)
{
    string contextName = record.Context();
    ICollection<MetricsTag> tags = record.Tags();
    if (useTagsMap.Contains(contextName))
    {
        ICollection<string> useTags = useTagsMap[contextName];
        foreach (MetricsTag t in tags)
        {
            if (useTags == null || useTags.Contains(t.Name()))
            {
                // the context is always skipped here because it is always added;
                // the hostname is always skipped to avoid case mismatches
                // from different DNSes.
                if (t.Info() != MsInfo.Context && t.Info() != MsInfo.Hostname && t.Value() != null)
                {
                    sb.Append('.').Append(t.Name()).Append('=').Append(t.Value());
                }
            }
        }
    }
}
public virtual void TestUpdate()
{
    MetricsCache cache = new MetricsCache();
    MetricsRecord mr = MakeRecord("r", Arrays.AsList(MakeTag("t", "tv")),
        Arrays.AsList(MakeMetric("m", 0), MakeMetric("m1", 1)));
    MetricsCache.Record cr = cache.Update(mr);
    Org.Mockito.Mockito.Verify(mr).Name();
    Org.Mockito.Mockito.Verify(mr).Tags();
    Org.Mockito.Mockito.Verify(mr).Metrics();
    Assert.Equal("same record size", cr.Metrics().Count,
        ((ICollection<AbstractMetric>)mr.Metrics()).Count);
    Assert.Equal("same metric value", 0, cr.GetMetric("m"));
    MetricsRecord mr2 = MakeRecord("r", Arrays.AsList(MakeTag("t", "tv")),
        Arrays.AsList(MakeMetric("m", 2), MakeMetric("m2", 42)));
    cr = cache.Update(mr2);
    Assert.Equal("contains 3 metric", 3, cr.Metrics().Count);
    CheckMetricValue("updated metric value", cr, "m", 2);
    CheckMetricValue("old metric value", cr, "m1", 1);
    CheckMetricValue("new metric value", cr, "m2", 42);
    // different tag value
    MetricsRecord mr3 = MakeRecord("r", Arrays.AsList(MakeTag("t", "tv3")),
        Arrays.AsList(MakeMetric("m3", 3)));
    // should get a new record
    cr = cache.Update(mr3);
    Assert.Equal("contains 1 metric", 1, cr.Metrics().Count);
    CheckMetricValue("updated metric value", cr, "m3", 3);
    // tags cache should be empty so far
    Assert.Equal("no tags", 0, cr.Tags().Count);
    // until now
    cr = cache.Update(mr3, true);
    Assert.Equal("Got 1 tag", 1, cr.Tags().Count);
    Assert.Equal("Tag value", "tv3", cr.GetTag("t"));
    CheckMetricValue("Metric value", cr, "m3", 3);
}
public override void PutMetrics(MetricsRecord record)
{
    // This method handles both cases: whether Ganglia supports dense publish
    // of metrics or sparse (only on change) publish of metrics.
    try
    {
        string recordName = record.Name();
        string contextName = record.Context();
        StringBuilder sb = new StringBuilder();
        sb.Append(contextName);
        sb.Append('.');
        sb.Append(recordName);
        AppendPrefix(record, sb);
        string groupName = sb.ToString();
        sb.Append('.');
        int sbBaseLen = sb.Length;
        string type = null;
        AbstractGangliaSink.GangliaSlope slopeFromMetric = null;
        AbstractGangliaSink.GangliaSlope calculatedSlope = null;
        MetricsCache.Record cachedMetrics = null;
        // reset the buffer to the beginning
        ResetBuffer();
        if (!IsSupportSparseMetrics())
        {
            // for sending dense metrics, update the metrics cache
            // and get the updated data
            cachedMetrics = metricsCache.Update(record);
            if (cachedMetrics != null && cachedMetrics.MetricsEntrySet() != null)
            {
                foreach (KeyValuePair<string, AbstractMetric> entry in cachedMetrics.MetricsEntrySet())
                {
                    AbstractMetric metric = entry.Value;
                    sb.Append(metric.Name());
                    string name = sb.ToString();
                    // visit the metric to identify the Ganglia type and slope
                    metric.Visit(gangliaMetricVisitor);
                    type = gangliaMetricVisitor.GetType();
                    slopeFromMetric = gangliaMetricVisitor.GetSlope();
                    GangliaConf gConf = GetGangliaConfForMetric(name);
                    calculatedSlope = CalculateSlope(gConf, slopeFromMetric);
                    // send metric to Ganglia
                    EmitMetric(groupName, name, type, metric.Value().ToString(), gConf, calculatedSlope);
                    // reset the length of the buffer for the next iteration
                    sb.Length = sbBaseLen;
                }
            }
        }
        else
        {
            // we support sparse updates
            ICollection<AbstractMetric> metrics = (ICollection<AbstractMetric>)record.Metrics();
            if (metrics.Count > 0)
            {
                // we got metrics, so send the latest
                foreach (AbstractMetric metric in record.Metrics())
                {
                    sb.Append(metric.Name());
                    string name = sb.ToString();
                    // visit the metric to identify the Ganglia type and slope
                    metric.Visit(gangliaMetricVisitor);
                    type = gangliaMetricVisitor.GetType();
                    slopeFromMetric = gangliaMetricVisitor.GetSlope();
                    GangliaConf gConf = GetGangliaConfForMetric(name);
                    calculatedSlope = CalculateSlope(gConf, slopeFromMetric);
                    // send metric to Ganglia
                    EmitMetric(groupName, name, type, metric.Value().ToString(), gConf, calculatedSlope);
                    // reset the length of the buffer for the next iteration
                    sb.Length = sbBaseLen;
                }
            }
        }
    }
    catch (IOException io)
    {
        throw new MetricsException("Failed to putMetrics", io);
    }
}
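// Illustrative naming (values assumed): for context "context" and record "recname",
// with AppendPrefix contributing ".tagName=tagValue", a metric "counter" is emitted as
//
//   group name:  context.recname.tagName=tagValue
//   metric name: context.recname.tagName=tagValue.counter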
internal MetricsRecordFiltered(MetricsRecord delegate_, MetricsFilter filter)
{
    this.delegate_ = delegate_;
    this.filter = filter;
}