public void LoggerNameIsCopied(string loggerName, DateTime dateTime, MetricsData metricsData, LoggingEventMapper sut)
{
    // Act: map the metrics snapshot into a log4net logging event.
    var loggingEvent = sut.MapToLoggingEvent(loggerName, dateTime, metricsData);

    // Assert: the supplied logger name must be carried over unchanged.
    Assert.Equal(loggerName, loggingEvent.LoggerName);
}
public void TicksIsCreated(string loggerName, DateTime dateTime, MetricsData metricsData, LoggingEventMapper sut)
{
    var loggingEvent = sut.MapToLoggingEvent(loggerName, dateTime, metricsData);

    // The timestamp's tick count is stored as a decimal string under "Ticks".
    var expectedTicks = dateTime.Ticks.ToString("D");
    Assert.Equal(expectedTicks, loggingEvent.Properties["Ticks"]);
}
/// <summary>
/// Computes chromatographic-intensity QC metrics: finds the first and last retention
/// times at which the 20-point moving average of summed MS1 intensity exceeds 10% of
/// the maximum, and records how much of the run lies between those two crossings.
/// </summary>
/// <param name="qcData">Container receiving the computed metrics.</param>
/// <param name="rawData">Source of scan indices, per-scan metadata and retention times.</param>
/// <param name="metrics">Unused here; kept for interface compatibility with callers.</param>
public static void ChromIntMetrics(this QcDataContainer qcData, RawDataCollection rawData, MetricsData metrics)
{
    double firstRtToExceed10 = 0;
    double lastRtToExceed10 = 0;

    var scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms];
    var totalIntList = (from x in scans select rawData.metaData[x].SummedIntensity).ToArray();

    // Threshold is 10% of the maximum summed intensity across all MS1 scans.
    // (The original comment incorrectly called this "Q1".)
    double threshold = totalIntList.Max() / 10;

    // First retention time where the smoothed intensity exceeds the threshold.
    for (int i = 0; i < scans.Length; i++)
    {
        if (totalIntList.MovingAverage(i, 20) > threshold)
        {
            firstRtToExceed10 = rawData.retentionTimes[scans[i]];
            break;
        }
    }

    // Last retention time where the smoothed intensity exceeds the threshold
    // (scan backwards from the end).
    for (int i = scans.Length - 1; i >= 0; i--)
    {
        if (totalIntList.MovingAverage(i, 20) > threshold)
        {
            lastRtToExceed10 = rawData.retentionTimes[scans[i]];
            break;
        }
    }

    // Retention time of the final MS1 scan; hoisted because it is used twice below
    // (the original recomputed the full enumerator lookup each time).
    double lastMs1Rt = rawData.retentionTimes[scans.Last()];

    // Proportion of the run encompassed between the two threshold crossings.
    double proportionCovered = (lastRtToExceed10 - firstRtToExceed10) / lastMs1Rt;

    qcData.TimeBeforeFirstScanToExceedPoint1MaxIntensity = firstRtToExceed10;
    qcData.TimeAfterLastScanToExceedPoint1MaxIntensity = lastMs1Rt - lastRtToExceed10;
    qcData.FractionOfRunAbovePoint1MaxIntensity = proportionCovered;
}
public void LevelIsInfo(string loggerName, DateTime dateTime, MetricsData metricsData, LoggingEventMapper sut)
{
    var loggingEvent = sut.MapToLoggingEvent(loggerName, dateTime, metricsData);

    // Mapped metric events are always emitted at Info level.
    Assert.Equal(Level.Info, loggingEvent.Level);
}
/// <summary>
/// Called when an HTTP request has been received by the listener. Routes the two
/// supported endpoints and always queues the next request afterwards.
/// </summary>
/// <param name="asyncResult">Async state from the listener's BeginGetContext call.</param>
private static void OnGetContext(IAsyncResult asyncResult)
{
    var context = httpListener.EndGetContext(asyncResult);
    var urlExists = false;

    // There are only two types of urls available:
    //   http://127.0.0.1:{port}/sensors
    //   http://127.0.0.1:{port}/measures/{date}
    if (context.Request.Url.Segments.Length == 2)
    {
        // If url is http://127.0.0.1:{port}/sensors
        if (context.Request.Url.Segments[1] == "sensors")
        {
            WriteJsonResponse(context, () => MetricsData.GetSensorListWithLastMeasure());
            urlExists = true;
        }
    }
    else if (context.Request.Url.Segments.Length == 3)
    {
        // If url is http://127.0.0.1:{port}/measures/2019-09-09
        if (context.Request.Url.Segments[1].Replace("/", string.Empty) == "measures")
        {
            var date = context.Request.Url.Segments[2];
            WriteJsonResponse(context, () => MetricsData.GetSensorMeasuresForDate(date));
            urlExists = true;
        }
    }

    // If wrong url specified return 404 Not Found error
    if (!urlExists)
    {
        context.Response.StatusCode = 404;
        context.Response.StatusDescription = "Not Found";
        context.Response.Close();
    }

    // Process next request
    ProcessFirstOrNextRequest();
}

/// <summary>
/// Serializes the payload produced by <paramref name="getPayload"/> as JSON onto the
/// response stream. Responds 200 on success, 500 if retrieval or serialization fails.
/// Extracted because the original duplicated this logic for both endpoints.
/// </summary>
/// <param name="context">The request/response context to write to and close.</param>
/// <param name="getPayload">Produces the object to serialize; invoked inside the try.</param>
private static void WriteJsonResponse(System.Net.HttpListenerContext context, Func<object> getPayload)
{
    context.Response.StatusCode = 200;
    context.Response.StatusDescription = "OK";
    context.Response.ContentType = "application/json";
    try
    {
        var jsonData = Newtonsoft.Json.JsonConvert.SerializeObject(getPayload());
        var byteData = Encoding.UTF8.GetBytes(jsonData);
        context.Response.OutputStream.Write(byteData, 0, byteData.Length);
    }
    catch
    {
        // Deliberate best-effort: any failure maps to a 500 without crashing the listener.
        context.Response.StatusCode = 500;
        context.Response.StatusDescription = "Internal Server Error";
    }
    context.Response.Close();
}
public static JsonMetricsContext FromContext(MetricsData contextData)
{
    // Delegate to the full overload with no version/timestamp information.
    return FromContext(contextData, null, null);
}
public static string Serialize(MetricsData metricsData)
{
    // Render the metrics snapshot as JSON using the default clock.
    var builder = new JsonBuilder();
    return builder.BuildJson(metricsData, Clock.Default);
}
public static string BuildJson(MetricsData data)
{
    // Convenience overload: current environment, default clock, default indentation.
    return BuildJson(data, AppEnvironment.Current, Clock.Default, indented: DefaultIndented);
}
/// <summary>
/// Main processing routine: validates the input file list / directory and option
/// combinations, then runs the requested extraction, quantification, metrics,
/// MGF and chromatogram steps over each .raw file.
/// </summary>
/// <param name="opts">Parsed command-line options.</param>
/// <returns>0 on success, 1 on any validation failure.</returns>
static int DoStuff(ArgumentParser.ParseOptions opts)
{
    List<string> files = new List<string>();

    if (opts.InputFiles.Count() > 0) // did the user give us a list of files?
    {
        List<string> problems = new List<string>();
        files = opts.InputFiles.ToList();

        // check if the list provided contains only .raw files
        foreach (string file in files)
        {
            if (!file.EndsWith(".raw", StringComparison.OrdinalIgnoreCase))
            {
                problems.Add(file);
            }
        }

        if (problems.Count == 1)
        {
            Console.WriteLine("\nERROR: {0} does not appear to be a .raw file. Invoke '>RawTools --help' if you need help.", problems.ElementAt(0));
            Log.Error("Invalid file provided: {0}", problems.ElementAt(0));
            return 1;
        }

        if (problems.Count > 1)
        {
            Console.WriteLine("\nERROR: The following {0} files do not appear to be .raw files. Invoke '>RawTools --help' if you need help." + "\n\n{1}",
                problems.Count, String.Join("\n", problems));
            Log.Error("Invalid files provided: {0}", String.Join(" ", problems));
            return 1;
        }

        Log.Information("Files to be processed, provided as list: {0}", String.Join(" ", files));
    }
    else // did the user give us a directory?
    {
        if (Directory.Exists(opts.InputDirectory))
        {
            files = Directory.GetFiles(opts.InputDirectory, "*.*", SearchOption.TopDirectoryOnly)
                    .Where(s => s.EndsWith(".raw", StringComparison.OrdinalIgnoreCase)).ToList();
        }
        else
        {
            Console.WriteLine("ERROR: The provided directory does not appear to be valid.");
            Log.Error("Invalid directory provided: {0}", opts.InputDirectory);
            return 1;
        }

        Log.Information("Files to be processed, provided as directory: {0}", String.Join(" ", files));
    }

    if (opts.Quant)
    {
        List<string> possible = new List<string>() { "TMT0", "TMT2", "TMT6", "TMT10", "TMT11", "iTRAQ4", "iTRAQ8" };
        if (!possible.Contains(opts.LabelingReagents))
        {
            Console.WriteLine("ERROR: For quantification, the labeling reagent must be one of {TMT0, TMT2, TMT6, TMT10, TMT11, iTRAQ4, iTRAQ8}");
            Log.Error("Invalid labeling reagent provided: {0}", opts.LabelingReagents);
            return 1;
        }
    }

    if (opts.Chromatogram != null)
    {
        // BUG FIX: the original list repeated "1TB", "2TB" and "3TB"; duplicates removed.
        List<string> possible = new List<string>() { "1T", "2T", "3T", "1B", "2B", "3B", "1TB", "2TB", "3TB" };
        if (!possible.Contains(opts.Chromatogram))
        {
            Console.WriteLine("ERROR: Incorrect format for --chro. See help.");
            Log.Error("Invalid chromatogram argument provided: {Chro}", opts.Chromatogram);
            return 1;
        }
    }

    System.Diagnostics.Stopwatch singleFileTime = new System.Diagnostics.Stopwatch();
    System.Diagnostics.Stopwatch totalTime = new System.Diagnostics.Stopwatch();
    totalTime.Start();

    foreach (string file in files)
    {
        singleFileTime.Start();
        Console.WriteLine("\nProcessing: {0}\n", file);

        using (IRawDataPlus rawFile = RawFileReaderFactory.ReadFile(fileName: file))
        {
            rawFile.SelectInstrument(Device.MS, 1);
            Log.Information("Now processing: {File} --- Instrument: {Instrument}",
                Path.GetFileName(file), rawFile.GetInstrumentData().Name);

            RawDataCollection rawData = new RawDataCollection(rawFile: rawFile);
            // NOTE: the original also created an unused local QuantDataCollection here;
            // quantification writes into rawData.quantData, so it has been removed.

            bool isBoxCar = rawData.isBoxCar;
            if (isBoxCar)
            {
                Console.WriteLine("\nRaw file appears to be a boxcar-type experiment. Precursor peak analysis won't be performed!\n");
            }

            // Full extraction is needed for parsing, metrics and quantification.
            if (opts.ParseData || opts.Metrics || opts.Quant)
            {
                rawData.ExtractAll(rawFile);
                if (!isBoxCar)
                {
                    rawData.CalcPeakRetTimesAndInts(rawFile: rawFile);
                }
            }

            if (opts.Quant)
            {
                rawData.quantData.Quantify(rawData: rawData, rawFile: rawFile, labelingReagent: opts.LabelingReagents);
            }

            if (opts.UnlabeledQuant && !isBoxCar)
            {
                rawData.QuantifyPrecursorPeaks(rawFile);
            }

            if (opts.Metrics)
            {
                rawData.metaData.AggregateMetaData(rawData, rawFile);
            }

            if (opts.ParseData || opts.Quant)
            {
                if (opts.Quant)
                {
                    Parse.WriteMatrix(rawData: rawData, rawFile: rawFile, metaData: rawData.metaData,
                        quantData: rawData.quantData, outputDirectory: opts.OutputDirectory);
                }
                else
                {
                    Parse.WriteMatrix(rawData: rawData, rawFile: rawFile, metaData: rawData.metaData,
                        outputDirectory: opts.OutputDirectory);
                }
            }

            if (opts.WriteMGF)
            {
                MGF.WriteMGF(rawData: rawData, rawFile: rawFile, outputDirectory: opts.OutputDirectory,
                    cutoff: opts.MassCutOff, intensityCutoff: opts.IntensityCutoff);
            }

            if (opts.Metrics)
            {
                MetricsData metricsData = new MetricsData();
                // BUG FIX: the original invoked GetMetricsData a THIRD time (without quant
                // data) after this if/else, redundantly recomputing all metrics.
                if (opts.Quant)
                {
                    metricsData.GetMetricsData(metaData: rawData.metaData, rawData: rawData,
                        rawFile: rawFile, quantData: rawData.quantData);
                }
                else
                {
                    metricsData.GetMetricsData(metaData: rawData.metaData, rawData: rawData, rawFile: rawFile);
                }
                Metrics.WriteMatrix(rawData, metricsData, opts.OutputDirectory);
            }

            if (opts.Chromatogram != null)
            {
                // First character of the --chro argument is the MS order (1, 2 or 3).
                int order = Convert.ToInt32(opts.Chromatogram.ElementAt(0).ToString());
                if (order > (int)rawData.methodData.AnalysisOrder)
                {
                    Log.Error("Specified MS order ({Order}) for chromatogram is higher than experiment order ({ExpOrder})",
                        (MSOrderType)order, rawData.methodData.AnalysisOrder);
                    Console.WriteLine("Specified MS order ({0}) for chromatogram is higher than experiment order ({1}). Chromatogram(s) won't be written.",
                        (MSOrderType)order, rawData.methodData.AnalysisOrder);
                }
                else
                {
                    rawData.WriteChromatogram(rawFile, (MSOrderType)order,
                        opts.Chromatogram.Contains("T"), opts.Chromatogram.Contains("B"), opts.OutputDirectory);
                }
            }
        }

        singleFileTime.Stop();
        Console.WriteLine("\nElapsed time: {0} s", Math.Round(Convert.ToDouble(singleFileTime.ElapsedMilliseconds) / 1000.0, 2));
        singleFileTime.Reset();
    }

    totalTime.Stop();
    Console.WriteLine("\nTime to process all {0} files: {1}", files.Count, totalTime.Elapsed);
    return 0;
}
public void Consume(MetricsData data)
{
    // Store the snapshot for its context, replacing any previous snapshot.
    contexts.AddOrUpdate(data.Context, data, (key, previous) => data);
}
public void RunReport(MetricsData data, Func<HealthStatus> healthStatus, CancellationToken token)
{
    // Health status and cancellation are intentionally ignored by this reporter.
    Report(data);
}
public static IEnumerable<Google.ICanReportToGoogleAnalytics> AsGoogleAnalytics(this MetricsData metrics)
{
    // Translate the metrics snapshot into Google Analytics report items.
    return MetricsToGoogleMapper.Map(metrics);
}
// Builds a QcDataContainer for one raw file: computes the file's metrics, copies
// the headline numbers into the QC record, and attaches intensity and fill-time
// distributions. qcDirectory and fastaDB are currently unused in this body —
// presumably consumed by callers or a later step; TODO confirm.
public static QcDataContainer ProcessQcData(this QcDataCollection Data, RawDataCollection rawData, IRawDataPlus rawFile, string qcDirectory, string fastaDB = null)
{
    DateTime dateAcquired = rawFile.CreationDate;
    //RawDataCollection rawData = new RawDataCollection(rawFile);

    // Compute the full metrics set for this file (no quant data at this call site).
    MetricsData metricsData = new MetricsData();
    metricsData.GetMetricsData(metaData: rawData.metaData, rawData: rawData, rawFile: rawFile);

    QcDataContainer qcData = new QcDataContainer(rawData.rawFileName, dateAcquired);
    qcData.Instrument = rawData.instrument;
    qcData.ExperimentMsOrder = rawData.methodData.AnalysisOrder;
    qcData.Ms1Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms].ToString();
    qcData.Ms2Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms2].ToString();

    // An MS3 analyzer only exists for MS3 experiments; otherwise report "None".
    if (qcData.ExperimentMsOrder == MSOrderType.Ms3)
    {
        qcData.Ms3Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms3].ToString();
    }
    else
    {
        qcData.Ms3Analyzer = "None";
    }

    // Copy the scalar metrics straight across from the computed MetricsData.
    qcData.TotalScans = metricsData.TotalScans;
    qcData.NumMs1Scans = metricsData.MS1Scans;
    qcData.NumMs2Scans = metricsData.MS2Scans;
    qcData.NumMs3Scans = metricsData.MS3Scans;
    qcData.Ms1ScanRate = metricsData.MS1ScanRate;
    qcData.Ms2ScanRate = metricsData.MS2ScanRate;
    qcData.MeanDutyCycle = metricsData.MeanDutyCycle;
    qcData.MeanTopN = metricsData.MeanTopN;
    qcData.MedianPrecursorIntensity = metricsData.MedianPrecursorIntensity;
    qcData.MedianSummedMs2Intensity = metricsData.MedianSummedMS2Intensity;
    qcData.MedianMs1IsolationInterference = metricsData.MedianMs1IsolationInterference;
    qcData.MedianMs2FractionConsumingTop80PercentTotalIntensity = metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity;
    qcData.NumEsiStabilityFlags = NumberOfEsiFlags(rawData);
    qcData.QuantMeta = metricsData.QuantMeta;
    qcData.GradientTime = metricsData.Gradient;
    qcData.ColumnPeakCapacity = metricsData.PeakCapacity;
    qcData.ChromIntMetrics(rawData, metricsData);

    // Peak-shape medians are only available for non-boxcar runs (peak analysis is
    // skipped for boxcar experiments elsewhere in the pipeline).
    if (!rawData.isBoxCar)
    {
        qcData.PeakShape.Asymmetry.P10 = rawData.peakData.PeakShapeMedians.Asymmetry.P10;
        qcData.PeakShape.Asymmetry.P50 = rawData.peakData.PeakShapeMedians.Asymmetry.P50;
        qcData.PeakShape.Width.P10 = rawData.peakData.PeakShapeMedians.Width.P10;
        qcData.PeakShape.Width.P50 = rawData.peakData.PeakShapeMedians.Width.P50;
    }

    // Median of the per-scan summed centroid intensities across all MS1 scans.
    // NOTE(review): the original comment here described a signal-to-noise
    // distribution, which does not match this computation — confirm intent.
    qcData.MedianSummedMs1Intensity = (from x in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms] select rawData.centroidStreams[x].Intensities.Sum()).ToArray().Percentile(50);

    // Fill-time distributions: collect the injection times per MS level.
    qcData.Ms1FillTime = new Distribution((from x in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms] select rawData.trailerExtras[x].InjectionTime).ToArray());
    qcData.Ms2FillTime = new Distribution((from x in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms2] select rawData.trailerExtras[x].InjectionTime).ToArray());
    // NOTE(review): the MS3 enumerator is indexed unconditionally — presumably
    // ScanEnumerators contains an (empty) MS3 entry even for MS2 experiments;
    // verify, otherwise this would throw for non-MS3 runs.
    qcData.Ms3FillTime = new Distribution((from x in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms3] select rawData.trailerExtras[x].InjectionTime).ToArray());

    //Data.QcData.Add(dateAcquired, newData);
    //Data.ProcessedRawFiles.Add(Path.GetFileName(rawData.rawFileName));
    return(qcData);
}
public void MetricsDataPropertiesAreCopied(string loggerName, DateTime dateTime, MetricsData metricsData, LoggingEventMapper sut)
{
    var loggingEvent = sut.MapToLoggingEvent(loggerName, dateTime, metricsData);

    // The metric's type and name land under fixed property keys...
    Assert.Equal(metricsData.MetricType, loggingEvent.Properties["MetricType"]);
    Assert.Equal(metricsData.MetricName, loggingEvent.Properties["MetricName"]);

    // ...and every metric value is stored, formatted, under its own name.
    foreach (var metricValue in metricsData.Values)
    {
        Assert.Equal(metricValue.FormattedValue, loggingEvent.Properties[metricValue.Name]);
    }
}
public void RunReport(MetricsData metricsData, Func<HealthStatus> healthStatus)
{
    // Convenience overload: run the report without cancellation support.
    RunReport(metricsData, healthStatus, CancellationToken.None);
}
public static void VerifyExpiredMessageCount(string queueName, MetricsData data, long messageCount)
{
    // queueName is unused here; the expired count is read from the metrics snapshot.
    var actualCount = GetExpiredMessageCount(data);

    Assert.Equal(messageCount, actualCount);
}
private static MetricsEndpointResponse GetJsonV2Response(MetricsData data, Func<HealthStatus> healthStatus, MetricsEndpointRequest request)
{
    // healthStatus and request are unused: the v2 JSON payload depends only on data.
    var payload = JsonBuilderV2.BuildJson(data);
    return new MetricsEndpointResponse(payload, JsonBuilderV2.MetricsMimeType);
}
public static string BuildJson(MetricsData data)
{
    // Convenience overload: default clock and default indentation.
    return BuildJson(data, Clock.Default, indented: DefaultIndented);
}
// Populates a MetricsData object with run-level QC metrics computed from the raw
// file: analyzer types, scan counts, fill-time/intensity medians, duty-cycle and
// scan-rate figures, peak-shape metrics (non-boxcar only) and, when quantification
// was performed, reporter-ion intensity summaries.
public static void GetMetricsData(this MetricsData metricsData, ScanMetaDataCollection metaData, RawDataCollection rawData, IRawDataPlus rawFile, QuantDataCollection quantData = null)
{
    // Ensure all prerequisite extraction steps have been run on rawData.
    List<Operations> operations = new List<Operations> { Operations.ScanIndex, Operations.RetentionTimes, Operations.MethodData, Operations.MetaData };
    if (!rawData.isBoxCar)
    {
        // Peak analysis is only performed for non-boxcar experiments.
        operations.Add(Operations.PeakRetAndInt);
        operations.Add(Operations.PeakShape);
    }
    rawData.Check(rawFile, operations);

    metricsData.RawFileName = rawData.rawFileName;
    metricsData.Instrument = rawData.instrument;
    metricsData.MS1Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms];
    metricsData.MS2Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms2];

    // Total analysis time = retention-time span between the first and last scan of any MS order.
    metricsData.TotalAnalysisTime = rawData.retentionTimes[rawData.scanIndex.ScanEnumerators[MSOrderType.Any].Last()] -
                                    rawData.retentionTimes[rawData.scanIndex.ScanEnumerators[MSOrderType.Any].First()];

    metricsData.TotalScans = rawData.scanIndex.allScans.Count();
    metricsData.MS1Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms].Length;
    metricsData.MS2Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms2].Length;

    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MS3Analyzer = rawData.methodData.MassAnalyzers[MSOrderType.Ms3];
        metricsData.MS3Scans = rawData.scanIndex.ScanEnumerators[MSOrderType.Ms3].Length;
    }
    else
    {
        // Sentinel values for experiments without an MS3 stage.
        metricsData.MS3Analyzer = MassAnalyzerType.Any;
        metricsData.MS3Scans = 0;
    }

    metricsData.MSOrder = rawData.methodData.AnalysisOrder;

    // Accumulators for the per-scan values feeding the medians/averages below.
    List<double> ms2intensities = new List<double>();
    List<double> precursorIntensities = new List<double>();
    List<double> ms1fillTimes = new List<double>();
    List<double> ms2fillTimes = new List<double>();
    List<double> ms3fillTimes = new List<double>();
    List<double> ms2scansPerCycle = new List<double>();
    List<double> dutyCycles = new List<double>();
    List<double> fractionConsuming80 = new List<double>();

    // Per-MS1-scan values: fill time, dependent-MS2 count, duty cycle.
    foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms])
    {
        ms1fillTimes.Add(metaData[scan].FillTime);
        ms2scansPerCycle.Add(metaData[scan].MS2ScansPerCycle);
        dutyCycles.Add(metaData[scan].DutyCycle);
    }

    // Per-MS2-scan values: precursor intensity, summed intensity, fill time, and
    // the fraction of peaks consuming the top 80% of total intensity.
    foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms2])
    {
        precursorIntensities.Add(rawData.peakData[scan].ParentIntensity);
        ms2intensities.Add(metaData[scan].SummedIntensity);
        ms2fillTimes.Add(metaData[scan].FillTime);
        fractionConsuming80.Add(metaData[scan].FractionConsumingTop80PercentTotalIntensity);
    }

    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        foreach (int scan in rawData.scanIndex.ScanEnumerators[MSOrderType.Ms3])
        {
            ms3fillTimes.Add(metaData[scan].FillTime);
        }
    }

    // Medians over the accumulated per-scan values.
    metricsData.MedianPrecursorIntensity = precursorIntensities.ToArray().Percentile(50);
    metricsData.MedianMs2FractionConsumingTop80PercentTotalIntensity = fractionConsuming80.ToArray().Percentile(50);
    metricsData.MedianSummedMS2Intensity = ms2intensities.ToArray().Percentile(50);
    metricsData.MedianMS1FillTime = ms1fillTimes.ToArray().Percentile(50);
    metricsData.MedianMS2FillTime = ms2fillTimes.ToArray().Percentile(50);

    if (rawData.methodData.AnalysisOrder == MSOrderType.Ms3)
    {
        metricsData.MedianMS3FillTime = ms3fillTimes.ToArray().Percentile(50);
    }
    else
    {
        // -1 marks "not applicable" for non-MS3 experiments.
        metricsData.MedianMS3FillTime = -1;
    }

    metricsData.MeanTopN = ms2scansPerCycle.Average();
    metricsData.MeanDutyCycle = dutyCycles.Average();

    // Scan rates = scans per unit of TotalAnalysisTime (units follow whatever
    // retentionTimes uses — presumably minutes; TODO confirm).
    metricsData.MS1ScanRate = metricsData.MS1Scans / metricsData.TotalAnalysisTime;
    metricsData.MS2ScanRate = metricsData.MS2Scans / metricsData.TotalAnalysisTime;
    metricsData.MS3ScanRate = metricsData.MS3Scans / metricsData.TotalAnalysisTime;

    // only do the following if it isn't a boxcar experiment
    if (!rawData.isBoxCar)
    {
        metricsData.MedianBaselinePeakWidth = rawData.peakData.PeakShapeMedians.Width.P10;
        metricsData.MedianHalfHeightPeakWidth = rawData.peakData.PeakShapeMedians.Width.P50;
        // we can't access the instrument method in Linux, so we will assume the
        // gradient length is the length of the MS acquisition
        metricsData.Gradient = rawData.retentionTimes[rawData.scanIndex.allScans.Keys.Max()];
        metricsData.PeakCapacity = metricsData.Gradient / metricsData.MedianHalfHeightPeakWidth;
        // NOTE(review): reads Asymmetry.P10 rather than P50 — confirm P10 is the
        // intended source for the "median" asymmetry factor.
        metricsData.MedianAsymmetryFactor = rawData.peakData.PeakShapeMedians.Asymmetry.P10;
    }

    // add isolation interference
    metricsData.MedianMs1IsolationInterference = (from scan in rawData.scanIndex.ScanEnumerators[rawData.methodData.AnalysisOrder]
                                                  select rawData.metaData[scan].Ms1IsolationInterference).ToArray().Percentile(50);

    // now add the quant meta data, if quant was performed
    double medianReporterIntensity = 0;
    QuantMetaData quantMetaData = new QuantMetaData();
    SerializableDictionary<string, double> medianReporterIntensityByChannel = new SerializableDictionary<string, double>();
    // NOTE(review): non-short-circuit '&' — harmless here because the right operand
    // does not dereference quantData, but '&&' was probably intended.
    if (quantData != null & rawData.Performed.Contains(Operations.Quantification))
    {
        string reagent = quantData.LabelingReagents;
        string[] allTags = new LabelingReagents().Reagents[reagent].Labels;
        List<double> allChannels = new List<double>();
        Dictionary<string, List<double>> byChannel = new Dictionary<string, List<double>>();
        foreach (string tag in allTags)
        {
            byChannel.Add(tag, new List<double>());
        }
        // Gather every reporter-ion intensity, both per channel and pooled.
        foreach (int scan in rawData.scanIndex.ScanEnumerators[rawData.methodData.AnalysisOrder])
        {
            foreach (string tag in allTags)
            {
                byChannel[tag].Add(quantData[scan][tag].Intensity);
                allChannels.Add(quantData[scan][tag].Intensity);
            }
        }
        medianReporterIntensity = allChannels.ToArray().Percentile(50);
        foreach (string tag in allTags)
        {
            medianReporterIntensityByChannel[tag] = byChannel[tag].ToArray().Percentile(50);
        }
        quantMetaData.medianReporterIntensity = medianReporterIntensity;
        quantMetaData.medianReporterIntensityByChannel = medianReporterIntensityByChannel;
        quantMetaData.quantTags = allTags;
        metricsData.QuantMeta = quantMetaData;
        metricsData.IncludesQuant = true;
    }
}
public void AddsJvmGcPause()
{
    // Builds a GC event carrying a single "% Time in GC" measurement plus the
    // standard tag set (stddev, count, min, max, interval, series).
    MetricsData MakeGcEvent(double percentTimeInGc) => new MetricsData()
    {
        Name = "% Time in GC since last GC",
        Measurements = new List<Measurement>()
        {
            new Measurement() { Statistic = "VALUE", Value = percentTimeInGc }
        },
        AvailableTags = new List<AvailableTag>()
        {
            new AvailableTag() { Tag = "StandardDeviation", Values = new Dictionary<string, double> { { "StandardDeviation", 0.0 } }, },
            new AvailableTag() { Tag = "Count", Values = new Dictionary<string, double> { { "Count", 1.0 } }, },
            new AvailableTag() { Tag = "Min", Values = new Dictionary<string, double> { { "Min", 0.0 } }, },
            new AvailableTag() { Tag = "Max", Values = new Dictionary<string, double> { { "Max", 0.0 } }, },
            new AvailableTag() { Tag = "IntervalSec", Values = new Dictionary<string, double> { { "IntervalSec", 1.0 } }, },
            new AvailableTag() { Tag = "Series", Values = new Dictionary<string, double> { { "Series", 1000.0 } }, },
        },
    };

    var eventListenerMock = new Mock<ISimpleEventListener>();
    var metrics = new ConcurrentDictionary<string, MetricsData>();
    eventListenerMock.Setup(x => x.Metrics).Returns(metrics);

    var systemStatisticsProviderMock = new Mock<ISystemStatisticsProvider>();
    systemStatisticsProviderMock.Setup(x => x.GetGCCount()).Returns(2);

    var basicMetricsProvider = new BasicMetricsProvider(loggerMock, eventListenerMock.Object, systemStatisticsProviderMock.Object);

    // Raise two GC-pause events reporting 50% and 25% time in GC.
    eventListenerMock.Raise(x => x.GCCollectionEvent += null, new GcTotalTimeEventArgs(MakeGcEvent(50)));
    eventListenerMock.Raise(x => x.GCCollectionEvent += null, new GcTotalTimeEventArgs(MakeGcEvent(25)));

    // jvm.gc.pause should aggregate both events into COUNT/TOTAL_TIME/MAX measurements.
    basicMetricsProvider.GetMetricNames().Should().Contain("jvm.gc.pause");
    var metric = basicMetricsProvider.GetMetricByName("jvm.gc.pause");
    metric.Measurements.Should().HaveCount(3);
    metric.Measurements.First(x => x.Statistic == "COUNT").Value.Should().Be(2);
    metric.Measurements.First(x => x.Statistic == "TOTAL_TIME").Value.Should().Be(0.75);
    metric.Measurements.First(x => x.Statistic == "MAX").Value.Should().Be(0.5);
}
// Asserts that an actual MetricsData snapshot matches the expected
// MetricsDataEquatable description: counters, timers, gauges and meters are
// compared by name, unit and (when the corresponding *Settings.CheckValues flag
// is set) value. Zero-count counters/timers/meters on the actual side are ignored.
// NOTE(review): counters use Shouldly (ShouldBe) while timers/gauges/meters use
// NUnit Assert.That — mixed assertion styles, presumably historical.
public static void AssertEquals(this MetricsData metricsData, MetricsDataEquatable testData)
{
    //Counters
    var counters = metricsData.Counters.Where(a => a.Value.Count > 0).OrderBy(a => a.Name);
    var testCounters = testData.Counters.OrderBy(a => a.Name);
    counters.Select(a => a.Name).ShouldBe(testCounters.Select(a => a.Name));
    counters.Select(a => a.Unit).ShouldBe(testCounters.Select(a => a.Unit));
    // Sub-counter items are only compared for expected counters that declare them.
    foreach (var counter in testCounters.Where(s => s.SubCounters != null))
    {
        metricsData.Counters.First(a => a.Name == counter.Name).Value.Items.Select(a => a.Item).ShouldBe(counter.SubCounters);
    }
    if (testData.CountersSettings.CheckValues)
    {
        counters.Select(a => a.Value.Count).ShouldBe(testCounters.Select(a => a.Value));
    }

    //Timers
    var timers = metricsData.Timers.Where(a => a.Value.Histogram.Count > 0).OrderBy(a => a.Name);
    var testTimers = testData.Timers.OrderBy(a => a.Name);
    var timerNames = timers.Select(a => a.Name).ToArray();
    var testTimersExpected = testTimers.Select(a => a.Name).ToArray();
    timerNames.ShouldBe(testTimersExpected, $"Timers not recorded. In context {metricsData.Context}");
    Assert.That(timers.Select(a => a.Unit), Is.EquivalentTo(testTimers.Select(a => a.Unit)), $"Timers units not correct. In context {metricsData.Context}");
    if (testData.TimersSettings.CheckValues)
    {
        // Timer "value" comparison is by histogram sample count, not duration.
        Assert.That(timers.Select(a => a.Value.Histogram.Count), Is.EquivalentTo(testTimers.Select(a => a.Value)), $"Timers values not correct. In context {metricsData.Context}");
    }

    //Gauges
    var gauges = metricsData.Gauges.OrderBy(a => a.Name);
    var testGauges = testData.Gauges.OrderBy(a => a.Name);
    Assert.That(gauges.Select(a => a.Name), Is.EquivalentTo(testGauges.Select(a => a.Name)), $"Gauges not recorded. In context {metricsData.Context}");
    Assert.That(gauges.Select(a => a.Unit), Is.EquivalentTo(testGauges.Select(a => a.Unit)), $"Gauges units not correct. In context {metricsData.Context}");
    if (testData.GaugesSettings.CheckValues)
    {
        Assert.That(gauges.Select(a => a.Value), Is.EquivalentTo(testGauges.Select(a => a.Value)), $"Gauges values not correct. In context {metricsData.Context}");
    }

    //Meters
    var meters = metricsData.Meters.Where(a => a.Value.Count > 0).OrderBy(a => a.Name);
    var testMeters = testData.Meters.OrderBy(a => a.Name);
    Assert.That(meters.Select(a => a.Name), Is.EquivalentTo(testMeters.Select(a => a.Name)), $"Meters not recorded. In context {metricsData.Context}");
    Assert.That(meters.Select(a => a.Unit), Is.EquivalentTo(testMeters.Select(a => a.Unit)), $"Meters units not correct. In context {metricsData.Context}");
    if (testData.MetersSettings.CheckValues)
    {
        Assert.That(meters.Select(a => a.Value.Count), Is.EquivalentTo(testMeters.Select(a => a.Value)), $"Meters values not correct. In context {metricsData.Context}");
    }
}
public static JsonMetricsContext FromContext(MetricsData contextData, string version, string timestamp)
{
    // No environment entries at this level; pass an empty sequence downstream.
    return FromContext(contextData, Enumerable.Empty<EnvironmentEntry>(), version, timestamp);
}
public string GenerateReport()
{
    // Snapshot the current metrics and render them alongside health-check status.
    var snapshot = this._metricsContext.DataProvider.CurrentMetricsData;
    return StringReport.RenderMetrics(snapshot, HealthChecks.GetStatus);
}