/// <summary>
/// Initializes a new instance of the <see cref="MetricTelemetry"/> class with empty properties.
/// </summary>
public MetricTelemetry()
{
    this.Data = new MetricData();
    this.Metric = new DataPoint();
    this.context = new TelemetryContext(this.Data.properties, new Dictionary<string, string>());

    // We always have a single 'metric'.
    this.Data.metrics.Add(this.Metric);
}

protected override void ReportHistogram(string name, MetricData.HistogramValue value, Unit unit, MetricTags tags)
{
    var data = new Dictionary<string, object>();
    value.AddHistogramValues(data);

    var keys = data.Keys.ToList();
    var values = keys.Select(k => data[k]);

    Pack(name, keys, values, tags);
}

protected override void ReportCounter(string name, MetricData.CounterValue value, Unit unit, MetricTags tags)
{
    if (!value.Items.Any())
    {
        Pack(name, value.Count, tags);
        return;
    }

    var cols = new List<string>(new[] { "total" });
    cols.AddRange(value.Items.Select(x => x.Item));

    var data = new List<object>(new object[] { value.Count });
    data.AddRange(value.Items.Select(x => (object)x.Count));

    Pack(name, cols, data, tags);
}

private static void TestExportAsyncNoMetrics(AggregationType aggregationType, MetricData metricData)
{
    MockStackdriverMetricsExporter.CallBase = true;

    var metric = new Metric("<metric-namespace>", "<metric-name>", "<metric-description>", aggregationType);
    metric.Data.Add(metricData);

    MockStackdriverMetricsExporter
        .Setup(cm => cm.UploadToGoogleCloudMonitoring(It.IsAny<TimeSeries>(), It.IsAny<TypedValue>(), It.IsAny<TimeInterval>()))
        .Throws(new Exception("UploadToGoogleCloudMonitoring should not be called."));

    MockStackdriverMetricsExporter.Object.ExportAsync(new List<Metric> { metric }, CancellationToken.None);

    MockStackdriverMetricsExporter.Verify();
}

public void SetData_TestData_Success()
{
    // Arrange
    var service = new MetricDataService(metricRepository.Object, metricDataRepository.Object, hourRepository.Object, serverRepository.Object);
    var data = new TestData { Id = 3, Name = "Tom", Time = DateTime.MinValue };
    var metricData = new MetricData();

    // Act
    service.SetData(metricData, data);

    // Assert
    Assert.That(metricData.Data, Is.Not.Null);
    Assert.That(metricData.Data, Is.Not.Empty);
}

public async Task AuditAnalysisMetricLogic_ScoreMetric_NoMetricData()
{
    // Arrange
    var metricData = new MetricData { Metric = new Metric { HourId = 123 }, ServerId = 234 };
    var analyses = new AuditAnalysis[] { };
    this.auditAnalysisRepository.Setup(r => r.ReadByMetricData(metricData))
        .ReturnsAsync(analyses.ToList());

    // Act
    var result = await logic.ScoreMetric(metricData);

    // Assert
    Assert.That(result, Is.EqualTo(Defaults.Scores.UserExperience));
}

public async Task UptimeScoring_ScoreMetricsAsync_WebDowntime(double? webUptimeScore, double? agentUptimeScore, bool expectedWebDowntimeResult)
{
    // Arrange
    var hour = DateTime.Now;
    var webUptime = webUptimeScore.HasValue ? Convert.ToDecimal(webUptimeScore.Value) : (decimal?)null;
    var agentUptime = agentUptimeScore.HasValue ? Convert.ToDecimal(agentUptimeScore.Value) : (decimal?)null;

    var webUptimeMetricData = new MetricData
    {
        Score = webUptime,
        Metric = new Metric { MetricType = MetricType.WebUptime, Hour = new Hour { HourTimeStamp = hour } }
    };
    var agentUptimeMetricData = new MetricData
    {
        Score = agentUptime,
        Metric = new Metric { MetricType = MetricType.AgentUptime, Hour = new Hour { HourTimeStamp = hour } }
    };
    var metricDatas = new List<MetricData> { agentUptimeMetricData, webUptimeMetricData };

    this.uptimeRatingsRepository.Setup(r => r.Create(It.IsAny<decimal>(), hour, expectedWebDowntimeResult, false)).Returns(Task.Delay(1));
    this.uptimeRatingsRepository.Setup(r => r.UpdateWeeklyScores()).Returns(Task.Delay(1));
    this.uptimeRatingsRepository.Setup(r => r.UpdateQuartlyScores(hour)).Returns(Task.Delay(1));

    if (metricDatas.Count > 0)
    {
        this.maintenanceWindowRepository.Setup(r => r.HourIsInMaintenanceWindowAsync(metricDatas[0].Metric.Hour)).ReturnsAsync(false);
    }

    var categoryScore = new CategoryScore();

    // Act
    var logic = new UptimeScoringLogic(uptimeRatingsRepository.Object, maintenanceWindowRepository.Object, logger.Object);
    var result = logic.ScoreMetrics(categoryScore, metricDatas);
    await result;

    // Assert
    this.uptimeRatingsRepository.Verify(r => r.Create(It.IsAny<decimal>(), hour, expectedWebDowntimeResult, false));
}

public void SerializeMultipleProperties2IsCorrect()
{
    var data = new MetricData(
        name: "ƒ/decode",
        dimensions: null,
        properties: new[]
        {
            new MetricDataProperty("count", 4565234),
            new MetricDataProperty("pixels", 100),
            new MetricDataProperty("time", new TimeSpan((long)(54.4522d * TimeSpan.TicksPerSecond)))
        },
        timestamp: null
    );

    Assert.Equal("ƒ/decode count=4565234i,pixels=100i,time=54.4522", data.ToString());

    var data2 = MetricData.Parse(data.ToString());
    Assert.Equal("ƒ/decode count=4565234i,pixels=100i,time=54.4522", data2.ToString());
}

public async Task Rto_CollectMetricData()
{
    // Arrange
    var metricData = new MetricData { Server = new Server { ServerId = 123 }, Metric = new Metric { Hour = new Hour { Id = 3 } } };
    this.databaseRepository.Setup(r => r.ReadByServerIdAsync(metricData.Server.ServerId))
        .ReturnsAsync(new[]
        {
            new Database { LastBackupFullDuration = 1 * 60, LastBackupDiffDuration = 2 * 60, LogBackupsDuration = 3 * 60, Id = 1 },
            new Database { LastBackupFullDuration = 2 * 60, LastBackupDiffDuration = 3 * 60, LogBackupsDuration = 4 * 60, Id = 2 },
            new Database { LastBackupFullDuration = 3 * 60, LastBackupDiffDuration = 4 * 60, LogBackupsDuration = 5 * 60, Id = 3 }, // should be picked as the database with the greatest time to recover
        });
    this.recoverabilityIntegritySummaryReporter.Setup(m => m.UpdateWorstRto(metricData.Metric.Hour.Id, It.Is<int>(i => i == 3), It.IsInRange(11.9m, 12.1m, Range.Inclusive))).ReturnsAsyncDefault();
    this.recoveryObjectivesReporter.Setup(m => m.UpdateRtoReport(It.IsAny<IList<DatabaseRtoScoreData>>())).ReturnsAsyncDefault();

    // Act
    var result = await this.rtoMetricLogic.CollectMetricData(metricData);

    // Assert
    Assert.That(result, Is.Not.Null);
    Assert.That(((RtoMetricLogic.RtoMetricData)result).TimeToRecover, Is.EqualTo(12));
    this.recoverabilityIntegritySummaryReporter.VerifyAll();
    this.recoveryObjectivesReporter.VerifyAll();
}

private void NewMetric(object sender, ProfilingCollection.NewMetricArgs args)
{
    this.newAttachedLabel(args.name);

    int bufferLength = this.columns - 1;
    Label[] labels = new Label[bufferLength];
    for (int i = 0; i < bufferLength; i++)
    {
        labels[i] = this.newAttachedLabel("" + 0);
    }

    //valueLabels2[args.name] = labels;
    MetricData data = new MetricData();
    data.labels = labels;
    data.graphIndex = this.graphControl.AddMetric();
    valueLabels[args.name] = data;
}

public async Task GetMetricData_HourIdsServerCategoryType()
{
    // Arrange
    var service = new MetricDataService(metricRepository.Object, metricDataRepository.Object, hourRepository.Object, serverRepository.Object);
    var metricData = new MetricData();
    var metricDataId = 4;
    this.metricDataRepository.Setup(m => m.ReadAsync(metricDataId)).ReturnsAsync(metricData);
    var serverId = 3;
    var hourIds = new[] { 3, 4, 5 };
    var categoryType = CategoryType.UserExperience;

    // Act
    var result = await service.GetMetricData(hourIds, serverId, categoryType);

    // Assert
    Assert.That(metricData.Data, Is.Null);
}

public async Task CollectMetricData_WaitTillMetricHour()
{
    // Arrange
    var metricData = new MetricData
    {
        MetricId = 444,
        ServerId = 555,
        Metric = new Metric { HourId = 222, Hour = new Hour { HourTimeStamp = DateTime.UtcNow.AddHours(1) } }
    };
    this.metricDataService.Setup(s => s.GetMetricData(metricDataId)).ReturnsAsync(metricData);
    metricFactory.Setup(mf => mf.GetService(MetricType.AuditAnalysis)).Returns(mockMetricLogic.Object);

    // Act
    var result = await this.metricTask.CollectMetricData(metricDataId);

    // Assert
    Assert.That(result.Types.First(), Is.EqualTo(EventSourceType.CollectMetricData));
}

// For all databases, grab the RPO data relevant for the hour and return it.
public async Task<object> CollectMetricData(MetricData metricData)
{
    // Get the largest gap in the gap table for each database.
    var largestBackupGapsInHour = await this.databaseGapsRepository.ReadLargestGapsForEachDatabaseAsync<BackupAllGap>(
        metricData.Server, metricData.Metric.Hour, GapActivityType.Backup);

    // Grab all the unresolved gaps.
    var unresolvedGaps = await this.GetUnresolvedGaps(metricData.Metric.Hour, metricData.Server);

    // Join the lists and keep the largest gap per database.
    var mergedList = largestBackupGapsInHour.Concat(unresolvedGaps).GroupBy(g => g.DatabaseId)
        .Select(x => x.OrderByDescending(g => g.Duration).First());

    // Score the list for the reports (not great, but the best solution at the moment).
    var mergedScoredList = mergedList.Where(g => g.Duration.HasValue).Select(
        g => new DatabaseRpoScoreData
        {
            DatabaseId = g.DatabaseId,
            PotentialDataLoss = g.Duration,
            RpoScore = ScorePotentialDataLoss(g.Duration)
        }).ToList();

    var worstRpo = mergedScoredList.FirstOrDefault();

    // Report the scored list. This doesn't care about hour order, so it reflects whichever reporter ran last.
    await this.recoveryObjectivesReporter.UpdateRpoReport(mergedScoredList);

    // Report the worst database.
    if (worstRpo != null)
    {
        await this.recoverabilityIntegritySummaryReporter.UpdateWorstRpo(metricData.Metric.Hour.Id, worstRpo.DatabaseId, worstRpo.PotentialDataLoss.Value);
        return new RpoMetricData { PotentialDataLoss = worstRpo.PotentialDataLoss };
    }

    return null;
}

public void MetricDataReadSuccessfully()
{
    EventData eventData = new EventData();
    eventData.Payload.Add("metricValue", 17.4);

    EventMetadata metricMetadata = new EventMetadata(MetricData.MetricMetadataKind);
    metricMetadata.Properties.Add(MetricData.MetricNameMoniker, "SomeMetric");
    metricMetadata.Properties.Add(MetricData.MetricValueMoniker, "33.5");

    // Fixed-value metric
    var result = MetricData.TryGetData(eventData, metricMetadata, out MetricData md);
    Assert.Equal(DataRetrievalStatus.Success, result.Status);
    Assert.Equal(33.5, md.Value, DoublePrecisionTolerance);
    Assert.Equal("SomeMetric", md.MetricName);

    // Value read from event properties
    metricMetadata.Properties.Remove(MetricData.MetricValueMoniker);
    metricMetadata.Properties.Add(MetricData.MetricValuePropertyMoniker, "metricValue");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.Success, result.Status);
    Assert.Equal(17.4, md.Value, DoublePrecisionTolerance);

    // Able to convert event property value to a double as needed
    eventData.Payload["metricValue"] = "3.14";
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.Success, result.Status);
    Assert.Equal(3.14, md.Value, DoublePrecisionTolerance);

    // Metric name read from an event property
    eventData.Payload.Add("metricName", "customMetricName");
    metricMetadata.Properties.Remove(MetricData.MetricNameMoniker);
    metricMetadata.Properties.Add(MetricData.MetricNamePropertyMoniker, "metricName");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.Success, result.Status);
    Assert.Equal(3.14, md.Value, DoublePrecisionTolerance);
    Assert.Equal("customMetricName", md.MetricName);
}

public void Setup()
{
    this.metricFactory = new Mock<IServiceFactory<IMetricLogic, MetricType>>();
    this.isReadyMetricFactory = new Mock<IServiceFactory<IMetricReadyForDataCollectionLogic, MetricType>>();
    hourRepository = new Mock<IHourRepository>();
    metricDataRepository = new Mock<IMetricDataRepository>();
    metricDataService = new Mock<IMetricDataService>();
    mockMetricLogic = new Mock<IMetricLogic>();
    logger = TestUtilities.GetMockLogger();

    // Arrange
    metricData = new MetricData
    {
        MetricId = 444,
        ServerId = 555,
        Metric = new Metric
        {
            MetricType = MetricType.AuditAnalysis,
            HourId = 222,
            Hour = new Hour { HourTimeStamp = DateTime.UtcNow.AddSeconds(-1) }
        }
    };
    metricDataRepository.Setup(mr => mr.ReadAsync(metricDataId)).ReturnsAsync(metricData);
    hourRepository.Setup(h => h.ReadAsync(222)).ReturnsAsync(new Hour { HourTimeStamp = DateTime.UtcNow.AddSeconds(2) });
    mockMetricLogic.Setup(ml => ml.CollectMetricData(metricData)).ReturnsAsync(metricData);

    metricReadyForDataCollectionLogic = new Mock<IMetricReadyForDataCollectionLogic>();
    metricReadyForDataCollectionLogic.Setup(l => l.IsReady(It.IsAny<MetricData>())).ReturnsAsync(true);
    metricDataRepository.Setup(mdr => mdr.UpdateAsync(It.IsAny<MetricData>())).Returns(Task.Delay(10));
    this.metricDataService.Setup(s => s.GetMetricData(metricDataId)).ReturnsAsync(metricData);

    this.metricTask = new MetricTask(
        metricDataRepository.Object,
        metricFactory.Object,
        isReadyMetricFactory.Object,
        metricDataService.Object,
        logger.Object);
}

public async Task AuditAnalysisMetricLogic_ScoreMetric()
{
    // Arrange
    var metricData = new MetricData { Metric = new Metric { HourId = 123 }, ServerId = 234 };
    var analyses = new[]
    {
        new AuditAnalysis
        {
            TotalComplexQueries = 10,
            TotalLongRunningQueries = 5,
            TotalQueries = 20,
            TotalSimpleLongRunningQueries = 5
        }
    };
    this.auditAnalysisRepository.Setup(r => r.ReadByMetricData(metricData))
        .ReturnsAsync(analyses.ToList());

    // Act
    var result = await logic.ScoreMetric(metricData);

    // Assert
    Assert.That(result, Is.EqualTo(50.0));
}

public async Task UptimeScoring_ScoreMetricsAsync_NoUptimes()
{
    // Arrange
    var someOtherMetricData = new MetricData { Metric = new Metric { MetricType = MetricType.Ram } };
    var metricDatas = new List<MetricData> { someOtherMetricData };
    var categoryScore = new CategoryScore();

    // Act
    var logic = new UptimeScoringLogic(uptimeRatingsRepository.Object, maintenanceWindowRepository.Object, logger.Object);
    var result = await logic.ScoreMetrics(categoryScore, metricDatas);

    // Assert
    Assert.That(result, Is.EqualTo(100m));
}

private void TrackMetric(EventData e, IReadOnlyCollection<EventMetadata> metadata)
{
    Debug.Assert(metadata != null);

    foreach (EventMetadata metricMetadata in metadata)
    {
        MetricData metricData;
        var result = MetricData.TryGetData(e, metricMetadata, out metricData);
        if (result.Status != DataRetrievalStatus.Success)
        {
            this.healthReporter.ReportWarning("ApplicationInsightsOutput: " + result.Message, EventFlowContextIdentifiers.Output);
            continue;
        }

        MetricTelemetry mt = new MetricTelemetry();
        mt.Name = metricData.MetricName;
        mt.Value = metricData.Value;

        AddProperties(mt, e);

        telemetryClient.TrackMetric(mt);
    }
}

private BulkIndexOperation<EventData> CreateMetricOperation(
    EventData eventData,
    EventMetadata metricMetadata,
    string currentIndexName,
    string documentTypeName)
{
    var result = MetricData.TryGetData(eventData, metricMetadata, out MetricData metricData);
    if (result.Status != DataRetrievalStatus.Success)
    {
        this.healthReporter.ReportProblem("ElasticSearchOutput: " + result.Message, EventFlowContextIdentifiers.Output);
        return null;
    }

    var metricEventData = eventData.DeepClone();
    metricEventData.Payload[nameof(MetricData.MetricName)] = metricData.MetricName;
    metricEventData.Payload[nameof(MetricData.Value)] = metricData.Value;

    var operation = CreateOperation(metricEventData, currentIndexName, documentTypeName);
    return operation;
}

public void TestAddMultipleMetricsSucceeds()
{
    var component = new ComponentData("TestName", "com.newrelic.test");
    var metric1 = new MetricData("Test/Metric1", "units", 2);
    var metric2 = new MetricData("Test/Metric2", "units", 3);
    var metric3 = new MetricData("Test/Metric3", "units", 4);

    Assert.AreEqual(0, TestSerializationHelper.GetMetricsMapFromComponent(component).Count, "Should be zero metrics");

    component.AddMetric(metric1);
    component.AddMetric(metric2);
    component.AddMetric(metric3);

    var serializedComponent = component.Serialize(DateTime.Now.Subtract(TimeSpan.FromSeconds(60)));
    var metrics = TestSerializationHelper.GetMetricsMapFromComponent(component);

    Assert.AreEqual(3, metrics.Count, "Should be three metrics");
    Assert.AreEqual(2, TestSerializationHelper.GetValueFromMetricMap(metrics, metric1.FullName, MetricValues.Value));
    Assert.AreEqual(3, TestSerializationHelper.GetValueFromMetricMap(metrics, metric2.FullName, MetricValues.Value));
    Assert.AreEqual(4, TestSerializationHelper.GetValueFromMetricMap(metrics, metric3.FullName, MetricValues.Value));
}

public void MetricDataExpectedReadFailures()
{
    EventData eventData = new EventData();

    // Invalid metadata type
    EventMetadata metricMetadata = new EventMetadata("someOtherType");
    var result = MetricData.TryGetData(eventData, metricMetadata, out MetricData md);
    Assert.Equal(DataRetrievalStatus.InvalidMetadataType, result.Status);

    // Missing metric name property
    metricMetadata = new EventMetadata(MetricData.MetricMetadataKind);
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.MetadataPropertyMissing, result.Status);
    Assert.Contains("Expected property 'metricName'", result.Message);

    // No metricValue or metricValueProperty on the metadata
    metricMetadata.Properties.Add(MetricData.MetricNameMoniker, "SomeMetric");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.MetadataPropertyMissing, result.Status);
    Assert.Contains("Expected property 'metricValue'", result.Message);

    // metricValue cannot be parsed
    metricMetadata.Properties.Add("metricValue", "not_a_number");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.InvalidMetadataValue, result.Status);

    // metricValueProperty points to a property that does not exist
    metricMetadata.Properties.Remove("metricValue");
    metricMetadata.Properties.Add("metricValueProperty", "value");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.DataMissingOrInvalid, result.Status);

    // metricValueProperty points to a property that does not contain a value parseable as a double
    eventData.Payload.Add("value", "not-a-number");
    result = MetricData.TryGetData(eventData, metricMetadata, out md);
    Assert.Equal(DataRetrievalStatus.DataMissingOrInvalid, result.Status);
}

public override async Task<MetricData> GetDataAsync(CancellationToken cancellationToken)
{
    try
    {
        var azureDevOpsHelper = new AzureDevOpsHelper(ConfigProvider, RootConfig);
        var codeCoverageDetails = await azureDevOpsHelper.GetCodeCoverageResultForLastGreenBuildAsync(cancellationToken);
        if (codeCoverageDetails == null)
        {
            return MetricData.NoUpdate();
        }

        var linesCoverageData = codeCoverageDetails.CoverageData.First().CoverageStats.Single(x => x.Label == "Lines");
        var percentageCoverage = Math.Round(100m * linesCoverageData.Covered / linesCoverageData.Total, 1);

        return new MetricData(percentageCoverage, MetricType.Percentage, Status.OK);
    }
    catch (Exception ex)
    {
        return MetricData.Error(ex.Message);
    }
}

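// Worked example of the percentage calculation above, using assumed (hypothetical) coverage figures:
// with Covered = 8432 and Total = 10000 lines, 100m * 8432 / 10000 = 84.32m,
// and Math.Round(84.32m, 1) yields 84.3m, which is the value reported as the metric.
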
public async Task WebUptimeMetricLogic_CollectMetricData_NewDataSource()
{
    // Arrange
    var metricData = new MetricData();
    serverRepository.Setup(r => r.ReadAllActiveAsync()).ReturnsAsync(servers);
    configRepository.Setup(r => r.ReadConfigurationValue(ConfigurationKeys.Section, ConfigurationKeys.WebUptimeUserAgent)).Returns("Chrome");
    var logic = new WebUptimeMetricLogic(httpClientFactoryMock.Object, serverRepository.Object, metricDataService.Object, configRepository.Object, logger.Object);

    // Act
    var result = await logic.CollectMetricData(metricData);

    // Assert
    var resultWebUptime = (WebUptime)result;
    Assert.That(result, Is.Not.Null);
    Assert.That(resultWebUptime, Is.Not.Null);
    Assert.That(resultWebUptime.SuccessfulSamples, Is.EqualTo(1));
    Assert.That(resultWebUptime.TotalSamples, Is.EqualTo(1));
}

public async Task AgentUptimeMetricLogic_CollectMetricData_NewAgentUptime()
{
    // Arrange
    var metric = new MetricData { Metric = new Metric { Hour = new Hour() } };
    metricDataService.Setup(mds => mds.GetData<AgentUptime>(It.IsAny<MetricData>())).Returns((AgentUptime)null);
    agentHistoryRepository.Setup(r => r.ReadByHourAsync(It.IsAny<Hour>()))
        .ReturnsAsync(Enumerable.Range(0, 100).Select(i => new AgentHistory { Successful = i % 10 != 0 }).ToList());
    var logic = new AgentUptimeMetricLogic(agentHistoryRepository.Object, metricDataService.Object, this.loggerMock.Object);

    // Act
    var result = await logic.CollectMetricData(metric);

    // Assert
    Assert.That(result, Is.Not.Null);
}

public async Task AuditAnalysisMetricLogic_IsReady(bool poisonWaitReady, bool searchAuditBatchReady, bool expectedResult)
{
    // Arrange
    var server = new Server();
    var hour = new Hour();
    var metricData = new MetricData { Metric = new Metric { Hour = hour }, Server = server, ServerId = 123 };
    this.poisonWaitRepository.Setup(r => r.ReadIfPoisonWaitsForHourAsync(hour))
        .ReturnsAsync(poisonWaitReady);
    this.searchAuditBatchRepository.Setup(r => r.ExistsForHourAndServer(metricData.Metric.HourId, metricData.ServerId.Value))
        .ReturnsAsync(searchAuditBatchReady);

    // Act
    var result = await logic.IsReady(metricData);

    // Assert
    Assert.That(result, Is.EqualTo(expectedResult));
}

private void CreateObservations()
{
    if (MetricData.Count <= 1)
    {
        return;
    }

    if (!MetricData.All(md => md.Timestamps.Any()))
    {
        return;
    }

    var personMetrics = MetricData.First();
    var classMetrics = MetricData.Skip(1).ToList();

    PersonObservation = new ObservationWindow(personMetrics);
    ClassObservations = classMetrics.Select(classMetric => new ObservationWindow(classMetric)).ToList();

    ObservationsCreated?.Invoke(
        PersonObservation.Values.Count,
        PersonObservation.FirstObservation,
        PersonObservation.LastObservation);
}

public async Task GetMetricData_MetricDataId()
{
    // Arrange
    var service = new MetricDataService(metricRepository.Object, metricDataRepository.Object, hourRepository.Object, serverRepository.Object);
    var metricDataId = 4;
    var metricId = 3;
    var hourId = 2;
    var serverId = 1;
    var metricData = new MetricData { MetricId = metricId, Id = metricDataId, ServerId = serverId };
    var metric = new Metric { Id = metricId, HourId = hourId };
    var hour = new Hour { Id = hourId };
    var server = new Server { ServerId = serverId };
    this.metricDataRepository.Setup(m => m.ReadAsync(metricDataId)).ReturnsAsync(metricData);
    this.metricRepository.Setup(m => m.ReadAsync(metricData.MetricId)).ReturnsAsync(metric);
    this.hourRepository.Setup(m => m.ReadAsync(metric.HourId)).ReturnsAsync(hour);
    this.serverRepository.Setup(m => m.ReadAsync(metricData.ServerId.Value)).ReturnsAsync(server);

    // Act
    var result = await service.GetMetricData(metricDataId);

    // Assert
    Assert.That(metricData.Data, Is.Null);
    this.metricDataRepository.VerifyAll();
    this.metricRepository.VerifyAll();
    this.hourRepository.VerifyAll();
    this.serverRepository.VerifyAll();
}

public async Task Rto_CollectMetricData_NullDurations()
{
    // Arrange
    var metricData = new MetricData { Server = new Server { ServerId = 123 } };
    this.databaseRepository.Setup(r => r.ReadByServerIdAsync(metricData.Server.ServerId))
        .ReturnsAsync(new[] { new Database(), new Database(), new Database() });

    // Act
    var result = await this.rtoMetricLogic.CollectMetricData(metricData);

    // Assert
    Assert.That(result, Is.Not.Null);
    Assert.That(((RtoMetricLogic.RtoMetricData)result).TimeToRecover, Is.Null);
}

public async Task AgentUptimeMetricLogic_ScoreMetric_NoUptime()
{
    // Arrange
    var metric = new MetricData { Metric = new Metric { Hour = new Hour { HourTimeStamp = DateTime.UtcNow } } };
    metricDataService.Setup(mds => mds.GetData<AgentUptime>(It.IsAny<MetricData>())).Returns((AgentUptime)null);
    agentHistoryRepository.Setup(r => r.ReadEarliestAsync()).ReturnsAsync(new AgentHistory { TimeStamp = DateTime.UtcNow.AddYears(-10) });
    var logic = new AgentUptimeMetricLogic(agentHistoryRepository.Object, metricDataService.Object, this.loggerMock.Object);

    // Act
    var result = await logic.ScoreMetric(metric);

    // Assert
    Assert.That(result, Is.EqualTo(Defaults.Scores.Uptime));
}

public static List<MetricData> Parse(SortedSetEntry[] sortedSetEntries)
{
    var metrics = new List<MetricData>();
    foreach (var sortedSetEntry in sortedSetEntries)
    {
        // Each element is stored as "metricId:value"; the sorted-set score is read as microseconds.
        string key = sortedSetEntry.Element.ToString().Trim('"');
        var data = key.Split(new string[] { ":" }, StringSplitOptions.None);
        var metricData = new MetricData
        {
            MetricId = data[0],
            Value = long.Parse(data[1]),
            TimespanInMicroseconds = (long)sortedSetEntry.Score,
        };

        // Bucket the value down to the nearest GRANULARITY-second boundary.
        var timespanInSeconds = metricData.TimespanInMicroseconds / 1_000_000;
        metricData.DownsampledTimestampInSeconds = (long)(Math.Floor(timespanInSeconds / GRANULARITY) * GRANULARITY);

        metrics.Add(metricData);
    }

    return metrics;
}

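// A minimal usage sketch for Parse above. The entries, metric ids, values, and the assumption that
// GRANULARITY equals 60 (seconds) are hypothetical; SortedSetEntry comes from StackExchange.Redis.
var entries = new[]
{
    new SortedSetEntry("cpu:42", 125_000_000d),  // 125 s -> bucket 120 when GRANULARITY == 60
    new SortedSetEntry("mem:1024", 61_500_000d), // 61.5 s -> bucket 60
};
var parsed = MetricData.Parse(entries);
// parsed[0]: MetricId == "cpu", Value == 42, DownsampledTimestampInSeconds == 120 (under the assumed GRANULARITY)
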
protected override void ReportCounter(string name, MetricData.CounterValue value, Unit unit, MetricTags tags)
{
    var itemColumns = value.Items.SelectMany(i => new[] { i.Item + " - Count", i.Item + " - Percent" });
    var columns = CounterColumns.Concat(itemColumns);

    var itemValues = value.Items.SelectMany(i => new[] { Value(i.Count), Value(i.Percent) });
    var data = new[] { Value(value.Count) }.Concat(itemValues);

    Pack(name, columns, data);
}

protected override void ReportHistogram(string name, MetricData.HistogramValue value, Unit unit, MetricTags tags)
{
    Pack(name, HistogramColumns, new[]
    {
        Value(value.Count),
        Value(value.LastValue),
        Value(value.LastUserValue),
        Value(value.Min),
        Value(value.MinUserValue),
        Value(value.Mean),
        Value(value.Max),
        Value(value.MaxUserValue),
        Value(value.StdDev),
        Value(value.Median),
        Value(value.Percentile75),
        Value(value.Percentile95),
        Value(value.Percentile98),
        Value(value.Percentile99),
        Value(value.Percentile999),
        Value(value.SampleSize)
    });
}

/// <summary>
/// Create handler for performance counter telemetry.
/// </summary>
private Action<ITelemetry> CreateHandlerForPerformanceCounterTelemetry(EventSource eventSource, MethodInfo writeGenericMethod, Type eventSourceOptionsType, PropertyInfo eventSourceOptionsKeywordsProperty)
{
    var eventSourceOptions = Activator.CreateInstance(eventSourceOptionsType);
    var keywords = Keywords.Metrics;
    eventSourceOptionsKeywordsProperty.SetValue(eventSourceOptions, keywords);

    var dummyMetricData = new MetricData();
    var dummyDataPoint = new DataPoint();
    var writeMethod = writeGenericMethod.MakeGenericMethod(new
    {
        PartA_iKey = this.dummyPartAiKeyValue,
        PartA_Tags = this.dummyPartATagsValue,
        PartB_MetricData = new
        {
            // The properties and layout should be the same as MetricData_types.cs
            dummyMetricData.ver,
            metrics = new[]
            {
                new
                {
                    // The properties and layout should be the same as DataPoint_types.cs
                    dummyDataPoint.ns,
                    dummyDataPoint.name,
                    dummyDataPoint.kind,
                    dummyDataPoint.value,
                    dummyDataPoint.count,
                    dummyDataPoint.min,
                    dummyDataPoint.max,
                    dummyDataPoint.stdDev,
                },
            }.AsEnumerable(),
            dummyMetricData.properties,
        },
        PartA_flags = this.dummyPartAFlagsValue,
    }.GetType());

    return (item) =>
    {
        if (this.EventSourceInternal.IsEnabled(EventLevel.Verbose, keywords))
        {
#pragma warning disable 618
            var telemetryItem = (item as PerformanceCounterTelemetry).Data;
            CopyGlobalPropertiesIfRequired(item, telemetryItem.Properties);
#pragma warning restore 618
            item.Sanitize();
            var data = telemetryItem.Data;
            var extendedData = new
            {
                // The properties and layout should be the same as the anonymous type in the above MakeGenericMethod
                PartA_iKey = telemetryItem.Context.InstrumentationKey,
                PartA_Tags = telemetryItem.Context.SanitizedTags,
                PartB_MetricData = new
                {
                    data.ver,
                    metrics = data.metrics.Select(i => new
                    {
                        i.ns,
                        i.name,
                        i.kind,
                        i.value,
                        i.count,
                        i.min,
                        i.max,
                        i.stdDev,
                    }),
                    data.properties,
                },
                PartA_flags = telemetryItem.Context.Flags,
            };

            writeMethod.Invoke(eventSource, new object[] { MetricTelemetry.TelemetryName, eventSourceOptions, extendedData });
        }
    };
}

public static MetricsDataValueSource ToMetricValueSource(this MetricData source)
{
    var contexts = source.Contexts.ToMetricValueSource();
    return new MetricsDataValueSource(source.Timestamp, new EnvironmentInfo(source.Environment), contexts);
}

protected override void ReportTimer(string name, MetricData.TimerValue value, Unit unit, TimeUnit rateUnit, TimeUnit durationUnit, MetricTags tags)
{
    Pack(name, TimerColumns, new[]
    {
        Value(value.Rate.Count),
        Value(value.ActiveSessions),
        Value(value.Rate.MeanRate),
        Value(value.Rate.OneMinuteRate),
        Value(value.Rate.FiveMinuteRate),
        Value(value.Rate.FifteenMinuteRate),
        Value(value.Histogram.LastValue),
        Value(value.Histogram.LastUserValue),
        Value(value.Histogram.Min),
        Value(value.Histogram.MinUserValue),
        Value(value.Histogram.Mean),
        Value(value.Histogram.Max),
        Value(value.Histogram.MaxUserValue),
        Value(value.Histogram.StdDev),
        Value(value.Histogram.Median),
        Value(value.Histogram.Percentile75),
        Value(value.Histogram.Percentile95),
        Value(value.Histogram.Percentile98),
        Value(value.Histogram.Percentile99),
        Value(value.Histogram.Percentile999),
        Value(value.Histogram.SampleSize)
    });
}

public Task<decimal> ScoreMetric(MetricData metricData)
{
    var rpoMetricData = this.metricDataService.GetData<RpoMetricData>(metricData);
    return Task.FromResult(ScorePotentialDataLoss(rpoMetricData?.PotentialDataLoss));
}

protected override void ReportMeter(string name, MetricData.MeterValue value, Unit unit, TimeUnit rateUnit, MetricTags tags)
{
    var itemColumns = value.Items.SelectMany(i => new[]
    {
        i.Item + " - Count",
        i.Item + " - Percent",
        i.Item + " - Mean Rate",
        i.Item + " - 1 Min Rate",
        i.Item + " - 5 Min Rate",
        i.Item + " - 15 Min Rate"
    });
    var columns = MeterColumns.Concat(itemColumns);

    var itemValues = value.Items.SelectMany(i => new[]
    {
        Value(i.Value.Count),
        Value(i.Percent),
        Value(i.Value.MeanRate),
        Value(i.Value.OneMinuteRate),
        Value(i.Value.FiveMinuteRate),
        Value(i.Value.FifteenMinuteRate)
    });
    var data = new[]
    {
        Value(value.Count),
        Value(value.MeanRate),
        Value(value.OneMinuteRate),
        Value(value.FiveMinuteRate),
        Value(value.FifteenMinuteRate)
    }.Concat(itemValues);

    Pack(name, columns, data);
}