public static void GetDogfoodCPUSeries()
{
    var testCertificateThumbprint = "C35CBFF9FA6C51E51E1DE97B6D1E246F27661301";
    var httpsUrl = "https://shoebox2.metrics.nsatc.net/public/monitoringAccount/SignalRShoeboxTest/homeStamp";
    var connectionInfo = new ConnectionInfo(new Uri(httpsUrl), testCertificateThumbprint, StoreLocation.LocalMachine);
    var reader = new MetricReader(connectionInfo);

    // Single metric
    var id = new MetricIdentifier("SignalRShoeboxTest", "systemLoad", "PodCpuUsage");
    //var id = new MetricIdentifier("SignalRShoeboxTest", "ShoeboxInternal", "MessageCountRaw");
    //var id = new MetricIdentifier("SignalRShoeboxTest", "systemLoad", "PodMemory");

    // The short link for this series is http://jarvis-int.dc.ad.msft.net/D10A9E2E.
    var definition = new TimeSeriesDefinition<MetricIdentifier>(
        id,
        new Dictionary<string, string>
        {
            //{ "ResourceId", resourceId }
            { "resourceKubeId", "62a558c2-2895-423d-a7b0-05b03a15b65a" }
        });

    TimeSeries<MetricIdentifier, double?> result = reader.GetTimeSeriesAsync(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow, SamplingType.Max, definition).Result;

    foreach (var dataPoint in result.Datapoints)
    {
        Console.WriteLine("Time: {0}, Value: {1}", dataPoint.TimestampUtc, dataPoint.Value);
    }

    Console.WriteLine(JsonConvert.SerializeObject(result));

    Console.WriteLine("############################ END OF GetDogfoodCPUSeries ##############################");
}
public static string QueryMetrics(string podName, PlatformType platformType, SystemLoadType systemLoadType, DateTime startTime, DateTime endTime, string filePath)
{
    if (platformType == PlatformType.Dogfood)
    {
        var testCertificateThumbprint = "C35CBFF9FA6C51E51E1DE97B6D1E246F27661301";
        var httpsUrl = "https://shoebox2.metrics.nsatc.net/public/monitoringAccount/SignalRShoeboxTest/homeStamp";
        var connectionInfo = new ConnectionInfo(new Uri(httpsUrl), testCertificateThumbprint, StoreLocation.LocalMachine);
        var id = new MetricIdentifier("SignalRShoeboxTest", "systemLoad", systemLoadType == SystemLoadType.CPU ? "PodCpuUsage" : "PodMemory");
        var reader = new MetricReader(connectionInfo);

        // The short link for this series is http://jarvis-int.dc.ad.msft.net/D10A9E2E.
        var definition = new TimeSeriesDefinition<MetricIdentifier>(
            id,
            new Dictionary<string, string>
            {
                { "podName", podName }
            });

        TimeSeries<MetricIdentifier, double?> result = reader.GetTimeSeriesAsync(startTime, endTime, SamplingType.Max, definition).Result;
        var strOutput = JsonConvert.SerializeObject(result);

        using (System.IO.StreamWriter file = new System.IO.StreamWriter(filePath, false))
        {
            file.Write(strOutput);
        }

        return strOutput;
    }

    return null;
}
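// Hypothetical usage sketch (not part of the original sample): shows how QueryMetrics above might be invoked
// to dump the last hour of pod CPU data to a local JSON file. The pod name and file path are made-up
// illustration values; PlatformType.Dogfood and SystemLoadType.CPU come from the method itself.
public static void QueryMetricsUsageSketch()
{
    var json = QueryMetrics(
        "signalr-pod-0",                 // hypothetical pod name
        PlatformType.Dogfood,
        SystemLoadType.CPU,
        DateTime.UtcNow.AddHours(-1),
        DateTime.UtcNow,
        @"C:\temp\podCpu.json");         // hypothetical output path

    Console.WriteLine(json ?? "No data: QueryMetrics currently returns null for non-Dogfood platforms.");
}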
/// <summary>
/// Gets the current monitor health status.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// The current monitor health status.
/// </returns>
public async Task<IMonitorHealthStatus> GetCurrentHealthStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
    if (monitorInstanceDefinition == null)
    {
        throw new ArgumentNullException(nameof(monitorInstanceDefinition));
    }

    var statuses = await this.GetMultipleCurrentHeathStatusesAsync(monitorInstanceDefinition).ConfigureAwait(false);

    return statuses.First().Value;
}
/// <summary>
/// Reads the monitor information.
/// </summary>
public static void ReadMonitorInfo()
{
    // Replace 31280E2F2D2220808315C212DF8062A295B28325 with your cert thumbprint,
    // install it to the "Personal\Certificates" folder in the "Local Computer" certificate store,
    // and grant the permission of reading the private key to the service/application using the MDM consumption APIs.
    string testCertificateThumbprint = "31280E2F2D2220808315C212DF8062A295B28325";
    var connectionInfo = new ConnectionInfo(testCertificateThumbprint, StoreLocation.LocalMachine, MdmEnvironment.Int);
    var reader = new MonitorReader(connectionInfo);

    var id = new MetricIdentifier("MetricTeamInternalMetrics", "PlatformMetrics", "\\Memory\\Available MBytes");
    IReadOnlyList<MonitorIdentifier> result = reader.GetMonitorsAsync(id).Result;
    Console.WriteLine("There are {0} monitors under {1} - {2}.\n", result.Count, id.MetricName, JsonConvert.SerializeObject(result));

    var allMonitors = reader.GetMonitorsAsync(id.MonitoringAccount).Result;
    Console.WriteLine("There are {0} monitors under {1} - {2}.\n", allMonitors.Count, id.MonitoringAccount, JsonConvert.SerializeObject(allMonitors));

    var definition = new TimeSeriesDefinition<MonitorIdentifier>(
        result[0],
        new Dictionary<string, string> { { "Datacenter", "westus" }, { "__Role", "metrics.server" } });

    IMonitorHealthStatus result2 = reader.GetCurrentHeathStatusAsync(definition).Result;
    Console.WriteLine("The current health status is {0}.\n", JsonConvert.SerializeObject(result2));

    var definition2 = new TimeSeriesDefinition<MonitorIdentifier>(
        result[0],
        new Dictionary<string, string> { { "Datacenter", "eastus2" }, { "__Role", "metrics.server" } });

    var statuses = reader.GetMultipleCurrentHeathStatusesAsync(definition, definition2).Result;
    Console.WriteLine("The current health statuses are: \n{0}.\n{1}.\n ", JsonConvert.SerializeObject(statuses[0]), JsonConvert.SerializeObject(statuses[1]));

    // The short link for the history is http://jarvis-int.dc.ad.msft.net/44EA28BD
    TimeSeries<MonitorIdentifier, bool?> result3 = reader.GetMonitorHistoryAsync(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow, definition).Result;

    Console.WriteLine("The monitor health history is:");
    foreach (var datapoint in result3.Datapoints)
    {
        Console.WriteLine("Time: {0}, Result: {1}", datapoint.TimestampUtc, datapoint.Value);
    }

    Console.WriteLine("############################ END OF ReadMonitorInfo ##############################");
}
/// <summary>
/// Gets the single time series.
/// </summary>
/// <param name="useUserAuth">if set to <c>true</c>, use user authentication; otherwise use certificate authentication.</param>
public static void GetSingleTimeSeries(bool useUserAuth)
{
    ConnectionInfo connectionInfo;
    if (useUserAuth)
    {
        connectionInfo = new ConnectionInfo(MdmEnvironment.Int);
    }
    else
    {
        // Replace the thumbprint below with your cert thumbprint,
        // install it to the "Personal\Certificates" folder in the "Local Computer" certificate store,
        // and grant the permission of reading the private key to the service/application using the MDM consumption APIs.
        string testCertificateThumbprint = "C35CBFF9FA6C51E51E1DE97B6D1E246F27661301";
        connectionInfo = new ConnectionInfo(new Uri("https://shoebox2.metrics.nsatc.net/public/monitoringAccount/SignalRShoeboxTest/homeStamp"), testCertificateThumbprint, StoreLocation.LocalMachine);
    }

    var reader = new MetricReader(connectionInfo);

    // Single metric
    var id = new MetricIdentifier("SignalRShoeboxTest", "ShoeboxInternal", "ConnectionCountDelta");
    //var id = new MetricIdentifier("SignalRShoeboxTest", "systemLoad", "PodMemory");

    // The short link for this series is http://jarvis-int.dc.ad.msft.net/D10A9E2E.
    var definition = new TimeSeriesDefinition<MetricIdentifier>(
        id,
        new Dictionary<string, string>
        {
            { "ResourceId", "/subscriptions/5ea15035-434e-46ba-97cd-ea0927a47104/resourceGroups/testsoutheastasia/providers/Microsoft.SignalRService/SignalR/honzhangcenable3" }
        });

    TimeSeries<MetricIdentifier, double?> result = reader.GetTimeSeriesAsync(DateTime.UtcNow.AddMinutes(-10), DateTime.UtcNow, SamplingType.Average, definition).Result;

    foreach (var dataPoint in result.Datapoints)
    {
        Console.WriteLine("Time: {0}, Value: {1}", dataPoint.TimestampUtc, dataPoint.Value);
    }

    Console.WriteLine("############################ END OF GetSingleTimeSeries ##############################");
}
/// <summary>
/// Determines whether the sampling type at the given index is the count sampling type.
/// </summary>
/// <param name="definition">The definition.</param>
/// <param name="samplingTypeIndex">The sampling type index.</param>
/// <returns>True if it is the count sampling type; false otherwise.</returns>
private static bool IsCountSamplingType(TimeSeriesDefinition<MetricIdentifier> definition, int samplingTypeIndex)
{
    return (definition == null && samplingTypeIndex == 0)
        || (definition != null && definition.SamplingTypes[samplingTypeIndex].Equals(SamplingType.Count));
}
/// <summary>
/// Deserializes for one timestamp - v2 and above.
/// </summary>
/// <param name="version">The serialization version.</param>
/// <param name="reader">The bit reader.</param>
/// <param name="definition">The definition.</param>
/// <param name="values">The values.</param>
/// <param name="dataPointIndex">Index of the data point.</param>
/// <param name="currentBlockLeadingZeros">The current block leading zeros.</param>
/// <param name="currentBlockTrailingZeros">The current block trailing zeros.</param>
/// <param name="priorValidValues">The prior valid values.</param>
private static void DeserializeForOneTimestampV2AndAbove(
    byte version,
    BitBinaryReader reader,
    TimeSeriesDefinition<MetricIdentifier> definition,
    List<List<double?>> values,
    int dataPointIndex,
    sbyte[] currentBlockLeadingZeros,
    sbyte[] currentBlockTrailingZeros,
    double[] priorValidValues)
{
    var numSamplingTypes = values.Count;

    for (var s = 0; s < numSamplingTypes; s++)
    {
        if (dataPointIndex == 0)
        {
            // Very first value of the series.
            priorValidValues[s] = reader.BinaryReader.ReadDouble();
            values[s].Add(GetNullableDouble(priorValidValues[s]));
        }
        else
        {
            var firstBit = reader.ReadBit();
            if (!firstBit)
            {
                // The first bit is 0: the value is identical to the prior value.
                values[s].Add(GetNullableDouble(priorValidValues[s]));
            }
            else
            {
                var secondBit = reader.ReadBit();
                long meaningfulBits;
                if (!secondBit)
                {
                    // The second bit is 0 while the first is 1: reuse the previous block position.
                    if (currentBlockLeadingZeros[s] < 0)
                    {
                        throw new Exception("The block has not been set so it is a bug in serialization on server");
                    }

                    var numBitsToRead = BitAggregateMagic.NumBitsInLongInteger - currentBlockLeadingZeros[s] - currentBlockTrailingZeros[s];
                    meaningfulBits = reader.ReadBits(numBitsToRead);
                }
                else
                {
                    // A new block position was started since the number starts with "11".
                    currentBlockLeadingZeros[s] = (sbyte)reader.ReadBits(GetNumBitsToEncodeNumLeadingZeros(version));
                    var numBitsToRead = (sbyte)reader.ReadBits(NumBitsToEncodeNumMeaningfulBits);
                    if (numBitsToRead == 0)
                    {
                        // The block size is 64 bits, which becomes 0 when written into 6 bits - overflow.
                        // If the block size were indeed 0 bits, the xor value would be 0, and the actual value would be identical to the prior value,
                        // so we would not have reached here since firstBit would be 0.
                        numBitsToRead = (sbyte)BitAggregateMagic.NumBitsInLongInteger;
                    }

                    currentBlockTrailingZeros[s] = (sbyte)(BitAggregateMagic.NumBitsInLongInteger - currentBlockLeadingZeros[s] - numBitsToRead);
                    meaningfulBits = reader.ReadBits(numBitsToRead);
                }

                long xor = meaningfulBits << currentBlockTrailingZeros[s];
                priorValidValues[s] = BitConverter.Int64BitsToDouble(xor ^ BitConverter.DoubleToInt64Bits(priorValidValues[s]));
                values[s].Add(GetNullableDouble(priorValidValues[s]));
            }
        }
    }
}
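// Illustrative sketch (not library code): a round trip of the Gorilla-style XOR step that the decoder above
// reverses. Conceptually, the server stores XOR(bits(current), bits(prior)) with its leading/trailing zero
// bits stripped; the client shifts the meaningful bits back into place and XORs with the prior value to
// recover the current one. The values and the zero-counting helpers below are simple illustrations, not the
// library's actual implementation.
public static void GorillaXorRoundTripSketch()
{
    double prior = 12.0;
    double current = 12.5;

    long priorBits = BitConverter.DoubleToInt64Bits(prior);
    long currentBits = BitConverter.DoubleToInt64Bits(current);
    long xor = priorBits ^ currentBits;          // mostly zeros when consecutive values are close

    // "Encode": strip leading/trailing zeros around the meaningful bits.
    int leadingZeros = CountLeadingZerosSketch(xor);
    int trailingZeros = CountTrailingZerosSketch(xor);
    long meaningfulBits = (long)((ulong)xor >> trailingZeros);

    // "Decode": the same shift-and-XOR that DeserializeForOneTimestampV2AndAbove performs.
    long restoredXor = meaningfulBits << trailingZeros;
    double restored = BitConverter.Int64BitsToDouble(restoredXor ^ priorBits);

    Console.WriteLine($"leading={leadingZeros}, trailing={trailingZeros}, restored={restored}"); // restored == 12.5
}

private static int CountLeadingZerosSketch(long v)
{
    int count = 0;
    for (ulong mask = 1UL << 63; mask != 0 && ((ulong)v & mask) == 0; mask >>= 1) { count++; }
    return count;
}

private static int CountTrailingZerosSketch(long v)
{
    if (v == 0) { return 64; }
    int count = 0;
    while ((v & 1) == 0) { v = (long)((ulong)v >> 1); count++; }
    return count;
}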
/// <summary>
/// Deserializes for one timestamp - v1.
/// </summary>
/// <param name="reader">The reader.</param>
/// <param name="definition">The definition.</param>
/// <param name="sparseData">if set to <c>true</c>, the series is sparse.</param>
/// <param name="values">The metric values.</param>
/// <param name="sampleTypesWithMetricValueTypeLong">The sampling types whose metric value type is long.</param>
/// <param name="priorValidValues">The prior valid values.</param>
/// <param name="scalingFactor">The scaling factor.</param>
private static void DeserializeForOneTimestampV1(
    BinaryReader reader,
    TimeSeriesDefinition<MetricIdentifier> definition,
    bool sparseData,
    List<List<double?>> values,
    bool[] sampleTypesWithMetricValueTypeLong,
    double[] priorValidValues,
    uint scalingFactor)
{
    // For sparse data, we don't serialize null values, but we do serialize the number of null values between two serialized data points
    // so that we can restore those null values on the client side.
    if (sparseData)
    {
        var numMissingDatapointsSinceLastOne = SerializationUtils.ReadUInt32FromBase128(reader);
        FillNulls(numMissingDatapointsSinceLastOne, values);
    }

    for (int samplingTypeIndex = 0; samplingTypeIndex < values.Count; samplingTypeIndex++)
    {
        if (sampleTypesWithMetricValueTypeLong[samplingTypeIndex])
        {
            // Variable length encoding with differences.
            var delta = SerializationUtils.ReadInt64FromBase128(reader);
            if (delta == long.MaxValue)
            {
                // Padding
                values[samplingTypeIndex].Add(null);
            }
            else
            {
                // We request unscaled values from the server and apply the scaling factor on the client.
                var unScaledValue = delta + priorValidValues[samplingTypeIndex];
                if (IsCountSamplingType(definition, samplingTypeIndex))
                {
                    values[samplingTypeIndex].Add(unScaledValue);
                }
                else
                {
                    values[samplingTypeIndex].Add(unScaledValue / scalingFactor);
                }

                priorValidValues[samplingTypeIndex] = unScaledValue;
            }
        }
        else
        {
            // We don't encode double in v1, so even a delta is still 8 bytes.
            var delta = reader.ReadDouble();
            if (double.IsNaN(delta))
            {
                // Padding
                values[samplingTypeIndex].Add(null);
            }
            else
            {
                var unScaledValue = delta + priorValidValues[samplingTypeIndex];
                if (IsCountSamplingType(definition, samplingTypeIndex))
                {
                    values[samplingTypeIndex].Add(unScaledValue);
                }
                else
                {
                    values[samplingTypeIndex].Add(unScaledValue / scalingFactor);
                }

                priorValidValues[samplingTypeIndex] = unScaledValue;
            }
        }
    }
}
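// Illustrative sketch (not library code): the v1 reconstruction arithmetic for a single long-typed,
// non-Count sampling type. The server sends unscaled integer deltas; the client accumulates them against
// the prior value and divides by the scaling factor, exactly as DeserializeForOneTimestampV1 does above.
// The deltas and scaling factor are made-up illustration values.
public static void V1DeltaDecodingSketch()
{
    long[] deltas = { 1500, 25, -100 };   // hypothetical decoded wire values (base-128 encoded on the wire)
    uint scalingFactor = 1000;            // hypothetical scaling factor sent by the server

    double priorValidValue = 0;
    var decoded = new List<double?>();

    foreach (var delta in deltas)
    {
        var unScaledValue = delta + priorValidValue;
        decoded.Add(unScaledValue / scalingFactor);   // 1.5, 1.525, 1.425
        priorValidValue = unScaledValue;
    }

    Console.WriteLine(string.Join(", ", decoded));
}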
/// <summary>
/// Deserializes one series.
/// </summary>
/// <param name="version">The serialization version.</param>
/// <param name="reader">The reader.</param>
/// <param name="definition">The definition.</param>
/// <param name="numSamplingTypesRequested">The number of sampling types requested.</param>
/// <returns>A <see cref="DeserializedRawData"/> object.</returns>
private static DeserializedRawData DeserializeOneSeries(byte version, BinaryReader reader, TimeSeriesDefinition<MetricIdentifier> definition, int numSamplingTypesRequested)
{
    // Read the number of data points.
    var totalNumberOfDatapoints = SerializationUtils.ReadInt32FromBase128(reader);
    if (totalNumberOfDatapoints < 0)
    {
        // Failed to query the series. We reuse the same byte(s) for totalNumberOfDatapoints for error codes.
        return new DeserializedRawData(0, 0, null, (TimeSeriesErrorCode)totalNumberOfDatapoints);
    }

    // Read the number of missing data points or those with null values so that we can get the total size of data points next.
    // In v2 and beyond, totalNumberMissingDatapoints is always set to 0 since each missing data point in a very sparse series just uses a single bit for padding,
    // and scaling is done on the server side.
    uint totalNumberMissingDatapoints = 0;
    uint scalingFactor = 1;
    if (version == 1)
    {
        totalNumberMissingDatapoints = SerializationUtils.ReadUInt32FromBase128(reader);

        // We want to apply the scaling factor on the client so that the server can return metric values of type long with variable length encoding.
        scalingFactor = SerializationUtils.ReadUInt32FromBase128(reader);
    }

    var numberDatapointsToDeserialize = totalNumberOfDatapoints - (int)totalNumberMissingDatapoints;
    var numSamplingTypes = definition?.SamplingTypes.Length ?? numSamplingTypesRequested;

    var values = new List<List<double?>>(numSamplingTypes);
    for (int index = 0; index < numSamplingTypes; index++)
    {
        values.Add(new List<double?>(totalNumberOfDatapoints));
    }

    // We use deltas/differences as compared with the prior data point.
    // Although deltas/differences don't help values of type double, they will when we encode the double type in the future.
    var priorValidValues = new double[numSamplingTypes];
    var sampleTypesWithMetricValueTypeLong = new bool[numSamplingTypes];

    // Used for implementing the Gorilla algorithm.
    sbyte[] currentBlockLeadingZeros = new sbyte[numSamplingTypes];
    sbyte[] currentBlockTrailingZeros = new sbyte[numSamplingTypes];

    for (int index = 0; index < numSamplingTypes; index++)
    {
        priorValidValues[index] = 0;
        currentBlockLeadingZeros[index] = -1;
        currentBlockTrailingZeros[index] = -1;

        if (definition == null || IsMetricValueTypeLong(definition.SamplingTypes[index], definition.SeriesResolutionInMinutes, definition.AggregationType))
        {
            sampleTypesWithMetricValueTypeLong[index] = true;
        }
    }

    var sparseData = totalNumberMissingDatapoints > 0;

    var bitReader = new BitBinaryReader(reader);
    for (int d = 0; d < numberDatapointsToDeserialize; ++d)
    {
        if (version == 1)
        {
            DeserializeForOneTimestampV1(reader, definition, sparseData, values, sampleTypesWithMetricValueTypeLong, priorValidValues, scalingFactor);
        }
        else
        {
            DeserializeForOneTimestampV2AndAbove(version, bitReader, definition, values, d, currentBlockLeadingZeros, currentBlockTrailingZeros, priorValidValues);
        }
    }

    // Fill the remaining missing data points at the tail of the series.
    if (sparseData)
    {
        if (values[0].Count < totalNumberOfDatapoints)
        {
            var numNullsToFill = totalNumberOfDatapoints - values[0].Count;
            FillNulls((uint)numNullsToFill, values);
        }
    }

    // Start time can be adjusted for the distinct count metric or when the rollup service is enabled.
    var deltaOfStartTimeInMinutes = SerializationUtils.ReadInt32FromBase128(reader);

    // The resolution window can be adjusted for the distinct count metric or when the rollup service is enabled.
    var deltaOfResolutionWindowInMinutes = SerializationUtils.ReadInt32FromBase128(reader);

    Debug.Assert(values[0].Count == totalNumberOfDatapoints, $"{values[0].Count} != {totalNumberOfDatapoints}, and this indicates a bug somewhere.");

    return new DeserializedRawData(deltaOfStartTimeInMinutes, deltaOfResolutionWindowInMinutes, values, TimeSeriesErrorCode.Success);
}
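// Plausible sketch (an assumption, not the library's actual code) of the FillNulls helper referenced by the
// two deserializers above: pad every sampling type's value list with the given number of nulls so that
// missing data points reappear at the correct positions on the client.
private static void FillNulls(uint count, List<List<double?>> values)
{
    for (int i = 0; i < count; i++)
    {
        foreach (var seriesValues in values)
        {
            seriesValues.Add(null);
        }
    }
}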
/// <summary>
/// Gets the monitor health history for the given monitor instance definition.
/// </summary>
/// <param name="startTimeUtc">The start time in UTC.</param>
/// <param name="endTimeUtc">The end time in UTC.</param>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>The monitor health history as a time series of nullable booleans.</returns>
public async Task<TimeSeries<MonitorIdentifier, bool?>> GetMonitorHistoryAsync(
    DateTime startTimeUtc,
    DateTime endTimeUtc,
    TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
    if (monitorInstanceDefinition == null)
    {
        throw new ArgumentNullException(nameof(monitorInstanceDefinition));
    }

    if (startTimeUtc > endTimeUtc)
    {
        throw new ArgumentException(string.Format("startTimeUtc [{0}] must be <= endTimeUtc [{1}]", startTimeUtc, endTimeUtc));
    }

    // Truncate the start and end times down to whole minutes.
    startTimeUtc = new DateTime(startTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);
    endTimeUtc = new DateTime(endTimeUtc.Ticks / TimeSpan.TicksPerMinute * TimeSpan.TicksPerMinute);

    string dimensionsFlattened = null;
    if (monitorInstanceDefinition.DimensionCombination != null)
    {
        dimensionsFlattened = string.Join(
            "/",
            monitorInstanceDefinition.DimensionCombination.Select(
                d => string.Join("/", SpecialCharsHelper.EscapeTwice(d.Key), SpecialCharsHelper.EscapeTwice(d.Value))));
    }

    string operation = $"{this.HealthRelativeUrl}/history";

    string url = string.Format(
        "{0}{1}/monitoringAccount/{2}/metricNamespace/{3}/metric/{4}/monitorId/{5}/from/{6}/to/{7}{8}",
        this.connectionInfo.GetEndpoint(monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount),
        operation,
        monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount,
        SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MetricIdentifier.MetricNamespace),
        SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MetricIdentifier.MetricName),
        SpecialCharsHelper.EscapeTwice(monitorInstanceDefinition.Id.MonitorId),
        UnixEpochHelper.GetMillis(startTimeUtc),
        UnixEpochHelper.GetMillis(endTimeUtc),
        dimensionsFlattened != null ? "/" + dimensionsFlattened : string.Empty);

    var response = await this.GetResponseAsStringDelegate(
        new Uri(url),
        HttpMethod.Get,
        this.httpClient,
        monitorInstanceDefinition.Id.MetricIdentifier.MonitoringAccount,
        operation,
        null,
        string.Empty,
        null,
        null,
        MetricQueryResponseDeserializer.CurrentVersion,
        NumAttempts).ConfigureAwait(false);

    var values = JsonConvert.DeserializeObject<List<bool?>>(response.Item1);

    return new TimeSeries<MonitorIdentifier, bool?>(
        startTimeUtc,
        endTimeUtc,
        SerializationConstants.DefaultSeriesResolutionInMinutes,
        monitorInstanceDefinition,
        new List<List<bool?>> { values },
        TimeSeriesErrorCode.Success);
}
/// <summary>
/// Gets the current heath status asynchronously.
/// Deprecated due to wrong spelling ("Heath" instead of "Health"); exists for backward compatibility.
/// </summary>
/// <param name="monitorInstanceDefinition">The monitor instance definition.</param>
/// <returns>
/// Monitor health status.
/// </returns>
public async Task<IMonitorHealthStatus> GetCurrentHeathStatusAsync(TimeSeriesDefinition<MonitorIdentifier> monitorInstanceDefinition)
{
    return await this.GetCurrentHealthStatusAsync(monitorInstanceDefinition).ConfigureAwait(false);
}