/// <summary>State object handed to the async probe callback; bundles everything
/// ProcessResults needs to identify the probe and read its results.</summary>
/// <param name="timeTableId">schedule entry that triggered the probe</param>
/// <param name="target">target instance being probed</param>
/// <param name="metricGroup">metric group being collected</param>
/// <param name="sqlCommand">command whose EndExecuteReader is called in the callback</param>
public ProbeResultingCallbackStateObject(int timeTableId, Target target, MetricGroup metricGroup, SqlCommand sqlCommand)
{
    this.TimeTableId = timeTableId;
    this.Target = target;
    this.MetricGroup = metricGroup;
    this.SqlCommand = sqlCommand;
}
} // end of GenerateSqlSlowDict method
#endregion private static methods declarations

#region private methods declarations

/// <summary>Saves single row & slow changing metric</summary>
/// <remarks>Compares the probe snapshot against the cached previous values, writes either
/// an endDate UPDATE (unchanged) or an INSERT (changed/new), then refreshes the cache row.</remarks>
private void WriteSlowSingleRowToRepository(int targetId, MetricGroup metricGroup, ProbeResultingData data)
{
    int dataMatches;
    string dataSqlStmt;
    object[] newValues;

    // compare with in-memory data
    // -1 = no cached row for this target, 0 = matches cache
    //   (meaning of other return codes is defined by CompareSlowSingleRowWithInMemoryData — confirm there)
    dataMatches = this.CompareSlowSingleRowWithInMemoryData(targetId, metricGroup, data, this.reposConn);

    // generate SQL statement
    dataSqlStmt = GenerateSqlSingleRowSlow(targetId, metricGroup, dataMatches, data);
    _logger.Trace(dataSqlStmt);
    SqlServerProbe.ExecuteSql(dataSqlStmt, targetId, metricGroup);

    // update in-memory data: copy the first (only) row of the probe results
    newValues = new object[data.NumberOfColumns];

    for (int i = 0; i < data.NumberOfColumns; i++)
    {
        newValues[i] = data.values[0, i];
    }

    if (dataMatches == -1)
    {
        // first sample for this target: add a new cache row keyed by targetId
        InMemoryCache.Add(InMemoryCache.GetCacheKey(targetId, metricGroup), -1, new object[] { targetId }, newValues);
    }
    else
    {
        // refresh the existing cache row in place
        Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup)].UpdateRowValues(new object[] { targetId }, newValues);
    }
} // end of WriteSlowSingleRowToRepository function
} // end of WriteSlowSingleRowToRepository function

/// <summary>Saves single row & fast changing metric</summary>
/// <remarks>Inserts the snapshot into the target's data table, then refreshes the shared
/// single-row cache (keyed with targetId -1) so current values can be read without a query.</remarks>
private void WriteFastSingleRowToRepository(int targetId, MetricGroup metricGroup, ProbeResultingData data)
{
    // generate SQL statement
    string dataSqlStmt = GenerateSqlSingleRowFast(targetId, metricGroup, data);
    _logger.Trace(dataSqlStmt);
    SqlServerProbe.ExecuteSql(dataSqlStmt, targetId, metricGroup);

    // update in-memory data: copy the first (only) row of the probe results
    object[] newValues = new object[data.NumberOfColumns];

    for (int i = 0; i < data.NumberOfColumns; i++)
    {
        newValues[i] = data.values[0, i];
    }

    // create in-memory cache table if it doesn't exist
    // (single-row metrics share one cache across all targets, hence targetId -1 in the key)
    string cacheKey = InMemoryCache.GetCacheKey(-1, metricGroup);

    if (!InMemoryCache.ContainsKey(cacheKey))
    {
        // Do no create new cache table if target has been deleted
        if (!Configuration.targets.ContainsKey(targetId))
        {
            return;
        }

        InMemoryCache.CreateCacheTableSingleRow(metricGroup, CacheType.Data);
    }

    Configuration.inMemoryCache[cacheKey].AddOrUpdateRowValues(-1, new object[] { targetId }, newValues);
} // end of WriteFastSingleRowToRepository function
} // end of GenerateFastSqlSingleRow function

// Generates INSERT statement for a single data row - static dictionary
/// <summary>Builds a multi-row INSERT that adds the dictionary-key values of the probe rows
/// listed in <paramref name="rowsNotInDict"/> to the target's dictionary table.</summary>
/// <remarks>Assumes at least one multi-row key column and a non-empty rowsNotInDict
/// (otherwise the trailing-comma trimming removes the wrong character). Value quoting and
/// escaping are delegated to SqlServerProbe.DataValueToString.</remarks>
private static string GenerateSqlStaticDict(int targetId, MetricGroup metricGroup, ProbeResultingData data, List<int> rowsNotInDict)
{
    string sqlStmt = "INSERT INTO " + SqlServerProbe.DictTableName(targetId, metricGroup) + " (";

    // one key column per line
    for (int i = 0; i < metricGroup.NumberOfMultiRowKeys; i++)
    {
        sqlStmt += Environment.NewLine + metricGroup.multiRowKeys[i].name.Replace(' ', '_') + ",";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 1); // remove last comma
    sqlStmt += ")" + Environment.NewLine;
    sqlStmt += "VALUES " + Environment.NewLine;

    // one parenthesized tuple per probe row that is missing from the dictionary
    foreach (int i in rowsNotInDict)
    {
        sqlStmt += "(";

        for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
        {
            sqlStmt += SqlServerProbe.DataValueToString(metricGroup.multiRowKeys[j].type, data.values[i, j]) + ",";
        }

        sqlStmt = sqlStmt.Remove(sqlStmt.Length - 1); // remove last comma
        sqlStmt += "),";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 1); // remove last comma
    return (sqlStmt);
} // end of GenerateSqlStaticDict method
} // end of GenerateSlowSqlSingleRow function

// Generates INSERT statement for a single data row - fast changing metric
/// <summary>Builds the INSERT statement storing one timestamped snapshot of a
/// fast-changing single-row metric group.</summary>
/// <exception cref="Exception">when the metric count disagrees with the probe column count</exception>
private static string GenerateSqlSingleRowFast(int targetId, MetricGroup metricGroup, ProbeResultingData data)
{
    // FIX: validate before building anything (the original checked only after
    // constructing the column list) and align the message with GenerateSqlSingleRowSlow.
    if (metricGroup.NumberOfMetrics != data.NumberOfColumns)
    {
        throw new Exception("Number of metrics doesn't match number of columns in probe results");
    }

    string dataSqlStmt = "INSERT INTO " + SqlServerProbe.DataTableName(targetId, metricGroup) + " (dt,";

    for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
    {
        dataSqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ",";
    }

    dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1); // remove last comma
    dataSqlStmt += ")" + Environment.NewLine + "VALUES ('" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "',";

    // add metric values; metrics sit after the multi-row key/attribute columns in data.values
    for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
    {
        dataSqlStmt += SqlServerProbe.DataValueToString(metricGroup.metrics[i].type, data.values[0, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + i]) + ",";
    }

    dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1); // remove last comma
    dataSqlStmt += ")";

    return (dataSqlStmt);
} // end of GenerateFastSqlSingleRow function
/// <summary>Creates a copy of this profile. Scalar and string members are copied directly;
/// ProfileRate, ComparerOptions and MetricGroup are cloned via their own Clone methods.
/// NOTE(review): throws NullReferenceException if any of those three is null — confirm
/// they are always initialized before Clone is called.</summary>
public object Clone()
{
    var cpi = new CompanyProfile
    {
        Id = Id,
        Name = Name,
        Street = Street,
        City = City,
        State = State,
        Zip = Zip,
        Country = Country,
        TaxCode = TaxCode,
        VatCode = VatCode,
        Email = Email,
        Web = Web,
        Phone = Phone,
        Mobile = Mobile,
        Note = Note,
        ContactName = ContactName,
        ContactEmail = ContactEmail,
        ContactPhone = ContactPhone,
        ProfileRate = (CompanyProfileRate)ProfileRate.Clone(),
        ComparerOptions = (ComparerSettings)ComparerOptions.Clone(),
        MetricGroup = (QualityMetricGroup)MetricGroup.Clone()
    };
    return (cpi);
}
} // end of GetCacheKey method

/// <summary>Fetches the latest cached values of the given metrics for a target.</summary>
/// <returns>true when a cached row was found (see GetCurrentValuesSingleRow)</returns>
/// <exception cref="Exception">for multi-row metric groups, which are not supported yet</exception>
public static bool GetCurrentValues(int targetId, MetricGroup metricGroup, string[] metrics, out DataRow data)
{
    if (metricGroup.isMultiRow)
    {
        throw new Exception("Only single-row metric group has been implemented so far");
    }

    return (GetCurrentValuesSingleRow(targetId, metricGroup, metrics, out data));
}
/// <summary>Maps one MetricValue to a HostMetricValue, qualifying the metric name with
/// its group: "&lt;group&gt;" when the value is unnamed, otherwise "&lt;group&gt;.&lt;normalized name&gt;".</summary>
private static HostMetricValue ToHostMetricValue(string hostId, long time, MetricGroup metricGroup, MetricValue metricValue)
{
    // NoamalizeName is the project helper's actual (misspelled) name.
    string qualifiedName = metricGroup.name;
    if (!string.IsNullOrEmpty(metricValue.name))
    {
        qualifiedName += "." + metricValue.name.NoamalizeName();
    }

    return new HostMetricValue()
    {
        hostId = hostId,
        time = time,
        name = qualifiedName,
        value = metricValue.value
    };
}
} // end of LoadDataIntoCache function
#endregion public method declarations

#region private method declarations

/// <summary>Reads the latest cached metric values for one target into a DataRow.</summary>
/// <param name="targetId">cache rows are keyed by target id</param>
/// <param name="metricGroup">single-row metric group; cumulative groups share one cache (key -1)</param>
/// <param name="metrics">names of the metrics to return</param>
/// <param name="data">receives one DataValue per requested metric; typed nulls when not cached</param>
/// <returns>true when a cached row was found; false when the cache or the row is missing</returns>
private static bool GetCurrentValuesSingleRow(int targetId, MetricGroup metricGroup, string[] metrics, out DataRow data)
{
    data = null;
    object[] values = null;
    CacheTable cache = null;

    // Do not load data from repository if it has not been done yet. This means no current stats are available yet.
    if (metricGroup.isCumulative)
    {
        if (ContainsKey(GetCacheKey(-1, metricGroup)))
        {
            cache = _cache[GetCacheKey(-1, metricGroup)];
        }
    }
    else
    {
        if (ContainsKey(GetCacheKey(targetId, metricGroup)))
        {
            cache = _cache[GetCacheKey(targetId, metricGroup)];
        }
    }

    data = new DataRow();

    // get values by key (targetId)
    if (cache != null)
    {
        int id = cache.GetIdByKey(new object[] { (object)targetId });

        if (id != -1)
        {
            values = cache[id];
        }
    }

    if (values == null)
    {
        // no cached row: still populate data with typed null values so callers get a full row
        foreach (string metricName in metrics)
        {
            data.Add(metricName, new DataValue(metricGroup[metricName].type, null));
        }

        return (false);
    }

    foreach (string metricName in metrics)
    {
        // +1 offset into the cached value array
        // NOTE(review): assumed to skip a leading key slot — confirm against CacheTable layout
        data.Add(metricName, new DataValue(cache.GetColumnMetadataByName(metricName).type,
            values[cache.GetValueColumnIdByName(metricName) + 1]
            )
            );
    }

    return (true);
}
/// <summary>Opens a "module" element in the coverage report. Only valid for the module
/// metric group (enforced by Check.IsEqual).</summary>
public void StartItem(MetricGroup group, MetricCounter item)
{
    Check.IsEqual("module", group.Name);

    CodeMetric info = item.Group;
    WriteLine("<module moduleId='{0}' name='{1}' assembly='{2}' assemblyIdentity='{3}'>",
        _moduleId++, // sequential id assigned per module encountered
        XmlEncode(info.FileName),
        XmlEncode(info.AssemblyName),
        XmlEncode(info.AssemblyFullName)
        );
}
/// <summary>Writes the XML prolog, the stylesheet instruction and the opening
/// coverage element with version and timing attributes.</summary>
private void WriteHeader(XmlParser parser)
{
    MetricGroup methods = parser.ByMethod; // NOTE(review): unused local — kept as-is
    _writer.WriteLine("<?xml version='1.0' encoding='utf-8'?>");
    _writer.WriteLine("<?xml-stylesheet href='coverage.xsl' type='text/xsl'?>");
    WriteLine("<coverage profilerVersion='{0}' driverVersion='{1}' startTime='{2}' measureTime='{3}'>",
        parser.VersionInfo,
        parser.VersionDriver,
        XmlConvert.ToString(parser.StartTime, XmlDateTimeSerializationMode.RoundtripKind),
        // NOTE(review): measureTime is written from StartTime as well — confirm a separate
        // measure/end time is not available on XmlParser
        XmlConvert.ToString(parser.StartTime, XmlDateTimeSerializationMode.RoundtripKind)
        );
}
/// <summary>Maps this object onto an entity: updates the supplied entity in place,
/// or creates a new MetricGroup when none is given.</summary>
private MetricGroup Map(MetricGroup entity)
{
    if (entity != null)
    {
        Mapper.Map(this, entity); // update the existing entity in place
        return (entity);
    }

    return (Mapper.Map<MetricGroup>(this)); // new metricgroup
}
/// <summary>Runs the aggregation pipeline and converts each result document to a
/// MetricGroup: Y = "Count", X = last two digits of the integer "Date" field.</summary>
private List<MetricGroup> GropUtil(BsonDocument[] pipeline)
{
    var aggregation = collection.Aggregate(pipeline);
    var groups = new List<MetricGroup>();

    foreach (BsonDocument document in aggregation.ResultDocuments)
    {
        int date = (int)document["Date"];
        var group = new MetricGroup();
        group.Y = (int)document["Count"];
        group.X = (date % 100).ToString();
        groups.Add(group);
    }

    return (groups);
}
/// <summary>Writes a method element with all of its sequence points to the coverage report.</summary>
/// <param name="close">unused — the method element is always written and closed here</param>
void WriteItem(MetricGroup group, MetricCounter item, bool close)
{
    CodeMetric info = item.Group;
    List<CodeMetric> seqpnts = item.Seqpnts;

    // A method counts as instrumented if any point is; excluded only if every point is.
    bool anyInstruments = false;
    bool allExcluded = true;

    foreach (CodeMetric metric in seqpnts)
    {
        anyInstruments |= metric.Instrumented;
        allExcluded &= metric.Excluded;
    }

    WriteLine("<method name='{0}' excluded='{5}' instrumented='{4}' class='{1}{2}{3}'>",
        XmlEncode(info.MethodName),
        XmlEncode(info.Namespace),
        String.IsNullOrEmpty(info.Namespace) ? String.Empty : ".",
        XmlEncode(info.Class),
        anyInstruments ? TRUE : FALSE,
        allExcluded ? TRUE : FALSE
        );

    foreach (CodeMetric metric in seqpnts)
    {
        if (metric.Excluded)
        {
            continue; // excluded points are omitted from the report entirely
        }

        WriteLine("<seqpnt visitcount='{0}' line='{1}' column='{2}' endline='{3}' endcolumn='{4}' excluded='{5}' document='{6}' />",
            metric.VisitCount,
            metric.Line,
            metric.Column,
            metric.EndLine,
            metric.EndColumn,
            metric.Excluded ? TRUE : FALSE, // always FALSE here due to the continue above
            XmlEncode(metric.SrcFile)
            );
    }

    WriteLine("</method>");
}
} // end of CreateCacheTableSingleRowRealtime

/// <summary> Returns cache key name
/// For single-row metric cache is common for all targets (table name only, no schema)
/// Multi-row metrics each have its own cache </summary>
/// <param name="targetId">target id or -1 for single row metrics</param>
/// <param name="metricGroup">metric group</param>
/// <param name="cacheType">data/dictionary</param>
public static string GetCacheKey(int targetId, MetricGroup metricGroup, CacheType cacheType = CacheType.Data)
{
    if (cacheType == CacheType.Data)
    {
        // multi-row data is cached per target; single-row data shares one table-wide cache
        if (metricGroup.isMultiRow)
        {
            return (SqlServerProbe.DataTableName(targetId, metricGroup));
        }

        return (metricGroup.dataTableName);
    }

    if (cacheType == CacheType.Dictionary)
    {
        return (SqlServerProbe.DictTableName(targetId, metricGroup));
    }

    throw new Exception("Unsupported cache type");
} // end of GetCacheKey method
} // end of ProcessQueue function
#endregion public methods declarations

#region private static methods declarations

/// <summary>Generates UPDATE or INSERT statement for a single data row - slow changing metric</summary>
/// <param name="dataMatches">0 when the probe data equals the stored row (only endDate is
/// extended); any other value inserts a new row with startDate = endDate = probe time</param>
private static string GenerateSqlSingleRowSlow(int targetId, MetricGroup metricGroup, int dataMatches, ProbeResultingData data)
{
    string dataSqlStmt;

    if (dataMatches == 0) // just update endDate when current data matches one stored in repository
    {
        // the latest row is identified by the greatest startDate
        dataSqlStmt = "UPDATE " + SqlServerProbe.DataTableName(targetId, metricGroup)
            + " SET endDate = '" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "'"
            + " WHERE startDate = (SELECT MAX(startDate) FROM " + SqlServerProbe.DataTableName(targetId, metricGroup) + ")";
    }
    else
    {
        if (metricGroup.NumberOfMetrics != data.NumberOfColumns)
        {
            throw new Exception("Number of metrics doesn't match number of columns in probe results");
        }

        dataSqlStmt = "INSERT INTO " + SqlServerProbe.DataTableName(targetId, metricGroup) + " (";

        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            dataSqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ",";
        }

        // the trailing comma from the loop above is intentionally kept: startDate/endDate follow
        dataSqlStmt += "startDate,endDate)" + Environment.NewLine + "VALUES (";

        // add metric values; metrics sit after the multi-row key/attribute columns in data.values
        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            dataSqlStmt += SqlServerProbe.DataValueToString(metricGroup.metrics[i].type, data.values[0, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + i]) + ",";
        }

        // startDate,endDate
        dataSqlStmt += "'" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "','" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "')";
    }

    return (dataSqlStmt);
} // end of GenerateSlowSqlSingleRow function
/// <summary>Creates a new metric group. Rejects a missing/unbindable body, an invalid
/// model, or a duplicate name; assigns a fresh Guid when none is supplied.</summary>
/// <returns>201 with the created group, or 400 on validation failure</returns>
public IActionResult Create([FromBody] MetricGroup group)
{
    // FIX: [FromBody] binding can leave group null; the original dereferenced it
    // (group.Name) and crashed with a NullReferenceException -> 500 instead of 400.
    if (group == null)
    {
        return (BadRequest("Request body must contain a metric group"));
    }

    if (!ModelState.IsValid)
    {
        return (BadRequest(ModelState));
    }

    var existing = this.MetricGroupRepository
        .FindBy(t => t.Name.Equals(group.Name))
        .FirstOrDefault();

    if (existing != null)
    {
        return (BadRequest($"Metric group with name {group.Name} already exists"));
    }

    // assign a server-side id when the client did not provide one
    if (!group.Id.HasValue || group.Id == Guid.Empty)
    {
        group.Id = Guid.NewGuid();
    }

    this.MetricGroupRepository.Add(group);

    return (CreatedAtAction(nameof(Get), new { name = group.Name }, group));
}
/// <summary>No-op: this writer emits nothing at the end of a metric group.</summary>
public void StopGroup(MetricGroup group) {}
/// <summary>No-op: this writer emits nothing at the start of a metric group.</summary>
public void StartGroup(MetricGroup group) {}
/// <summary>Writes a method element listing the source text of every sequence point that
/// was instrumented but never visited (i.e. unexecuted code).</summary>
/// <param name="close">unused — the method element is always written and closed here</param>
void WriteItem(MetricGroup group, MetricCounter item, bool close)
{
    CodeMetric info = item.Group;
    List<CodeMetric> seqpnts = item.Seqpnts;

    // collect instrumented, non-excluded points with zero visits
    List<CodeMetric> unused = new List<CodeMetric>();
    foreach (CodeMetric metric in seqpnts)
    {
        if (!metric.Excluded && metric.Instrumented && metric.VisitCount == 0)
            unused.Add(metric);
    }

    // fully covered methods produce no output at all
    if (unused.Count == 0)
        return;

    WriteLine("<method name='{0}' class='{1}{2}{3}'>",
        XmlEncode(info.MethodName),
        XmlEncode(info.Namespace),
        String.IsNullOrEmpty(info.Namespace) ? String.Empty : ".",
        XmlEncode(info.Class)
        );

    unused.Sort();

    foreach (CodeMetric metric in unused)
    {
        // convert 1-based coverage coordinates to 0-based indices
        int lineStart = metric.Line - 1;
        int lineEnd = metric.EndLine - 1;
        int colStart = Math.Max(metric.Column - 1, 0);
        int colEnd = Math.Max(metric.EndColumn - 1, 0);

        string[] src;
        if (!_sourceFiles.TryGetValue(metric.SrcFile, out src))
        {
            try
            {
                src = File.ReadAllLines(metric.SrcFile);
            }
            catch (FileNotFoundException)
            {
                src = new string[0]; // missing file: emit empty snippets rather than failing
            }
            // FIX: cache the lines; the original never stored them, so the file was
            // re-read from disk for every sequence point.
            _sourceFiles[metric.SrcFile] = src;
        }

        // extract the snippet spanning [lineStart..lineEnd], trimmed to the point's columns
        StringBuilder sb = new StringBuilder();
        for (int ix = lineStart; ix < src.Length && ix < lineEnd; ix++)
        {
            string line = src[ix];
            if (ix == lineStart)
                line = line.Substring(Math.Min(colStart, line.Length));
            sb.AppendLine(line);
        }
        if (lineEnd < src.Length)
        {
            string line = src[lineEnd];
            int start = Math.Min(line.Length, lineStart == lineEnd ? colStart : 0);
            int end = Math.Min(line.Length, Math.Max(start, colEnd));
            sb.AppendLine(line.Substring(start, end - start));
        }

        WriteLine("<seqpnt src='{0}'/>", XmlEncode(sb.ToString().Trim()));
    }

    WriteLine("</method>");
}
// Loads dictionary from repository into in-memory cache. Creates a new record in dictionaryCache
/// <summary>Loads (or reloads) a metric group's dictionary table for one target into the
/// in-memory cache. Retries once after creating the tables when they do not exist yet
/// (SQL error 208). Silently returns when the target has been deleted.</summary>
/// <param name="allowReload">false skips the query when the cache was already loaded</param>
/// <param name="connection">optional caller-supplied connection; a dedicated one is opened otherwise</param>
/// <param name="transaction">optional transaction to run the query under</param>
public static void LoadDictionaryIntoCache(int targetId, MetricGroup metricGroup, bool allowReload, SqlConnection connection = null, SqlTransaction transaction = null)
{
    string cacheKey;
    SqlConnection conn;
    int tryCount = 0;
    bool canExit = false;

    // create new in-memory cache for dictionary
    if (!ContainsKey(GetCacheKey(targetId, metricGroup, CacheType.Dictionary)))
    {
        Dictionary<int, Column> keyColumns = new Dictionary<int, Column>();
        Dictionary<int, Column> attrColumns = new Dictionary<int, Column>();

        // key columns
        for (int i = 0; i < metricGroup.NumberOfMultiRowKeys; i++)
        {
            keyColumns.Add(i, metricGroup.multiRowKeys[i]);
        }

        // attribute columns
        for (int i = 0; i < metricGroup.NumberOfMultiRowKeyAttributes; i++)
        {
            attrColumns.Add(i, metricGroup.multiRowKeyAttributes[i]);
        }

        TryAdd(GetCacheKey(targetId, metricGroup, CacheType.Dictionary), new CacheTable(keyColumns, attrColumns, false));
    }

    cacheKey = GetCacheKey(targetId, metricGroup, CacheType.Dictionary);
    CacheTable tmpCache = _cache[cacheKey].CloneAndClear();

    // don't reload cache unless allowReload is specified
    if (allowReload == false && tmpCache.loadedFromDatabase)
    {
        return;
    }

    // SELECT id, <keys...>, <attributes...>[, startDate] FROM <dict table> [WHERE endDate IS NULL]
    string sqlStmt = "SELECT id, ";

    for (int i = 0; i < metricGroup.NumberOfMultiRowKeys; i++)
    {
        sqlStmt += metricGroup.multiRowKeys[i].name.Replace(' ', '_') + ", ";
    }

    for (int i = 0; i < metricGroup.NumberOfMultiRowKeyAttributes; i++)
    {
        sqlStmt += metricGroup.multiRowKeyAttributes[i].name.Replace(' ', '_') + ", ";
    }

    if (metricGroup.multiRowKeyAttributesChangeSpeed == ChangeSpeed.Static)
    {
        sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2); // remove last comma
    }
    else
    {
        sqlStmt += "startDate ";
    }

    sqlStmt += Environment.NewLine + "FROM " + SqlServerProbe.DictTableName(targetId, metricGroup);

    if (metricGroup.multiRowKeyAttributesChangeSpeed == ChangeSpeed.Slow)
    {
        sqlStmt += Environment.NewLine + "WHERE endDate IS NULL"; // only current rows
    }

    _logger.Trace(sqlStmt);

    while (!canExit && tryCount < 2)
    {
        conn = null;
        bool connIsLocal = false;

        try
        {
            if (connection == null)
            {
                conn = new SqlConnection(Configuration.GetReposConnectionString("Cache"));
                conn.Open();
                connIsLocal = true;
            }
            else
            {
                conn = connection;
            }

            using (SqlCommand cmd = conn.CreateCommand())
            {
                cmd.CommandText = sqlStmt;
                cmd.CommandType = System.Data.CommandType.Text;

                if (transaction != null)
                {
                    cmd.Transaction = transaction;
                }

                using (SqlDataReader dataReader = cmd.ExecuteReader())
                {
                    int id;
                    // NOTE(review): these arrays are reused for every row and handed to
                    // tmpCache.Add — assumes CacheTable.Add copies them; confirm.
                    object[] keys = new object[metricGroup.NumberOfMultiRowKeys];
                    object[] values = new object[metricGroup.NumberOfMultiRowKeyAttributes];

                    while (dataReader.Read())
                    {
                        id = (int)dataReader["id"];

                        for (int i = 0; i < metricGroup.NumberOfMultiRowKeys; i++)
                        {
                            keys[i] = dataReader[1 + i];
                        }

                        for (int i = 0; i < metricGroup.NumberOfMultiRowKeyAttributes; i++)
                        {
                            values[i] = dataReader[1 + metricGroup.NumberOfMultiRowKeys + i];
                        }

                        // add record to dictionary
                        switch (metricGroup.multiRowKeyAttributesChangeSpeed)
                        {
                            case ChangeSpeed.Static:
                                tmpCache.Add(id, keys, values);
                                break;
                            case ChangeSpeed.Slow:
                                tmpCache.Add(id, keys, values, (DateTime)dataReader[1 + metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes]);
                                break;
                            default:
                                throw new Exception("Only Static and Slow changing dictionaries are supported");
                        }
                    }

                    tmpCache.loadedFromDatabase = true;
                    Replace(cacheKey, tmpCache);
                    dataReader.Close();
                }
            }

            // FIX: the original never set canExit, so a successful load executed
            // the query a second time on the next loop iteration.
            canExit = true;
        }
        catch (SqlException e)
        {
            if (e.Number == 208) // Invalid object
            {
                // Do not create tables if target has been deleted
                if (!Configuration.targets.ContainsKey(targetId))
                {
                    return;
                }

                SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
            }
            else
            {
                _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
            }
        } // end of catch
        finally
        {
            // FIX: close the locally-opened connection even when the query throws
            // (the original only closed it on the success path, leaking on errors)
            if (connIsLocal && conn != null)
            {
                conn.Close();
            }
        }

        tryCount++;
    } // end of while
} // end of LoadDictionaryIntoCache function
/// <summary>Wraps the probe results in a message and queues it for processing.</summary>
public static void Enqueue(Target target, MetricGroup metricGroup, ProbeResultingData newData)
{
    _dataQueue.Enqueue(new ProbeResultsDataMessage(target, metricGroup, newData));
}
/// <summary>Writes a complete item element (delegates to WriteItem with close = true).</summary>
public void WriteItem(MetricGroup group, MetricCounter item)
{
    WriteItem(group, item, true);
}
/// <summary>Opens an item element without closing it (delegates to WriteItem with close = false).</summary>
public void StartItem(MetricGroup group, MetricCounter item)
{
    WriteItem(group, item, false);
}
} // end of function

// this method is called when query execution is finished
/// <summary>Async callback invoked when a probe query completes: reads the result set into
/// a ProbeResultingData, validates column names and types against the metric group
/// configuration, records the poll time and hands the data to the Analyzer.
/// All exceptions are logged and swallowed; resources are released in finally.</summary>
public override void ProcessResults(IAsyncResult result)
{
    SqlCommand cmd = null;
    SqlConnection con = null;
    SqlDataReader dataReader = null;
    ushort rowsProcessed = 0;
    ushort numOfRows = 1; // single-row groups need exactly one row
    bool columnNamesMatch = true;

    try
    {
        // Retrieve state object
        var stateObj = (ProbeResultingCallbackStateObject)result.AsyncState;

        // Extract data from state object
        cmd = stateObj.SqlCommand;
        int timeTableId = stateObj.TimeTableId;
        Target target = stateObj.Target;
        MetricGroup metricGroup = stateObj.MetricGroup;
        con = cmd.Connection;
        dataReader = cmd.EndExecuteReader(result);

        // prepare ProbeResultingData
        if (metricGroup.isMultiRow)
        {
            numOfRows = DATA_ROWS_INCREMENT; // set initial size
        }

        var data = new ProbeResultingData(numOfRows, (ushort)(metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + metricGroup.NumberOfMetrics));
        // NOTE(review): local wall-clock time is used as the probe timestamp — confirm UTC is not expected
        data.SetProbeDateTime(DateTime.Now);

        // column layout: multi-row keys, then key attributes, then metrics
        for (int i = 0; i < metricGroup.NumberOfMultiRowKeys; i++)
        {
            data.AddColumnHeader(metricGroup.multiRowKeys[i].name, metricGroup.multiRowKeys[i].type);
        }

        for (int i = 0; i < metricGroup.NumberOfMultiRowKeyAttributes; i++)
        {
            data.AddColumnHeader(metricGroup.multiRowKeyAttributes[i].name, metricGroup.multiRowKeyAttributes[i].type);
        }

        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            data.AddColumnHeader(metricGroup.metrics[i].name, metricGroup.metrics[i].type);
        }

        // read results and save in results object
        while (dataReader.Read() && columnNamesMatch)
        {
            // check that column names match configuration (done on the first row only;
            // comparison is case-insensitive and spaces in config names map to underscores)
            if (rowsProcessed == 0)
            {
                for (int i = 0; i < dataReader.FieldCount; i++)
                {
                    if (i < metricGroup.NumberOfMultiRowKeys)
                    {
                        if (string.Compare(dataReader.GetName(i), metricGroup.multiRowKeys[i].name.Replace(' ', '_'), true) != 0)
                        {
                            _logger.Error("Actual name of key column # " + i.ToString() + " [" + dataReader.GetName(i) + "] doesn't match to configuration [" + metricGroup.multiRowKeys[i].name.Replace(' ', '_') + "]");
                            columnNamesMatch = false;
                            break;
                        }
                    }
                    else if (i < metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes)
                    {
                        if (string.Compare(dataReader.GetName(i), metricGroup.multiRowKeyAttributes[i - metricGroup.NumberOfMultiRowKeys].name.Replace(' ', '_'), true) != 0)
                        {
                            _logger.Error("Actual name of key attribute column # " + i.ToString() + " [" + dataReader.GetName(i) + "] doesn't match to configuration [" + metricGroup.multiRowKeyAttributes[i - metricGroup.NumberOfMultiRowKeys].name.Replace(' ', '_') + "]");
                            columnNamesMatch = false;
                            break;
                        }
                    }
                    else
                    {
                        if (string.Compare(dataReader.GetName(i), metricGroup.metrics[i - metricGroup.NumberOfMultiRowKeys - metricGroup.NumberOfMultiRowKeyAttributes].name.Replace(' ', '_'), true) != 0)
                        {
                            _logger.Error("Actual name of column # " + i.ToString() + " [" + dataReader.GetName(i) + "] doesn't match to configuration [" + metricGroup.metrics[i - metricGroup.NumberOfMultiRowKeys - metricGroup.NumberOfMultiRowKeyAttributes].name.Replace(' ', '_') + "]");
                            columnNamesMatch = false;
                            break;
                        }
                    }
                }
            }

            // grow the pre-allocated row buffer when it fills up
            if (rowsProcessed == numOfRows)
            {
                numOfRows += DATA_ROWS_INCREMENT;
                data.ChangeNumOfRows(numOfRows);
            }

            for (int i = 0; i < dataReader.FieldCount; i++)
            {
                // check data type before casting. Data type of a column returned by query may not match the one set up in Configuration
                switch (data.dataTypes[i])
                {
                    case DataType.Ansi:
                        if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Ansi))
                        {
                            throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric (target [" + target.name + "]) does not match any allowed data type for internal data type Ansi");
                        }

                        data.values[rowsProcessed, i] = (object)dataReader.GetString(i);
                        break;
                    case DataType.Unicode:
                        if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Unicode))
                        {
                            throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric (target [" + target.name + "]) does not match any allowed data type for internal data type Unicode");
                        }

                        data.values[rowsProcessed, i] = (object)dataReader.GetString(i);
                        break;
                    case DataType.Double:
                        if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Double))
                        {
                            throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric (target [" + target.name + "]) does not match any allowed data type for internal data type Double");
                        }

                        data.values[rowsProcessed, i] = (object)dataReader.GetDouble(i);
                        break;
                    case DataType.SmallInt:
                        if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.SmallInt))
                        {
                            throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric (target [" + target.name + "]) does not match any allowed data type for internal data type Int16");
                        }

                        data.values[rowsProcessed, i] = (object)dataReader.GetInt16(i);
                        break;
                    case DataType.Datetime:
                        if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Datetime))
                        {
                            throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric (target [" + target.name + "]) does not match any allowed data type for internal data type Datetime");
                        }

                        data.values[rowsProcessed, i] = (object)dataReader.GetDateTime(i);
                        break;
                    default:
                        throw new Exception("Unknown data type");
                } // end of switch
            } // end of for

            rowsProcessed++;
        }

        // trim extra pre-allocated rows
        if (rowsProcessed != numOfRows)
        {
            data.ChangeNumOfRows(rowsProcessed);
        }

        Configuration.timeTable.SetLastPoll(timeTableId, data.probeDateTime);

        // pass msg to Analyzer
        Analyzer.Enqueue(target, metricGroup, data);
    } // end of try
    catch (Exception e)
    {
        _logger.Error("SqlServerProbe.ProcessResults: " + e.Message);
        _logger.Error(e.StackTrace);
    }
    finally
    {
        // release reader, command and connection regardless of outcome
        if (dataReader != null)
        {
            dataReader.Close();
            dataReader.Dispose();
        }

        if (cmd != null)
        {
            cmd.Dispose();
        }

        if (con != null)
        {
            con.Close();
            con.Dispose();
        }
    }
} // end of ProcessResults method
// returns name of data table
/// <summary>Fully-qualified data table name for a target: "&lt;schema&gt;.&lt;table&gt;".</summary>
public static string DataTableName(int targetId, MetricGroup metricGroup)
{
    string schemaName = SchemaName(targetId);
    return (schemaName + "." + metricGroup.dataTableName);
}
/// <summary>No-op: this writer emits nothing at the start of a metric group.</summary>
public void StartGroup(MetricGroup group) { }
/// <summary>Populates the shared single-row realtime cache (key -1) for a metric group by
/// querying the latest row for every scheduled target. Creates missing repository tables
/// on SQL error 208 and retries once per target; other SQL errors abort that target.</summary>
private static void LoadCacheTableFromDatabaseSingleRowRealtime(MetricGroup metricGroup)
{
    SqlConnection conn = null;
    SqlCommand cmd = null;
    SqlDataReader dataReader = null;

    // check whether dictionary is loaded
    if (!ContainsKey(GetCacheKey(-1, metricGroup)))
    {
        CreateCacheTableSingleRow(metricGroup, CacheType.Data);
    }

    CacheTable cache = _cache[GetCacheKey(-1, metricGroup)];

    if (cache.loadedFromDatabase)
    {
        return; // already loaded; nothing to do
    }

    try
    {
        conn = new SqlConnection(Configuration.GetReposConnectionString("Cache"));
        conn.Open();

        int targetId;
        string query;
        // NOTE(review): this array is reused across targets and handed to cache.Add —
        // assumes CacheTable.Add copies it; confirm.
        object[] values = new object[metricGroup.NumberOfMetrics];

        // load data for each target with the specified metric group active
        foreach (InstanceSchedule schedule in Configuration.timeTable.Values)
        {
            targetId = schedule._targetId;

            if (schedule._metricGroupId == metricGroup.id)
            {
                query = QueryToLoadSingleRowRealtime(targetId, metricGroup);
                cmd = null;
                dataReader = null;
                int attempt = 1;
                bool canTry = true;

                // NOTE(review): nothing clears canTry on success, so the query appears to
                // run twice per target (attempt 1 and 2) — confirm whether this is intended.
                while (attempt < 3 && canTry)
                {
                    try
                    {
                        cmd = new SqlCommand(query);
                        cmd.Connection = conn;
                        dataReader = cmd.ExecuteReader();

                        // should return only one row
                        if (dataReader.Read())
                        {
                            for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
                            {
                                values[i] = dataReader[metricGroup.metrics[i].name.Replace(' ', '_')];
                            }

                            cache.Add(-1, new object[] { targetId }, values);
                        }

                        dataReader.Close();
                    }
                    catch (SqlException e)
                    {
                        switch (e.Number)
                        {
                            case 208: // Invalid object
                                // Do not create tables if target has been deleted
                                if (!Configuration.targets.ContainsKey(targetId))
                                {
                                    return;
                                }

                                SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
                                break;
                            default:
                                _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
                                _logger.Error(e.StackTrace);
                                canTry = false; // unrecoverable for this target; stop retrying
                                break;
                        }
                    }
                    finally
                    {
                        // dispose per-attempt objects before the next retry
                        if (dataReader != null)
                        {
                            ((IDisposable)dataReader).Dispose();
                        }

                        if (cmd != null)
                        {
                            ((IDisposable)cmd).Dispose();
                        }
                    }

                    attempt++;
                }
            }
        }

        conn.Close();
    }
    catch (SqlException e)
    {
        _logger.Error(e.Message);
        _logger.Error(e.StackTrace);
    }
    finally
    {
        if (conn != null)
        {
            ((IDisposable)conn).Dispose();
        }
    }

    // NOTE(review): set even when the load above failed with SqlException — confirm
    // whether failed loads should really be marked as loaded (prevents any retry).
    cache.loadedFromDatabase = true;
}
/// <summary>Closes the element named after the metric group (e.g. "&lt;/method&gt;").
/// The item parameter is unused.</summary>
public void StopItem(MetricGroup group, MetricCounter item)
{
    WriteLine("</{0}>", group.Name);
}
/// <summary>Closes the module element. Only valid for the "module" metric group
/// (enforced by Check.IsEqual). The item parameter is unused.</summary>
public void StopItem(MetricGroup group, MetricCounter item)
{
    Check.IsEqual("module", group.Name);
    WriteLine("</module>");
}
/// <summary>Converts an App.Metrics snapshot into Mackerel metric groups. Apdex scores,
/// gauges and counters become Gauge groups; meters become a Counter group with the name
/// suffixed "_total"; histograms become Histogram groups; timers become Timer groups.
/// Each section groups sources by their multidimensional name (falling back to the plain
/// name) and flattens the values via ToMackerelMetrics.
/// NOTE(review): the six loops are structurally identical and a candidate for extraction,
/// left as-is because the source collections have different element types.</summary>
public static IEnumerable<MetricGroup> GetMackerelMetricsSnapshot(
    this MetricsDataValueSource snapshot)
{
    var result = new List<MetricGroup>();

    foreach (var group in snapshot.Contexts)
    {
        // apdex scores -> Gauge
        foreach (var metricGroup in group.ApdexScores.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, metricGroup.Key), type = MetricType.Gauge };
            foreach (var metric in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(metric.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }

        // gauges -> Gauge
        foreach (var metricGroup in group.Gauges.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, metricGroup.Key), type = MetricType.Gauge };
            foreach (var metric in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(metric.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }

        // counters -> Gauge
        foreach (var metricGroup in group.Counters.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, metricGroup.Key), type = MetricType.Gauge };
            foreach (var metric in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(metric.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }

        // meters -> Counter, name suffixed "_total"
        foreach (var metricGroup in group.Meters.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, $"{metricGroup.Key}_total"), type = MetricType.Counter };
            foreach (var metric in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(metric.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }

        // histograms -> Histogram
        foreach (var metricGroup in group.Histograms.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, metricGroup.Key), type = MetricType.Histogram };
            foreach (var timer in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(timer.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }

        // timers -> Timer
        foreach (var metricGroup in group.Timers.GroupBy(
            source => source.IsMultidimensional ? source.MultidimensionalName : source.Name))
        {
            var mackerelMetricGroup = new MetricGroup { name = ToMetricName(group.Context, metricGroup.Key), type = MetricType.Timer };
            foreach (var timer in metricGroup)
            {
                mackerelMetricGroup.metric.AddRange(timer.ToMackerelMetrics());
            }
            result.Add(mackerelMetricGroup);
        }
    }

    return (result);
}
private string GetMaxValueMultiRowCumulative(int targetId, MetricGroup metricGroup, string metric, string[] keysToReturn) { object[] keys; string maxValue = string.Empty; if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data))) { InMemoryCache.LoadDataIntoCache(targetId, metricGroup, false); } CacheTable dataCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data)]; int id = dataCache.GetIdOfMaxValue(metric, metricGroup.metrics[metricGroup.GetMetricIdByName(metric)].type); if (id == -1) { return(maxValue); } if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary))) { InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, false); } CacheTable dictCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary)]; int keyId; foreach (string keyName in keysToReturn) { keyId = metricGroup.GetKeyIdByName(keyName); if (keyId != -1) { keys = dictCache[id]; if (keys == null || keys[keyId] == null) { maxValue += " / "; } else { maxValue += String.Format("{0} / ", keys[keyId]); } } else { keyId = metricGroup.GetKeyAttributeIdByName(keyName); keys = dictCache[id]; if (keys == null || keys[metricGroup.NumberOfMultiRowKeys + keyId] == null) { maxValue += " / "; } else { maxValue += String.Format("{0} / ", keys[metricGroup.NumberOfMultiRowKeys + keyId]); } } } maxValue = maxValue.Remove(maxValue.Length - 3); return(maxValue); }
} // end of Work method

/// <summary>
/// Compacts fast-changing metric data for one target/metric group: rows between
/// <paramref name="archiveFrom"/> and <paramref name="archiveTo"/> are averaged per
/// rounded interval into a temp table, the raw rows are deleted, the averaged rows
/// are re-inserted, and the ArchiveWatermarks row is advanced — all in one transaction.
/// </summary>
/// <param name="targetId">Target whose data table is archived.</param>
/// <param name="archiveOffset">Archive tier supplying the aggregation interval and watermark id.</param>
/// <param name="metricGroup">Metric group being archived; must be fast-changing.</param>
/// <param name="archiveTo">Upper bound (inclusive) of the archived range; becomes the new watermark.</param>
/// <param name="archiveFrom">Lower bound of the archived range (rounded down to the interval).</param>
/// <returns>true on success; false on any error. Marks the repository inaccessible when the connection dropped.</returns>
private bool Archive(int targetId, ArchiveOffset archiveOffset, MetricGroup metricGroup, DateTime archiveTo, DateTime archiveFrom)
{
    // Do not archive static and slow changing metrics
    if (metricGroup.changeSpeed != ChangeSpeed.Fast)
    {
        return false;
    }

    // Compose SQL statement
    // Save aggregated data in a temp table
    string sqlStmt = "SELECT " + RoundDate("dt", archiveOffset.IntervalInSeconds) + " as dt, " + Environment.NewLine;

    // add dictId if the metric group has multiple rows
    if (metricGroup.isMultiRow)
    {
        sqlStmt += "dictId, ";
    }

    // Add AVG(column names)
    foreach (var item in metricGroup.metrics)
    {
        sqlStmt += "AVG(" + item.Value.name.Replace(' ', '_') + ") as " + item.Value.name.Replace(' ', '_') + ", ";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2) + Environment.NewLine; // remove last comma
    sqlStmt += "INTO #AVG_TMP_" + metricGroup.dataTableName + Environment.NewLine;
    sqlStmt += "FROM " + SqlServerProbe.DataTableName(targetId, metricGroup) + Environment.NewLine;
    sqlStmt += "WHERE dt BETWEEN @dateFrom AND @dateTo" + Environment.NewLine;
    sqlStmt += "GROUP BY " + RoundDate("dt", archiveOffset.IntervalInSeconds) + ", ";

    // add dictId if the metric group has multiple rows
    if (metricGroup.isMultiRow)
    {
        sqlStmt += "dictId, ";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2) + ";" + Environment.NewLine + Environment.NewLine; // remove last comma

    // Delete aggregated records
    sqlStmt += "DELETE FROM " + SqlServerProbe.DataTableName(targetId, metricGroup)
        + " WHERE dt BETWEEN @dateFrom AND @dateTo;" + Environment.NewLine + Environment.NewLine;

    // Copy records from the temp table
    sqlStmt += "INSERT INTO " + SqlServerProbe.DataTableName(targetId, metricGroup) + " (dt, ";

    // add dictId if the metric group has multiple rows
    if (metricGroup.isMultiRow)
    {
        sqlStmt += "dictId, ";
    }

    // Add column names
    foreach (var item in metricGroup.metrics)
    {
        sqlStmt += item.Value.name.Replace(' ', '_') + ", ";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2); // remove last comma
    sqlStmt += ")" + Environment.NewLine;
    sqlStmt += "SELECT dt, ";

    // add dictId if the metric group has multiple rows
    if (metricGroup.isMultiRow)
    {
        sqlStmt += "dictId, ";
    }

    // Add column names
    foreach (var item in metricGroup.metrics)
    {
        sqlStmt += item.Value.name.Replace(' ', '_') + ", ";
    }

    sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2) + Environment.NewLine; // remove last comma
    sqlStmt += "FROM #AVG_TMP_" + metricGroup.dataTableName + ";" + Environment.NewLine + Environment.NewLine;

    // BUGFIX: drop the temp table when done. Temp tables live for the session, and
    // _reposConn is a long-lived member connection, so without this the next Archive
    // call for the same metric group failed with
    // "There is already an object named '#AVG_TMP_...' in the database".
    sqlStmt += "DROP TABLE #AVG_TMP_" + metricGroup.dataTableName + ";" + Environment.NewLine + Environment.NewLine;

    // Update ArchivedToDate value
    sqlStmt += "UPDATE dbo.ArchiveWatermarks SET ArchivedToDate = @dateTo WHERE ArchiveOffsetId = @archiveOffsetId and TargetId = @targetId;";

    _logger.Trace(sqlStmt);

    // Execute SQL statement
    SqlTransaction reposTran = null;
    SqlCommand reposCmd = null;

    try
    {
        if (_reposConn.State != ConnectionState.Open)
        {
            _reposConn.Open();
        }

        reposTran = _reposConn.BeginTransaction();
        reposCmd = _reposConn.CreateCommand();
        reposCmd.Transaction = reposTran;
        reposCmd.CommandType = CommandType.Text;
        reposCmd.CommandText = sqlStmt;
        reposCmd.CommandTimeout = 300;

        reposCmd.Parameters.Add("@targetId", SqlDbType.Int);
        reposCmd.Parameters["@targetId"].Value = targetId;
        reposCmd.Parameters.Add("@archiveOffsetId", SqlDbType.Int);
        reposCmd.Parameters["@archiveOffsetId"].Value = archiveOffset.Id;
        reposCmd.Parameters.Add("@dateFrom", SqlDbType.DateTime2, 6);
        reposCmd.Parameters["@dateFrom"].Value = RoundDate(archiveFrom, archiveOffset.IntervalInSeconds);
        reposCmd.Parameters.Add("@dateTo", SqlDbType.DateTime2, 6);
        reposCmd.Parameters["@dateTo"].Value = archiveTo;

        reposCmd.Prepare();
        reposCmd.ExecuteNonQuery();
        reposTran.Commit();
    }
    catch (SqlException e)
    {
        if (_reposConn.State != ConnectionState.Open)
        {
            Manager.SetRepositoryAccessibility(false);
            return false;
        }

        switch (e.Number)
        {
            case 208:
                // Ignore missing tables. Target might be recently initialized
                break;
            default:
                _logger.Error("SqlException: {0} ErrorCode: {1}", e.Message, e.Number);
                break;
        }

        if (reposTran != null)
        {
            // Transaction might be rolled back if commit fails. In this case second rollback will fail
            try
            {
                reposTran.Rollback();
            }
            catch (Exception)
            {
                _logger.Debug("Transaction has been rolled back already");
            }
        }

        return false;
    }
    catch (Exception e)
    {
        if (_reposConn.State == ConnectionState.Open)
        {
            _logger.Error(e.Message);
            _logger.Error(e.StackTrace);
        }
        else
        {
            Manager.SetRepositoryAccessibility(false);
        }

        return false;
    }
    finally
    {
        if (reposCmd != null)
        {
            ((IDisposable)reposCmd).Dispose();
        }

        if (reposTran != null)
        {
            ((IDisposable)reposTran).Dispose();
        }
    }

    return true;
} // end of Archive method
void WriteItem(MetricGroup group, MetricCounter item, bool close) { CodeMetric info = item.Group; List<CodeMetric> seqpnts = item.Seqpnts; bool anyInstruments = false; bool allExcluded = true; foreach (CodeMetric metric in seqpnts) { anyInstruments |= metric.Instrumented; allExcluded &= metric.Excluded; } WriteLine("<method name='{0}' excluded='{5}' instrumented='{4}' class='{1}{2}{3}'>", XmlEncode(info.MethodName), XmlEncode(info.Namespace), String.IsNullOrEmpty(info.Namespace) ? String.Empty : ".", XmlEncode(info.Class), anyInstruments ? TRUE : FALSE, allExcluded ? TRUE : FALSE ); foreach (CodeMetric metric in seqpnts) { if (metric.Excluded) continue; WriteLine("<seqpnt visitcount='{0}' line='{1}' column='{2}' endline='{3}' endcolumn='{4}' excluded='{5}' document='{6}' />", metric.VisitCount, metric.Line, metric.Column, metric.EndLine, metric.EndColumn, metric.Excluded ? TRUE : FALSE, XmlEncode(metric.SrcFile) ); } WriteLine("</method>"); }
void WriteItem(MetricGroup group, MetricCounter item, bool close) { WriteLine("<{0} name='{1}' sequencePoints='{2}' unvisitedPoints='{3}' coverage='{4}' acceptable='{5}'{6}>", group.Name, XmlEncode(item.Name), item.TotalSeqpnts, item.Unvisited, Math.Round(item.Coverage, 4), ACCEPTABLE, close ? " /" : String.Empty ); }
} // end of LoadDictionaryIntoCache function

/// <summary>
/// Loads the latest data snapshot (rows at MAX(dt)) for a fast-changing metric group
/// from the repository into the in-memory data cache, creating the cache table on
/// first use. Retries once after creating missing repository tables (error 208).
/// The populated clone replaces the live cache only after a successful read.
/// </summary>
/// <param name="targetId">Target whose data table is read.</param>
/// <param name="metricGroup">Metric group to load; must have ChangeSpeed.Fast.</param>
/// <param name="allowReload">When false, an already-loaded cache is left untouched.</param>
/// <param name="connection">Optional open connection to reuse; a temporary one is opened (and closed) when null.</param>
/// <param name="transaction">Optional transaction to enlist the read in; rolled back on SqlException.</param>
public static void LoadDataIntoCache(int targetId, MetricGroup metricGroup, bool allowReload, SqlConnection connection = null, SqlTransaction transaction = null)
{
    string cacheKey;
    SqlConnection conn;

    // Only fast-changing metrics are stored in the data cache.
    if (metricGroup.changeSpeed != ChangeSpeed.Fast)
    {
        throw new Exception("Only fast changing metric is allowed");
    }

    SqlCommand cmd = null;
    SqlDataReader dataReader = null;

    // Create the in-memory data cache table for this target/metric group if it doesn't exist yet
    // (no key columns; one value column per metric).
    if (!ContainsKey(GetCacheKey(targetId, metricGroup, CacheType.Data)))
    {
        Dictionary<int, Column> valueColumns = new Dictionary<int, Column>(); // value/metric columns

        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            valueColumns.Add(i, metricGroup.metrics[i]);
        }

        TryAdd(GetCacheKey(targetId, metricGroup, CacheType.Data), new CacheTable(new Dictionary<int, Column>(), valueColumns, metricGroup.isCumulative));
    }

    cacheKey = GetCacheKey(targetId, metricGroup, CacheType.Data);

    // Work on an empty clone; the live table is swapped in via Replace() only on success.
    CacheTable tmpCache = _cache[cacheKey].CloneAndClear();

    // don't reload cache unless allowReload is specified
    if (allowReload == false && tmpCache.loadedFromDatabase)
    {
        return;
    }

    try
    {
        // Build: SELECT dictId, <metric columns> FROM <data table> WHERE dt = (SELECT MAX(dt) ...)
        // Column names are metric names with spaces replaced by underscores.
        string sqlStmt = "SELECT dictId, ";

        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            sqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ", ";
        }

        sqlStmt = sqlStmt.Remove(sqlStmt.Length - 2); // remove last comma

        sqlStmt += Environment.NewLine + "FROM " + SqlServerProbe.DataTableName(targetId, metricGroup)
            + Environment.NewLine + "WHERE dt = (SELECT MAX(dt) FROM " + SqlServerProbe.DataTableName(targetId, metricGroup) + ")";

        _logger.Trace(sqlStmt);

        // Use the caller's connection when provided; otherwise open a dedicated one.
        // NOTE(review): if an exception escapes below, a locally-opened conn is only
        // closed on the normal path — verify whether a leak here matters.
        if (connection == null)
        {
            conn = new SqlConnection(Configuration.GetReposConnectionString("Cache"));
            conn.Open();
        }
        else
        {
            conn = connection;
        }

        // Up to two attempts: attempt 1 may fail with "invalid object" (208), in which
        // case the tables are created and attempt 2 retries the read.
        int attempt = 1;
        bool canTry = true;

        while (attempt < 3 && canTry)
        {
            cmd = conn.CreateCommand();
            cmd.CommandText = sqlStmt;
            cmd.CommandType = System.Data.CommandType.Text;

            if (transaction != null)
            {
                cmd.Transaction = transaction;
            }

            try
            {
                dataReader = cmd.ExecuteReader();

                int id;
                object[] values = new object[metricGroup.NumberOfMetrics];

                while (dataReader.Read())
                {
                    id = (int)dataReader["dictId"];

                    for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
                    {
                        values[i] = dataReader[metricGroup.metrics[i].name.Replace(' ', '_')];
                    }

                    // NOTE(review): 'values' buffer is reused for every row — this assumes
                    // CacheTable.Add copies the array; confirm, otherwise all cached rows
                    // would alias the last row's values.
                    tmpCache.Add(id, new object[0], values);
                }

                dataReader.Close();

                // Publish the freshly loaded snapshot atomically.
                tmpCache.loadedFromDatabase = true;
                Replace(cacheKey, tmpCache);
            }
            catch (SqlException e)
            {
                if (transaction != null)
                {
                    transaction.Rollback();
                }

                switch (e.Number)
                {
                    case 208: // Invalid object
                        // Do not create tables if target has been deleted
                        if (!Configuration.targets.ContainsKey(targetId))
                        {
                            return;
                        }

                        SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
                        break;
                    default:
                        _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
                        canTry = false; // unrecoverable error: stop retrying
                        break;
                }
            }
            finally
            {
                if (dataReader != null)
                {
                    ((IDisposable)dataReader).Dispose();
                }

                if (cmd != null)
                {
                    ((IDisposable)cmd).Dispose();
                }
            }

            attempt++;
        }

        // Close only connections we opened ourselves.
        if (connection == null)
        {
            conn.Close();
        }
    }
    catch (Exception e)
    {
        // NOTE(review): catches any Exception but labels it "SqlException" in the log —
        // the message text looks stale; errors here are swallowed by design (best-effort load).
        _logger.Error("SqlException: " + e.Message);
    }
} // end of LoadDataIntoCache function
public void StopGroup(MetricGroup group) { }
public void StartGroup(MetricGroup group) { WriteLine("<{0}s>", group.Name); }
public void StopGroup(MetricGroup group) { WriteLine("</{0}s>", group.Name); }