} // end of WriteSlowSingleRowToRepository function

/// <summary>Saves a single-row, fast-changing metric to the repository and refreshes the in-memory cache.</summary>
private void WriteFastSingleRowToRepository(int targetId, MetricGroup metricGroup, ProbeResultingData data)
{
    // Build and execute the INSERT statement for this probe result
    string sqlStatement = GenerateSqlSingleRowFast(targetId, metricGroup, data);
    _logger.Trace(sqlStatement);
    SqlServerProbe.ExecuteSql(sqlStatement, targetId, metricGroup);

    // Copy the single probe row into a flat array for the in-memory cache
    object[] rowValues = new object[data.NumberOfColumns];
    for (int col = 0; col < data.NumberOfColumns; col++)
    {
        rowValues[col] = data.values[0, col];
    }

    // Make sure the shared (targetId -1) cache table exists before writing to it
    string cacheKey = InMemoryCache.GetCacheKey(-1, metricGroup);
    if (!InMemoryCache.ContainsKey(cacheKey))
    {
        // Do not create a new cache table if the target has been deleted
        if (!Configuration.targets.ContainsKey(targetId))
        {
            return;
        }

        InMemoryCache.CreateCacheTableSingleRow(metricGroup, CacheType.Data);
    }

    Configuration.inMemoryCache[cacheKey].AddOrUpdateRowValues(-1, new object[] { targetId }, rowValues);
} // end of WriteFastSingleRowToRepository function
/// <summary>
/// Builds a " / "-separated list of key and key-attribute values for the dictionary row that
/// holds the maximum value of <paramref name="metric"/> in the data cache.
/// Returns an empty string when no data row is found or <paramref name="keysToReturn"/> is empty.
/// </summary>
private string GetMaxValueMultiRowCumulative(int targetId, MetricGroup metricGroup, string metric, string[] keysToReturn)
{
    object[] keys;
    string maxValue = string.Empty;

    // ensure the data cache is loaded for this target/metric group
    if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data)))
    {
        InMemoryCache.LoadDataIntoCache(targetId, metricGroup, false);
    }

    CacheTable dataCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data)];

    int id = dataCache.GetIdOfMaxValue(metric, metricGroup.metrics[metricGroup.GetMetricIdByName(metric)].type);
    if (id == -1)
    {
        return maxValue; // no cached rows for this metric
    }

    // ensure the dictionary cache is loaded so the row's key values can be resolved
    if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary)))
    {
        InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, false);
    }

    CacheTable dictCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary)];

    foreach (string keyName in keysToReturn)
    {
        keys = dictCache[id];
        int keyId = metricGroup.GetKeyIdByName(keyName);

        if (keyId != -1)
        {
            // keyName is a multi-row key column
            if (keys == null || keys[keyId] == null)
            {
                maxValue += " / ";
            }
            else
            {
                maxValue += String.Format("{0} / ", keys[keyId]);
            }
        }
        else
        {
            // keyName should be a key attribute; attributes are stored after the key columns
            keyId = metricGroup.GetKeyAttributeIdByName(keyName);

            // BUG FIX: an unknown key name used to index keys[NumberOfMultiRowKeys - 1]
            // (silently reading the wrong column); render it as an empty segment instead
            if (keyId == -1 || keys == null || keys[metricGroup.NumberOfMultiRowKeys + keyId] == null)
            {
                maxValue += " / ";
            }
            else
            {
                maxValue += String.Format("{0} / ", keys[metricGroup.NumberOfMultiRowKeys + keyId]);
            }
        }
    }

    // BUG FIX: guard the trim so an empty keysToReturn no longer throws on Remove(-3);
    // every appended segment ends with " / ", so length >= 3 whenever anything was added
    if (maxValue.Length >= 3)
    {
        maxValue = maxValue.Remove(maxValue.Length - 3); // drop trailing " / "
    }

    return maxValue;
}
/// <summary>
/// Compares new single-row, slow-changing data against the in-memory copy, loading the latest
/// repository row into the cache first when it is missing.
/// </summary>
/// <returns>0 when the new values match the cached ones, 1 when they differ, and -1 when
/// there is no history to compare against (or the target has been deleted).</returns>
private int CompareSlowSingleRowWithInMemoryData(int targetId, MetricGroup metricGroup, ProbeResultingData data, SqlConnection connection)
{
    bool noHistory = false;
    CacheTable dataCache;

    // create in-memory cache table if it doesn't exist
    if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(-1, metricGroup)))
    {
        // Do not create tables if target has been deleted
        if (!Configuration.targets.ContainsKey(targetId))
        {
            return -1;
        }

        InMemoryCache.CreateCacheTableSingleRow(metricGroup, CacheType.Data);
    }

    dataCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(-1, metricGroup, CacheType.Data)];

    // load the latest row from the repository if it is not in memory yet
    int id = dataCache.GetIdByKey(new object[] { targetId });
    if (id == -1)
    {
        // select the most recent row (MAX(startDate)) with one column per metric
        string sqlStmt = "SELECT ";
        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
        {
            sqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ",";
        }

        sqlStmt = sqlStmt.Remove(sqlStmt.Length - 1); // remove last comma
        sqlStmt += " FROM " + SqlServerProbe.DataTableName(targetId, metricGroup);
        sqlStmt += " WHERE startDate = (SELECT MAX(startDate) FROM " + SqlServerProbe.DataTableName(targetId, metricGroup) + ")";

        using (SqlCommand cmd = connection.CreateCommand())
        {
            cmd.CommandType = System.Data.CommandType.Text;
            cmd.CommandText = sqlStmt;

            try
            {
                // using-statement guarantees the reader is closed even when a cast/type check throws
                using (SqlDataReader dataReader = cmd.ExecuteReader())
                {
                    if (dataReader.Read())
                    {
                        object[] oldValues = new object[metricGroup.NumberOfMetrics];

                        // BUG FIX: a stray "i++;" after the switch incremented i twice per
                        // iteration, skipping every other metric column; the for-header
                        // increment is now the only one
                        for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
                        {
                            // check data type before casting
                            switch (metricGroup.metrics[i].type)
                            {
                                case DataType.Ansi:
                                    if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Ansi))
                                    {
                                        throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric does not match any allowed data type for internal data type Ansi");
                                    }

                                    oldValues[i] = (object)dataReader.GetString(i);
                                    break;

                                case DataType.Unicode:
                                    if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Unicode))
                                    {
                                        throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric does not match any allowed data type for internal data type Unicode");
                                    }

                                    oldValues[i] = (object)dataReader.GetString(i);
                                    break;

                                case DataType.Double:
                                    if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Double))
                                    {
                                        throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric does not match any allowed data type for internal data type Double");
                                    }

                                    oldValues[i] = (object)dataReader.GetDouble(i);
                                    break;

                                case DataType.SmallInt:
                                    if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.SmallInt))
                                    {
                                        throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric does not match any allowed data type for internal data type Int16");
                                    }

                                    oldValues[i] = (object)dataReader.GetInt16(i);
                                    break;

                                case DataType.Datetime:
                                    if (!DataTypeMappingSqlServer.DoesBelong(dataReader.GetDataTypeName(i), DataType.Datetime))
                                    {
                                        throw new Exception("Data type of column #" + (i + 1).ToString() + " of '" + metricGroup.name + "' metric does not match any allowed data type for internal data type Datetime");
                                    }

                                    oldValues[i] = (object)dataReader.GetDateTime(i);
                                    break;

                                default:
                                    throw new Exception("Unknown data type");
                            } // end of switch
                        }

                        id = dataCache.Add(-1, new object[] { targetId }, oldValues);
                    }
                    else
                    {
                        noHistory = true;
                    }
                }
            }
            catch (SqlException e)
            {
                if (e.Number == 208) // Invalid object
                {
                    // Do not create tables if target has been deleted
                    if (!Configuration.targets.ContainsKey(targetId))
                    {
                        return -1;
                    }

                    SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
                    noHistory = true;
                }
                else
                {
                    throw;
                }
            }
        }

        if (noHistory)
        {
            return -1;
        }
    }

    // compare old and new values
    object[] newValues = new object[metricGroup.NumberOfMetrics];
    for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
    {
        newValues[i] = data.values[0, i];
    }

    return dataCache.CompareAttributesForKey(id, newValues) ? 0 : 1;
}
} // end of WriteFastSingleRowToRepository function

/// <summary>Saves data into dictionary and data table for multi-value metrics</summary>
private void WriteMultipleRowsToRepository(int targetId, MetricGroup metricGroup, ProbeResultingData data)
{
    int id;
    CacheTable dictCache, dataCache;
    List<int> newDictRows;
    List<Tuple<int, int>> oldDictRows;
    object[] key, attributes;
    string dataTableName, dictTableName;
    byte tryCount = 0;
    bool canExit = false;
    SqlTransaction tran = null;
    string dictSqlStmt = string.Empty;
    string dataSqlStmt = string.Empty;

    newDictRows = new List<int>(); // ids of records that should be added to the dictionary (new rows or rows with updated attributes)
    oldDictRows = new List<Tuple<int, int>>(); // ids and dictionary ids of records that changed since last probe and need to be closed

    dataTableName = SqlServerProbe.DataTableName(targetId, metricGroup);
    _logger.Debug("Name of data table: " + dataTableName);
    dictTableName = SqlServerProbe.DictTableName(targetId, metricGroup);
    _logger.Debug("Name of dictionary: " + dictTableName);

    // load the dictionary cache table if it isn't cached yet
    if (!InMemoryCache.ContainsKey(dictTableName))
    {
        InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, false);
    }

    dictCache = Configuration.inMemoryCache[dictTableName];

    // load the data cache table if it isn't cached yet
    if (!InMemoryCache.ContainsKey(dataTableName))
    {
        InMemoryCache.LoadDataIntoCache(targetId, metricGroup, false);
    }

    /*
     * Checks for changed or new records in dictionary and if needed prepares SQL statement to update dictionary table
     */
    switch (metricGroup.multiRowKeyAttributesChangeSpeed)
    {
        case ChangeSpeed.Static:
            // check whether all records are in the dictionary or some need to be added to it
            for (int i = 0; i < data.NumberOfRows; i++)
            {
                // key columns are the first NumberOfMultiRowKeys columns of each probe row
                key = new object[metricGroup.NumberOfMultiRowKeys];
                for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
                {
                    key[j] = data.values[i, j];
                }

                if (dictCache.GetIdByKey(key) == -1)
                {
                    newDictRows.Add(i);
                }
            }

            // generate SQL statements if there are any new dictionary records
            if (newDictRows.Count > 0)
            {
                dictSqlStmt = GenerateSqlStaticDict(targetId, metricGroup, data, newDictRows);
            }

            break;

        case ChangeSpeed.Slow:
            // check whether all records are in the dictionary or some need to be added to it
            for (int i = 0; i < data.NumberOfRows; i++)
            {
                key = new object[metricGroup.NumberOfMultiRowKeys];
                for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
                {
                    key[j] = data.values[i, j];
                }

                id = dictCache.GetIdByKey(key);
                if (id == -1)
                {
                    newDictRows.Add(i);
                }
                else // check that attributes match
                {
                    // attribute columns follow the key columns in each probe row
                    attributes = new object[metricGroup.NumberOfMultiRowKeyAttributes];
                    for (int j = 0; j < metricGroup.NumberOfMultiRowKeyAttributes; j++)
                    {
                        attributes[j] = data.values[i, metricGroup.NumberOfMultiRowKeys + j];
                    }

                    if (!dictCache.CompareAttributesForKey(id, attributes))
                    {
                        oldDictRows.Add(new Tuple<int, int>(i, id)); // this is to close the old record - UPDATE
                    }
                }
            }

            // generate SQL statements if there are any changes or new records in dictionary
            if (oldDictRows.Count > 0 || newDictRows.Count > 0)
            {
                dictSqlStmt = GenerateSqlSlowDict(targetId, metricGroup, data, oldDictRows, newDictRows);
            }

            break;

        default:
            throw new Exception("Unknown dictionary change speed");
    }

    /*
     * Write new data into dictionary but don't close transaction yet
     */
    if (dictSqlStmt.CompareTo(string.Empty) != 0)
    {
        _logger.Trace(dictSqlStmt);

        // If tables don't exist, will try to create them and rerun SQL statements
        // (at most two attempts: one failure + one retry after table creation)
        while (!canExit && tryCount < 2)
        {
            try
            {
                // we will write to the dictionary first and then to the data table so we need to begin a transaction
                tran = this.reposConn.BeginTransaction();

                if (dictSqlStmt.CompareTo(string.Empty) != 0)
                {
                    // save dictionary changes
                    using (SqlCommand cmd = this.reposConn.CreateCommand())
                    {
                        cmd.Transaction = tran;
                        cmd.CommandType = System.Data.CommandType.Text;
                        cmd.CommandText = dictSqlStmt;
                        int rowCount = cmd.ExecuteNonQuery();
                        _logger.Debug("Rows affected: " + rowCount.ToString());
                    }
                }

                // reload the dictionary cache inside the open transaction so new ids are visible below
                InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true, this.reposConn, tran);
                canExit = true;
            }
            catch (SqlException e)
            {
                // roll back so the retry starts from a clean transaction
                if (tran != null)
                {
                    tran.Rollback();
                    tran.Dispose();
                    tran = null;
                }

                switch (e.Number)
                {
                    case 208: // Invalid object
                        // Do not create tables if target has been deleted
                        if (!Configuration.targets.ContainsKey(targetId))
                        {
                            return;
                        }

                        SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
                        break;

                    default:
                        // NOTE(review): non-208 errors are logged and the loop retries once more — confirm this is intended
                        _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
                        break;
                }
            }

            tryCount++;
        }
    }

    /*
     * Prepare SQL statement to save data with right references to the dictionary records
     */
    switch (metricGroup.changeSpeed)
    {
        case ChangeSpeed.Fast:
            // multi-row INSERT: (dt, dictId, metric1, metric2, ...) with one VALUES tuple per probe row
            dataSqlStmt = "INSERT INTO " + dataTableName + " (dt,dictId,";

            for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
            {
                dataSqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ",";
            }

            dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1); // remove last comma
            dataSqlStmt += ")" + Environment.NewLine + "VALUES";

            for (int i = 0; i < data.NumberOfRows; i++)
            {
                dataSqlStmt += Environment.NewLine + "('" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "',";

                // retrieve corresponding id from dictionary
                key = new object[metricGroup.NumberOfMultiRowKeys];
                for (int k = 0; k < metricGroup.NumberOfMultiRowKeys; k++)
                {
                    key[k] = data.values[i, k];
                }

                id = dictCache.GetIdByKey(key);
                dataSqlStmt += id.ToString() + ",";

                // add metric values (they follow the key and key-attribute columns)
                for (int j = 0; j < metricGroup.NumberOfMetrics; j++)
                {
                    dataSqlStmt += SqlServerProbe.DataValueToString(metricGroup.metrics[j].type, data.values[i, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + j]) + ",";
                }

                dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1); // remove last comma
                dataSqlStmt += "),";
            }

            dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1); // remove last comma
            _logger.Trace(dataSqlStmt);
            break;

        default:
            throw new Exception("Unsupported data change speed");
    }

    /*
     * Executes SQL statements
     * If tables don't exist, will try to create them and rerun SQL statements
     */
    try
    {
        // save data
        using (SqlCommand cmd = this.reposConn.CreateCommand())
        {
            if (tran != null) // use same transaction as for the dictionary
            {
                cmd.Transaction = tran;
            }

            cmd.CommandType = System.Data.CommandType.Text;
            cmd.CommandText = dataSqlStmt;
            int rowCount = cmd.ExecuteNonQuery();
            _logger.Debug("Rows affected: " + rowCount.ToString());
        }

        if (tran != null)
        {
            tran.Commit();
        }

        // refresh the dictionary cache now that the transaction is committed
        InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true);
        dictCache = Configuration.inMemoryCache[dictTableName];

        // Update in-memory data cache
        object[] newValues;
        dataCache = Configuration.inMemoryCache[dataTableName];

        for (int i = 0; i < data.NumberOfRows; i++)
        {
            key = new object[metricGroup.NumberOfMultiRowKeys];
            for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
            {
                key[j] = data.values[i, j];
            }

            id = dictCache.GetIdByKey(key);

            newValues = new object[metricGroup.NumberOfMetrics];
            for (int j = 0; j < metricGroup.NumberOfMetrics; j++)
            {
                newValues[j] = data.values[i, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + j];
            }

            dataCache.AddOrUpdateRowValues(id, new object[0], newValues);
        }

        canExit = true;
    }
    catch (SqlException e)
    {
        _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());

        if (tran != null)
        {
            // undo the dictionary write and re-sync the cache with the repository
            tran.Rollback();
            InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true);
        }
    }
}