        /// <summary>Returns the key and key-attribute values of the row that holds the maximum of the given metric, joined as "value1 / value2 / ..."</summary>
        private string GetMaxValueMultiRowCumulative(int targetId, MetricGroup metricGroup, string metric, string[] keysToReturn)
        {
            object[] keys;
            string   maxValue = string.Empty;

            if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data)))
            {
                InMemoryCache.LoadDataIntoCache(targetId, metricGroup, false);
            }

            CacheTable dataCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Data)];

            int id = dataCache.GetIdOfMaxValue(metric, metricGroup.metrics[metricGroup.GetMetricIdByName(metric)].type);

            if (id == -1)
            {
                return(maxValue);
            }

            if (!InMemoryCache.ContainsKey(InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary)))
            {
                InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, false);
            }

            CacheTable dictCache = Configuration.inMemoryCache[InMemoryCache.GetCacheKey(targetId, metricGroup, CacheType.Dictionary)];

            keys = dictCache[id];   // every requested value comes from the same dictionary row, so look it up once

            foreach (string keyName in keysToReturn)
            {
                // multi-row keys are stored first in the dictionary row, key attributes follow them
                int keyId = metricGroup.GetKeyIdByName(keyName);
                int index = keyId;

                if (keyId == -1)
                {
                    keyId = metricGroup.GetKeyAttributeIdByName(keyName);

                    if (keyId == -1)
                    {
                        // unknown key name - keep the placeholder so the output stays aligned
                        maxValue += " / ";
                        continue;
                    }

                    index = metricGroup.NumberOfMultiRowKeys + keyId;
                }

                if (keys == null || keys[index] == null)
                {
                    maxValue += " / ";
                }
                else
                {
                    maxValue += String.Format("{0} / ", keys[index]);
                }
            }

            // trim the trailing " / " separator; keysToReturn may be empty
            if (maxValue.Length >= 3)
            {
                maxValue = maxValue.Remove(maxValue.Length - 3);
            }

            return(maxValue);
        }
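A minimal usage sketch; the target id, metric name, and key names below are illustrative assumptions, not values taken from the code above:

        // Hypothetical call site inside the same class: format the row that currently
        // holds the maximum of the "reads" metric as "database / file".
        string rowWithMax = GetMaxValueMultiRowCumulative(1, metricGroup, "reads", new string[] { "database", "file" });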
Example #2
        } // end of WriteFastSingleRowToRepository function

        /// <summary>Saves data into dictionary and data table for multi-value metrics</summary>
        private void WriteMultipleRowsToRepository(int targetId, MetricGroup metricGroup, ProbeResultingData data)
        {
            int        id;
            CacheTable dictCache, dataCache;
            List <int> newDictRows;
            List <Tuple <int, int> > oldDictRows;

            object[] key, attributes;
            string   dataTableName, dictTableName;

            byte           tryCount    = 0;
            bool           canExit     = false;
            SqlTransaction tran        = null;
            string         dictSqlStmt = string.Empty;
            string         dataSqlStmt = string.Empty;

            newDictRows = new List <int>();               // indexes of data rows whose key is not in the dictionary yet and must be INSERTed
            oldDictRows = new List <Tuple <int, int> >(); // data row index / dictionary id pairs for rows whose attributes changed since the last probe, so the old dictionary record must be closed

            dataTableName = SqlServerProbe.DataTableName(targetId, metricGroup);
            _logger.Debug("Name of data table: " + dataTableName);

            dictTableName = SqlServerProbe.DictTableName(targetId, metricGroup);
            _logger.Debug("Name of dictionary: " + dictTableName);

            // load the dictionary cache table if it doesn't exist
            if (!InMemoryCache.ContainsKey(dictTableName))
            {
                InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, false);
            }

            dictCache = Configuration.inMemoryCache[dictTableName];

            // load the data cache table if it doesn't exist
            if (!InMemoryCache.ContainsKey(dataTableName))
            {
                InMemoryCache.LoadDataIntoCache(targetId, metricGroup, false);
            }

            /*
             * Checks for changed or new records in dictionary and if needed prepares SQL statement to update dictionary table
             */
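            // How the dictionary is maintained depends on how the multi-row key attributes may change:
            //   Static - attributes never change, so only rows with brand-new keys are INSERTed;
            //   Slow   - attributes are compared against the cached dictionary row and, when they
            //            differ, the old dictionary record is closed and a replacement is written.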
            switch (metricGroup.multiRowKeyAttributesChangeSpeed)
            {
            case ChangeSpeed.Static:
                // check whether all records are in the dictionary or some need to be added to it
                for (int i = 0; i < data.NumberOfRows; i++)
                {
                    key = new object[metricGroup.NumberOfMultiRowKeys];

                    for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
                    {
                        key[j] = data.values[i, j];
                    }

                    if (dictCache.GetIdByKey(key) == -1)
                    {
                        newDictRows.Add(i);
                    }
                }

                // generate SQL statements if there are any new dictionary records
                if (newDictRows.Count > 0)
                {
                    dictSqlStmt = GenerateSqlStaticDict(targetId, metricGroup, data, newDictRows);
                }

                break;

            case ChangeSpeed.Slow:
                // check whether all records are in the dictionary or some need to be added to it
                for (int i = 0; i < data.NumberOfRows; i++)
                {
                    key = new object[metricGroup.NumberOfMultiRowKeys];
                    for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
                    {
                        key[j] = data.values[i, j];
                    }

                    id = dictCache.GetIdByKey(key);
                    if (id == -1)
                    {
                        newDictRows.Add(i);
                    }
                    else     // check that attributes match
                    {
                        attributes = new object[metricGroup.NumberOfMultiRowKeyAttributes];
                        for (int j = 0; j < metricGroup.NumberOfMultiRowKeyAttributes; j++)
                        {
                            attributes[j] = data.values[i, metricGroup.NumberOfMultiRowKeys + j];
                        }

                        if (!dictCache.CompareAttributesForKey(id, attributes))
                        {
                            oldDictRows.Add(new Tuple <int, int>(i, id));    // this is to close the old record - UPDATE
                        }
                    }
                }

                // generate SQL statements if there are any changes or new records in dictionary
                if (oldDictRows.Count > 0 || newDictRows.Count > 0)
                {
                    dictSqlStmt = GenerateSqlSlowDict(targetId, metricGroup, data, oldDictRows, newDictRows);
                }

                break;

            default:
                throw new Exception("Unknown dictionary change speed");
            }

            /*
             * Write new data into dictionary but don't close transaction yet
             */
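            // The dictionary is written first, inside a transaction that stays open until the data
            // rows (which reference the dictionary ids) have been inserted further down; the
            // dictionary cache is refreshed from that same open transaction so that newly assigned
            // ids are already visible when the data statement is built.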
            if (!string.IsNullOrEmpty(dictSqlStmt))
            {
                _logger.Trace(dictSqlStmt);

                // If tables don't exist, will try to create them and rerun SQL statements
                while (!canExit && tryCount < 2)
                {
                    try
                    {
                        // we will write to the dictionary first and then to the data table so we need to begin a transaction
                        tran = this.reposConn.BeginTransaction();

                        if (!string.IsNullOrEmpty(dictSqlStmt))
                        {
                            // save dictionary changes
                            using (SqlCommand cmd = this.reposConn.CreateCommand())
                            {
                                cmd.Transaction = tran;
                                cmd.CommandType = System.Data.CommandType.Text;
                                cmd.CommandText = dictSqlStmt;
                                int rowCount = cmd.ExecuteNonQuery();
                                _logger.Debug("Rows affected: " + rowCount.ToString());
                            }
                        }

                        InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true, this.reposConn, tran);
                        canExit = true;
                    }
                    catch (SqlException e)
                    {
                        if (tran != null)
                        {
                            tran.Rollback();
                            tran.Dispose();
                            tran = null;
                        }

                        switch (e.Number)
                        {
                        case 208:     // Invalid object
                            // Do not create tables if target has been deleted
                            if (!Configuration.targets.ContainsKey(targetId))
                            {
                                return;
                            }

                            SqlServerProbe.CreateTablesForMetricGroup(targetId, metricGroup);
                            break;

                        default:
                            _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
                            break;
                        }
                    }

                    tryCount++;
                }
            }

            /*
             * Prepare SQL statement to save data with right references to the dictionary records
             */
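            // For the Fast change speed the loop below builds one multi-row INSERT, roughly of the form
            // (actual table name, column names and value formats come from SqlServerProbe and the metric group):
            //   INSERT INTO <dataTableName> (dt,dictId,metric_1,...,metric_N)
            //   VALUES ('<probe datetime>',<dictId>,<value_1>,...,<value_N>), (...)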
            switch (metricGroup.changeSpeed)
            {
            case ChangeSpeed.Fast:
                dataSqlStmt = "INSERT INTO " + dataTableName + " (dt,dictId,";

                for (int i = 0; i < metricGroup.NumberOfMetrics; i++)
                {
                    dataSqlStmt += metricGroup.metrics[i].name.Replace(' ', '_') + ",";
                }

                dataSqlStmt  = dataSqlStmt.Remove(dataSqlStmt.Length - 1);    // remove last comma
                dataSqlStmt += ")" + Environment.NewLine + "VALUES";

                for (int i = 0; i < data.NumberOfRows; i++)
                {
                    dataSqlStmt += Environment.NewLine + "('" + SqlServerProbe.DateTimeToString(data.probeDateTime) + "',";

                    // retrieve corresponding id from dictionary
                    key = new object[metricGroup.NumberOfMultiRowKeys];

                    for (int k = 0; k < metricGroup.NumberOfMultiRowKeys; k++)
                    {
                        key[k] = data.values[i, k];
                    }

                    id           = dictCache.GetIdByKey(key);
                    dataSqlStmt += id.ToString() + ",";

                    // add metric values
                    for (int j = 0; j < metricGroup.NumberOfMetrics; j++)
                    {
                        dataSqlStmt += SqlServerProbe.DataValueToString(metricGroup.metrics[j].type, data.values[i, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + j]) + ",";
                    }

                    dataSqlStmt  = dataSqlStmt.Remove(dataSqlStmt.Length - 1);    // remove last comma
                    dataSqlStmt += "),";
                }

                dataSqlStmt = dataSqlStmt.Remove(dataSqlStmt.Length - 1);     // remove last comma
                _logger.Trace(dataSqlStmt);
                break;

            default:
                throw new Exception("Unsupported data change speed");
            }

            /*
             * Execute the data SQL statement and commit the transaction shared with the dictionary write
             */
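            // On success the dictionary and data caches are refreshed so the in-memory copies match what
            // was just committed; on failure the shared transaction is rolled back and the dictionary
            // cache is reloaded to drop any ids that were assigned inside the aborted transaction.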
            try
            {
                // save data
                using (SqlCommand cmd = this.reposConn.CreateCommand())
                {
                    if (tran != null) // use same transaction as for the dictionary
                    {
                        cmd.Transaction = tran;
                    }

                    cmd.CommandType = System.Data.CommandType.Text;
                    cmd.CommandText = dataSqlStmt;
                    int rowCount = cmd.ExecuteNonQuery();
                    _logger.Debug("Rows affected: " + rowCount.ToString());
                }

                if (tran != null)
                {
                    tran.Commit();
                }

                InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true);
                dictCache = Configuration.inMemoryCache[dictTableName];

                // Update in-memory data cache
                object[] newValues;

                dataCache = Configuration.inMemoryCache[dataTableName];

                for (int i = 0; i < data.NumberOfRows; i++)
                {
                    key = new object[metricGroup.NumberOfMultiRowKeys];

                    for (int j = 0; j < metricGroup.NumberOfMultiRowKeys; j++)
                    {
                        key[j] = data.values[i, j];
                    }

                    id = dictCache.GetIdByKey(key);

                    newValues = new object[metricGroup.NumberOfMetrics];

                    for (int j = 0; j < metricGroup.NumberOfMetrics; j++)
                    {
                        newValues[j] = data.values[i, metricGroup.NumberOfMultiRowKeys + metricGroup.NumberOfMultiRowKeyAttributes + j];
                    }

                    dataCache.AddOrUpdateRowValues(id, new object[0], newValues);
                }

                canExit = true;
            }
            catch (SqlException e)
            {
                _logger.Error("SqlException: " + e.Message + " ErrorCode: " + e.Number.ToString());
                if (tran != null)
                {
                    tran.Rollback();
                    InMemoryCache.LoadDictionaryIntoCache(targetId, metricGroup, true);
                }
            }
        }