private int GetMetricCountFromBlob(string blobName)
        {
            string content = ReportHelpers.Load(StorageAccount, blobName, ContainerName);
            JArray jArray  = JArray.Parse(content);

            return jArray.Count;
        }
        private void CreateUserAgentReport(DirectoryInfo info)
        {
            string standardError  = string.Empty;
            string standardOutput = string.Empty;
            List <IISUserAgentDetails> userAgentDetails = new List <IISUserAgentDetails>();
            var content = ReportHelpers.Load(StorageAccount, "Configuration.IISUserAgent.json", ContainerName);
            List <IISUserAgentDetails> userAgents = new JavaScriptSerializer().Deserialize <List <IISUserAgentDetails> >(content);
            foreach (IISUserAgentDetails agent in userAgents)
            {
                string query        = string.Format(@"select count(*) from {0}\*{1}*.log WHERE cs(User-Agent) LIKE '{2}'", info.FullName, ReportDate, agent.UserAgent);
                int    requestCount = InvokeLogParserProcessForUserAgent(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF");
                int    avgTime      = 0;
                if (requestCount > 0)
                {
                    query   = string.Format(@"select avg(time-taken) from {0}\*{1}*.log WHERE cs(User-Agent) LIKE '{2}'", info.FullName, ReportDate, agent.UserAgent);
                    avgTime = InvokeLogParserProcessForUserAgent(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF");
                }
                userAgentDetails.Add(new IISUserAgentDetails(agent.UserAgentName, agent.UserAgent, avgTime, requestCount));
            }

            string blobName = "IISUserAgentDetails" + ReportDate + ".json";
            int    count    = 0;

            foreach (IISUserAgentDetails detail in userAgentDetails)
            {
                var json = new JavaScriptSerializer().Serialize(detail);
                ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple <string, string>(count.ToString(), json), userAgentDetails.Count, ContainerName);
                count++;
            }
        }
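        // The Configuration.IISUserAgent.json blob and the IISUserAgentDetails type are not shown in
        // this snippet. A minimal sketch of the shapes the method above assumes, inferred from the
        // calls it makes; the property names and constructors below are assumptions, not the actual
        // gallery types.
        //
        // Example blob content (illustrative only):
        // [ { "UserAgentName": "NuGet Command Line", "UserAgent": "%NuGet Command Line%" } ]
        public class IISUserAgentDetails
        {
            // Parameterless constructor so JavaScriptSerializer can deserialize the config blob.
            public IISUserAgentDetails() { }

            public IISUserAgentDetails(string userAgentName, string userAgent, int avgTime, int requestCount)
            {
                UserAgentName = userAgentName;
                UserAgent     = userAgent;
                AvgTime       = avgTime;
                RequestCount  = requestCount;
            }

            public string UserAgentName { get; set; }
            public string UserAgent { get; set; }
            public int AvgTime { get; set; }
            public int RequestCount { get; set; }
        }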
        private void CreateRestoreReportForVSTask()
        {
            string[] VsQuery = new JavaScriptSerializer().Deserialize <string[]>(ReportHelpers.Load(StorageAccount, "VsVersion.json", ContainerName));
            using (var sqlConnection = new SqlConnection(ConnectionString.ConnectionString))
            {
                using (var dbExecutor = new SqlExecutor(sqlConnection))
                {
                    sqlConnection.Open();
                    DateTime         date     = DateTime.UtcNow.AddDays(-LastNDays);
                    List <VsRequest> requests = new List <VsRequest>();
                    foreach (string each in VsQuery)
                    {
                        try
                        {
                            var request = dbExecutor.Query <Int32>(string.Format(sqlQueryForVSRestore, date.ToString("yyyy-MM-dd"), each)).SingleOrDefault();
                            requests.Add(new VsRequest("VS" + each, request.ToString()));
                        }

                        catch
                        {
                            requests.Add(new VsRequest("VS" + each, "0"));
                        }
                    }
                    var json = new JavaScriptSerializer().Serialize(requests);
                    ReportHelpers.CreateBlob(StorageAccount, "VsRestoreTrend" + LastNDays.ToString() + "Day.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
                }
            }
        }
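        // sqlQueryForVSRestore is referenced above but not defined in this snippet. A hypothetical
        // example of a two-placeholder format string that would satisfy the string.Format call
        // ({0} = cut-off date, {1} = Visual Studio version); the table and column names below are
        // assumptions and may not match the actual gallery warehouse schema.
        private const string sqlQueryForVSRestoreExample =
            @"SELECT COUNT(*)
              FROM PackageStatistics
              WHERE [Timestamp] >= '{0}'
                AND Operation LIKE 'Restore%'
                AND UserAgent LIKE '%VS{1}%'";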
        private List <WorkInstanceDetail> GetWorkJobDetail()
        {
            List <WorkInstanceDetail> jobDetail = new List <WorkInstanceDetail>();
            var content = ReportHelpers.Load(StorageAccount, "WorkJobDetail.json", ContainerName);

            if (content != null)
            {
                jobDetail = new JavaScriptSerializer().Deserialize <List <WorkInstanceDetail> >(content);
            }
            return jobDetail;
        }
        public override void ExecuteCommand()
        {
            //Get the logs for the previous day.
            string DeploymentID = new JavaScriptSerializer().Deserialize <string>(ReportHelpers.Load(StorageAccount, "DeploymentId_" + ServiceName + ".json", ContainerName));
            string blobName     = DeploymentID + "/NuGetGallery/NuGetGallery_IN_{IID}/Web/W3SVC1273337584/u_ex{Date}{Hour}.log";

            blobName = blobName.Replace("{Date}", ReportDate);

            DirectoryInfo info = new System.IO.DirectoryInfo(Environment.CurrentDirectory);

            //Download the logs for the previous day.

            while (RetryCount-- > 0)
            {
                try
                {
                    if (!Directory.Exists(info.FullName))
                    {
                        Directory.CreateDirectory(info.FullName);
                    }

                    int instanceCount = GetCurrentInstanceCountInGallery(); //get current instance count.
                    for (int i = 0; i < instanceCount; i++)
                    {
                        for (int j = 0; j < 24; j++) //Download the log for each hour
                        {
                            string temp = blobName;
                            temp = temp.Replace("{IID}", i.ToString());
                            temp = temp.Replace("{Hour}", j.ToString("00"));

                            string logName = temp.Substring(temp.LastIndexOf("/") + 1);
                            logName = logName.Substring(0, logName.Length - 4);
                            string localFile = Path.Combine(info.FullName, logName + i.ToString() + "_" + j.ToString() + ".log");
                            ReportHelpers.DownloadBlobToLocalFile(IISStorageAccount, temp, localFile, "wad-iis-requestlogs");
                        }
                    }
                    break; // break if the operation succeeds without doing any retry.
                }
                catch (Exception e)
                {
                    Console.WriteLine(string.Format("Exception thrown while trying to create report : {0}", e.Message));
                }
            }

            //Create a json file
            CreateIPDetailsReport(info);
            CreateResponseTimeReport(info);
            CreateUserAgentReport(info);
        }
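        // For illustration, assuming ReportDate carries the yyMMdd stamp used in IIS log names
        // (e.g. "140315"): for instance 0 and hour 7 the template above expands to
        //   <DeploymentID>/NuGetGallery/NuGetGallery_IN_0/Web/W3SVC1273337584/u_ex14031507.log
        // and is saved locally as u_ex140315070_7.log ({Hour} is zero-padded by j.ToString("00"),
        // while the local file name appends the raw instance and hour indexes).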
        public override void ExecuteCommand()
        {
            JavaScriptSerializer js = new JavaScriptSerializer();
            AlertThresholds      thresholdValues = js.Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
            int error   = thresholdValues.DatabaseSizePercentErrorThreshold;
            int warning = thresholdValues.DatabaseSizePercentWarningThreshold;

            List <DatabaseSize> dbSizeDetails = new List <DatabaseSize>();

            // dbSizeDetails.Add(GetDataSize(PrimaryConnectionString.ConnectionString,threshold));
            dbSizeDetails.Add(GetDataSize(LegacyConnectionString.ConnectionString, error, warning));
            dbSizeDetails.Add(GetDataSize(WarehouseConnectionString.ConnectionString, error, warning));

            var json = js.Serialize(dbSizeDetails);

            ReportHelpers.CreateBlob(StorageAccount, "DBSize.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
        }
        private List <WorkInstanceDetail> GetWorkJobDetail(string date)
        {
            List <WorkInstanceDetail> jobDetail = new List <WorkInstanceDetail>();

            try
            {
                var content = ReportHelpers.Load(StorageAccount, "WorkJobDetail" + date + ".json", ContainerName);

                if (content != null)
                {
                    jobDetail = new JavaScriptSerializer().Deserialize <List <WorkInstanceDetail> >(content);
                }
            }
            catch (Exception ex)
            {
                Console.WriteLine(ex.Message);
            }
            return jobDetail;
        }
        public override void ExecuteCommand()
        {
            string DeployId = new JavaScriptSerializer().Deserialize <string>(ReportHelpers.Load(StorageAccount, "DeploymentId_" + ServiceName + ".json", ContainerName));
            CloudStorageAccount storageAccount = CloudStorageAccount.Parse(PerfCounterTableStorageAccount);
            CloudTableClient    tableClient    = storageAccount.CreateCloudTableClient();
            CloudTable          table          = tableClient.GetTableReference("WAD" + DeployId + "PT5MRTable");
            int    count = 0;
            double sum   = 0;

            TableQuery <dataEntity> rangeQuery = new TableQuery <dataEntity>().Where(TableQuery.CombineFilters(
                                                                                         TableQuery.GenerateFilterConditionForDate("Timestamp", QueryComparisons.GreaterThan, DateTime.UtcNow.AddMinutes(-frequencyInMin)),
                                                                                         TableOperators.And,
                                                                                         TableQuery.GenerateFilterCondition("CounterName", QueryComparisons.Equal, PerfCounterName)));

            foreach (dataEntity entity in table.ExecuteQuery(rangeQuery))
            {
                count++;
                sum += entity.Total / entity.Count;
            }

            double average = count > 0 ? sum / count : 0; // guard against an empty query result

            ReportHelpers.AppendDatatoBlob(StorageAccount, ServiceName + PerfCounterName + string.Format("{0:MMdd}", DateTime.Now) + ".json", new Tuple <string, string>(String.Format("{0:HH:mm}", DateTime.Now), average.ToString("F")), 24 * 60 / frequencyInMin, ContainerName);
        }
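        // The dataEntity type used in the table query above is not part of this snippet. A minimal
        // sketch, assuming the standard WAD performance-counter aggregation schema (CounterName,
        // Total, Count); the property types are assumptions.
        public class dataEntity : TableEntity
        {
            public string CounterName { get; set; }
            public double Total { get; set; }
            public long Count { get; set; }
        }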
        /// <summary>
        /// Creates report for count and avg time taken for individual scenarios.
        /// </summary>
        /// <param name="info">Directory containing the downloaded IIS logs.</param>
        private void CreateUriStemDetailedReport(DirectoryInfo info)
        {
            List <IISRequestDetails> requestDetails = new List <IISRequestDetails>();
            var content = ReportHelpers.Load(StorageAccount, "Configration.IISRequestStems.json", ContainerName);
            List <IISRequestDetails> UriStems = new JavaScriptSerializer().Deserialize <List <IISRequestDetails> >(content);
            foreach (IISRequestDetails stem in UriStems)
            {
                int requestCount = GetDataForUriStem(stem.UriStem, "count (*)", info.FullName);
                int avgTime      = 0;
                if (requestCount > 0)
                {
                    avgTime = GetDataForUriStem(stem.UriStem, "avg (time-taken)", info.FullName);
                }
                requestDetails.Add(new IISRequestDetails(stem.ScenarioName, stem.UriStem, avgTime, requestCount));
            }
            var    json     = new JavaScriptSerializer().Serialize(requestDetails);
            string blobName = "IISRequestDetails" + string.Format("{0:MMdd}", DateTime.Now.AddHours(-1)) + ".json";

            ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple <string, string>(string.Format("{0:HH:00}", DateTime.Now.AddHours(-1)), json), 50, ContainerName);
        }
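        // GetDataForUriStem is not included in this snippet. A hypothetical sketch modeled on the
        // LogParser invocations in CreateUserAgentReport above: build an IISW3C query for the
        // requested aggregate ("count (*)" or "avg (time-taken)") filtered by cs-uri-stem, and
        // return the single numeric value it produces. InvokeLogParserProcess is an assumed helper
        // standing in for whatever wrapper actually shells out to LogParser.
        private int GetDataForUriStem(string uriStem, string aggregate, string logDirectory)
        {
            string query = string.Format(@"select {0} from {1}\*.log WHERE cs-uri-stem LIKE '{2}'", aggregate, logDirectory, uriStem);
            return InvokeLogParserProcess(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF");
        }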
        private void CreateReportForDataBaseEvents()
        {
            var masterConnectionString = Util.GetMasterConnectionString(ConnectionString.ConnectionString);
            var currentDbName          = Util.GetDbName(ConnectionString.ConnectionString);

            using (var sqlConnection = new SqlConnection(masterConnectionString))
            {
                using (var dbExecutor = new SqlExecutor(sqlConnection))
                {
                    sqlConnection.Open();

                    var usageSeconds = dbExecutor.Query <DatabaseEvent>(string.Format("select start_time, end_time,event_type,event_count,description from sys.event_log where start_time>='{0}' and start_time<='{1}' and database_name = '{2}' and severity = 2", DateTime.UtcNow.AddHours(-LastNHours).ToString("yyyy-MM-dd HH:mm:ss"), DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"), currentDbName));
                    var json         = new JavaScriptSerializer().Serialize(usageSeconds);
                    ReportHelpers.CreateBlob(StorageAccount, "DBDetailed" + LastNHours.ToString() + "Hour.json", ContainerName, "application/json", ReportHelpers.ToStream(json));

                    var             throttlingEventCount = dbExecutor.Query <Int32>(string.Format("select count(*) from sys.event_log where start_time>='{0}' and start_time<='{1}' and database_name = '{2}' and (event_type Like 'throttling%' or event_type Like 'deadlock')", DateTime.UtcNow.AddHours(-1).ToString("yyyy-MM-dd HH:mm:ss"), DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"), currentDbName)).SingleOrDefault();
                    var             additional_data      = dbExecutor.Query <string>(string.Format("select additional_data from sys.event_log where start_time>='{0}' and start_time<='{1}' and database_name = '{2}' and (event_type Like 'throttling%' or event_type Like 'deadlock')", DateTime.UtcNow.AddHours(-1).ToString("yyyy-MM-dd HH:mm:ss"), DateTime.UtcNow.ToString("yyyy-MM-dd HH:mm:ss"), currentDbName));
                    AlertThresholds thresholdValues      = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

                    StringBuilder sb = new StringBuilder();
                    foreach (string data in additional_data)
                    {
                        if (data != null)
                        {
                            sb.Append(data + "\n");
                        }
                    }
                    if (throttlingEventCount > thresholdValues.DatabaseThrottlingEventErrorThreshold && LastNHours == 1)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = "Error: SQL Azure DB alert activated for throttling/deadlock event",
                            Details      = string.Format("Number of events exceeded threshold for DB throttling/deadlock events. Error Threshold count : {0}, events noticed in last hour : {1}, all additional data is {2}", thresholdValues.DatabaseThrottlingEventErrorThreshold, throttlingEventCount, sb.ToString()),
                            AlertName    = "Error: SQL Azure DB throttling/deadlock event",
                            Component    = "SQL Azure Database",
                            Level        = "Error"
                        }.ExecuteCommand();
                    }
                    else if (throttlingEventCount > thresholdValues.DatabaseThrottlingEventWarningThreshold && LastNHours == 1)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = "Warning: SQL Azure DB alert activated for throttling/deadlock event",
                            Details      = string.Format("Number of events exceeded threshold for DB throttling/deadlock events. Warning Threshold count : {0}, events noticed in last hour : {1}, all additional data is {2}", thresholdValues.DatabaseThrottlingEventWarningThreshold, throttlingEventCount, sb.ToString()),
                            AlertName    = "Warning: SQL Azure DB throttling/deadlock event",
                            Component    = "SQL Azure Database",
                            Level        = "Warning"
                        }.ExecuteCommand();
                    }
                }
            }
        }
        private string CheckoutForPackageStatics()
        {
            string         outputMessage;
            List <DbEntry> prodDB;
            List <DbEntry> warehouseDB;

            using (var sqlConnection = new SqlConnection(ConnectionString.ConnectionString))
            {
                using (var dbExecutor = new SqlExecutor(sqlConnection))
                {
                    sqlConnection.Open();
                    var content = dbExecutor.Query <DbEntry>(string.Format(prodSQL, DateTime.UtcNow.AddDays(-1).ToString("MM/dd/yyyy"), DateTime.UtcNow.ToString("MM/dd/yyyy")));
                    prodDB = content.ToList <DbEntry>();
                }
            }

            using (var sqlConnection = new SqlConnection(WarehouseDb.ConnectionString))
            {
                using (var dbExecutor = new SqlExecutor(sqlConnection))
                {
                    sqlConnection.Open();
                    var content = dbExecutor.Query <DbEntry>(string.Format(wareSQL, DateTime.UtcNow.AddDays(-1).ToString("MM/dd/yyyy")));
                    warehouseDB = content.ToList <DbEntry>();
                }
            }

            bool correct = true;

            string[] Operation = new JavaScriptSerializer().Deserialize <string[]>(ReportHelpers.Load(StorageAccount, "OperationType.json", ContainerName));
            Dictionary <string, int> proddict      = GenerateDict(prodDB, Operation);
            Dictionary <string, int> warehousedict = GenerateDict(warehouseDB, Operation);

            if (Math.Abs(warehousedict.Count - proddict.Count) > 10)
            {
                bool prod = true;
                correct = false;
                StringBuilder sb = new StringBuilder();
                if (warehousedict.Count > proddict.Count)
                {
                    prod = false;
                }
                if (prod)
                {
                    sb.Append("prod key is more than warehouse, the following is in prod but not in warehouse. detail: ");
                    foreach (string key in proddict.Keys)
                    {
                        if (!warehousedict.ContainsKey(key))
                        {
                            sb.Append(key + Environment.NewLine);
                        }
                    }
                }
                else
                {
                    sb.Append("warehouse key is more than prod, the following is in warehouse but not in prod. detail: ");
                    foreach (string key in warehousedict.Keys)
                    {
                        if (!proddict.ContainsKey(key))
                        {
                            sb.Append(key + Environment.NewLine);
                        }
                    }
                }
                outputMessage = string.Format("Package statistic total pacakage number is not correct on {0},more detail is {1}", DateTime.UtcNow.AddDays(-1).ToString("MM/dd/yyyy"), sb.ToString());
            }

            else
            {
                StringBuilder sb = new StringBuilder();
                foreach (string key in proddict.Keys)
                {
                    if (!warehousedict.ContainsKey(key) || !warehousedict[key].Equals(proddict[key]))
                    {
                        correct = false;
                        sb.Append(key + " ");
                    }
                }
                outputMessage = string.Format("Package statistic is not correct on {0}, following package stat is not right, which are {1}", DateTime.UtcNow.AddDays(-1).ToString("MM/dd/yyyy"), sb.ToString());
            }

            if (!correct)
            {
                new SendAlertMailTask
                {
                    AlertSubject = "Error: Work service job background check alert activated for Package Statistics job",
                    Details      = outputMessage,
                    AlertName    = "Error: Alert for Package Statistics",
                    Component    = "Package Statistics Job",
                    Level        = "Error"
                }.ExecuteCommand();
            }
            return outputMessage;
        }
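        // GenerateDict is referenced above but not shown. A hypothetical sketch consistent with how
        // the dictionaries are compared: key each row by operation plus package identity and keep
        // its count as the value, so prod and warehouse can be diffed key by key. The DbEntry
        // property names used here (Operation, PackageId, PackageVersion, Count) are assumptions.
        private Dictionary <string, int> GenerateDict(List <DbEntry> entries, string[] operations)
        {
            var dict = new Dictionary <string, int>(StringComparer.OrdinalIgnoreCase);
            foreach (DbEntry entry in entries)
            {
                // Only keep rows whose operation type is one of the configured operations.
                if (!operations.Contains(entry.Operation, StringComparer.OrdinalIgnoreCase))
                {
                    continue;
                }
                string key = entry.Operation + ":" + entry.PackageId + ":" + entry.PackageVersion;
                dict[key] = entry.Count;
            }
            return dict;
        }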
        public override void ExecuteCommand()
        {
            AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            GetCurrentValueAndAlert(SqlQueryForConnectionCount, "DBConnections", thresholdValues.DatabaseConnectionsErrorThreshold, thresholdValues.DatabaseConnectionsWarningThreshold);
            GetCurrentValueAndAlert(SqlQueryForRequestCount, "DBRequests", thresholdValues.DatabaseRequestsErrorThreshold, thresholdValues.DatabaseRequestsWarningThreshold);
            GetCurrentValueAndAlert(SqlQueryForBlockedRequestCount, "DBSuspendedRequests", thresholdValues.DatabaseBlockedRequestsErrorThreshold, thresholdValues.DatabaseBlockedRequestsWarningThreshold);
            CreateReportForDBCPUUsage();
            CreateReportForRequestDetails();
        }
 private void CreateReportForIndexFragmentation()
 {
     using (var sqlConnection = new SqlConnection(ConnectionString.ConnectionString))
     {
         using (var dbExecutor = new SqlExecutor(sqlConnection))
         {
             sqlConnection.Open();
             AlertThresholds thresholdValues      = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
             var             fragmentationDetails = dbExecutor.Query <DatabaseIndex>(string.Format(sqlQueryForIndexFragmentation, thresholdValues.DatabaseIndexFragmentationPercentErrorThreshold));
             var             json = new JavaScriptSerializer().Serialize(fragmentationDetails);
             ReportHelpers.CreateBlob(StorageAccount, "DBIndexFragmentation.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
         }
     }
 }
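 // sqlQueryForIndexFragmentation is not defined in this snippet. A hypothetical single-placeholder
 // format string that would fit the call above ({0} = DatabaseIndexFragmentationPercentErrorThreshold),
 // based on the standard sys.dm_db_index_physical_stats DMV; the projected column names are chosen to
 // illustrate the idea and may not match the DatabaseIndex type.
 private const string sqlQueryForIndexFragmentationExample =
     @"SELECT o.name AS TableName, i.name AS IndexName, s.avg_fragmentation_in_percent AS Fragmentation
       FROM sys.dm_db_index_physical_stats(DB_ID(), NULL, NULL, NULL, 'LIMITED') s
       JOIN sys.indexes i ON i.object_id = s.object_id AND i.index_id = s.index_id
       JOIN sys.objects o ON o.object_id = s.object_id
       WHERE s.avg_fragmentation_in_percent > {0}";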
        public override void ExecuteCommand()
        {
            NetworkCredential nc              = new NetworkCredential(SearchAdminUserName, SearchAdminKey);
            WebRequest        request         = WebRequest.Create(SearchEndPoint);
            AlertThresholds   thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            request.Credentials     = nc;
            request.PreAuthenticate = true;
            request.Method          = "GET";
            WebResponse response = request.GetResponse();

            using (var reader = new StreamReader(response.GetResponseStream()))
            {
                JavaScriptSerializer js = new JavaScriptSerializer();
                var    objects          = js.Deserialize <dynamic>(reader.ReadToEnd());
                var    process_info     = objects["process"];
                double cpusecond        = (double)process_info["cpuSeconds"];
                long   memory           = (long)process_info["virtualMemorySize"];
                int    cpuUsage         = 0;
                int    memUsage         = 0;

                if (cpuUsage > thresholdValues.SearchCpuPercentErrorThreshold)
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = "Error: Search Service Alert activated for cpu usage",
                        Details      = string.Format("Search service process cpu usage is above Error threshold: {0}% , it's {1}% ", thresholdValues.SearchCpuPercentErrorThreshold, cpuUsage.ToString()),
                        AlertName    = "Error: Alert for Serach CPU Usage",
                        Component    = "SearchService",
                        Level        = "Error"
                    }.ExecuteCommand();
                }
                else if (cpuUsage > thresholdValues.SearchCpuPercentWarningThreshold)
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = "Warning: Search Service Alert activated for cpu usage",
                        Details      = string.Format("Search service process cpu usage is above Warning threshold: {0}% , it's {1}% ", thresholdValues.SearchCpuPercentWarningThreshold, cpuUsage.ToString()),
                        AlertName    = "Warning: Alert for Serach CPU Usage",
                        Component    = "SearchService",
                        Level        = "Warning"
                    }.ExecuteCommand();
                }

                if (memUsage > thresholdValues.SearchMemErrorThresholdInGb * (1 << 30))
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = "Error: Search Service Alert activated for memory usage",
                        Details      = string.Format("Search service process memory usage is above Error threshold: {0}% GB, it's {1}% Byte ", thresholdValues.SearchMemErrorThresholdInGb, memUsage.ToString()),
                        AlertName    = "Error: Alert for Serach Memory Usage",
                        Component    = "SearchService",
                        Level        = "Error"
                    }.ExecuteCommand();
                }
                else if (memUsage > thresholdValues.SearchMemWarningThresholdInGb * (1 << 30))
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = "Warning: Search Service Alert activated for memory usage",
                        Details      = string.Format("Search service process memory usage is above Warning threshold {0}% GB, it's {1}% Byte ", thresholdValues.SearchMemWarningThresholdInGb, memUsage.ToString()),
                        AlertName    = "Warning: Alert for Serach Memory Usage",
                        Component    = "SearchService",
                        Level        = "Warning"
                    }.ExecuteCommand();
                }
                ReportHelpers.AppendDatatoBlob(StorageAccount, "SearchCpuUsage" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), cpusecond.ToString()), 24, ContainerName);
                ReportHelpers.AppendDatatoBlob(StorageAccount, "SearchMemUsage" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), memory.ToString()), 24, ContainerName);
            }
        }
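        // For reference, the diagnostics payload parsed above is assumed to look roughly like the
        // following; only the fields actually read (process.cpuSeconds and process.virtualMemorySize)
        // are shown, everything else is omitted.
        // {
        //     "process": {
        //         "cpuSeconds": 1234.5,
        //         "virtualMemorySize": 4294967296
        //     }
        // }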
 public override void ExecuteCommand()
 {
     thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
     CheckLagBetweenCatalogAndLucene();
     DoIntegrityCheckBetweenCatalogAndLucene();
 }
        public override void ExecuteCommand()
        {
            AlertThresholds thresholds = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            List <ElmahError> listOfErrors   = new List <ElmahError>();
            RefreshElmahError RefreshExecute = new RefreshElmahError(StorageAccount, ContainerName, LastNHours, ElmahAccountCredentials);

            listOfErrors = RefreshExecute.ExecuteRefresh();

            foreach (ElmahError error in listOfErrors)
            {
                if (error.Severity == 0)
                {
                    if (error.Occurecnes > thresholds.ElmahCriticalErrorPerHourAlertErrorThreshold && LastNHours == 1)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = string.Format("Error: Elmah Error Alert activated for {0}", error.Error),
                            Details      = String.Format("Number of {0} exceeded Error threshold limit during the last hour.Threshold error count per hour : {1}, Events recorded in the last hour: {2}", error.Error, thresholds.ElmahCriticalErrorPerHourAlertErrorThreshold, error.Occurecnes.ToString()),
                            AlertName    = string.Format("Error: Elmah Error Alert for {0}", error.Error),
                            Component    = "Web Server",
                            Level        = "Error"
                        }.ExecuteCommand();
                    }
                    else if (error.Occurecnes > thresholds.ElmahCriticalErrorPerHourAlertWarningThreshold && LastNHours == 1)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = string.Format("Warning: Elmah Error Alert activated for {0}", error.Error),
                            Details      = String.Format("Number of {0} exceeded Warning threshold limit during the last hour.Threshold error count per hour : {1}, Events recorded in the last hour: {2}", error.Error, thresholds.ElmahCriticalErrorPerHourAlertWarningThreshold, error.Occurecnes.ToString()),
                            AlertName    = string.Format("Warning: Elmah Error Alert for {0}", error.Error),
                            Component    = "Web Server",
                            Level        = "Warning"
                        }.ExecuteCommand();
                    }
                }
            }

            var json = new JavaScriptSerializer().Serialize(listOfErrors);

            ReportHelpers.CreateBlob(StorageAccount, "ElmahErrorsDetailed" + LastNHours.ToString() + "hours.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
        }
        private void GetMicroServiceReportForCheck(string checkAlias, int CheckId)
        {
            DateTime startingTime = DateTime.Now.AddHours(-LastNhour);
            List <Tuple <string, string> > summaryValues = new List <Tuple <string, string> >();
            string            serviceStatus = "up";
            int               overallTime   = 60 * 60 * LastNhour; // in sec
            int               downtimeSum   = 0;                   // in secs
            long              fromTime      = UnixTimeStampUtility.GetUnixTimestampSeconds(startingTime.ToUniversalTime());
            long              toTime        = UnixTimeStampUtility.GetUnixTimestampSeconds(DateTime.Now.ToUniversalTime());
            NetworkCredential nc            = new NetworkCredential(UserName, Password);
            WebRequest        request       = WebRequest.Create(string.Format("https://api.pingdom.com/api/2.0/summary.outage/{0}?from={1}&to={2}", CheckId, fromTime, toTime));

            request.Credentials = nc;
            request.Headers.Add(AppKey);
            request.PreAuthenticate = true;
            request.Method          = "GET";
            WebResponse response = request.GetResponse();
            List <Tuple <int, DateTime> > downRecord = new List <Tuple <int, DateTime> >();
            AlertThresholds thresholdValues          = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            using (var reader = new StreamReader(response.GetResponseStream()))
            {
                JavaScriptSerializer js = new JavaScriptSerializer();
                var summaryObject       = js.Deserialize <dynamic>(reader.ReadToEnd());
                foreach (var summary in summaryObject["summary"])
                {
                    foreach (var states in summary.Value)
                    {
                        if (states["status"] == "down")
                        {
                            DateTime start = UnixTimeStampUtility.DateTimeFromUnixTimestampSeconds(states["timefrom"]).ToLocalTime();
                            DateTime end   = UnixTimeStampUtility.DateTimeFromUnixTimestampSeconds(states["timeto"]).ToLocalTime();


                            int downtime = (int)end.Subtract(start).TotalSeconds;
                            if (downtime > thresholdValues.PingdomServiceDistruptionErrorThresholdInSeconds)
                            {
                                serviceStatus = "down";
                                downRecord.Add(new Tuple <int, DateTime>(downtime, DateTime.Now));
                            }
                        }
                    }
                }
            }
            if (serviceStatus.Equals("down"))
            {
                StringBuilder sb = new StringBuilder();
                foreach (Tuple <int, DateTime> each in downRecord)
                {
                    sb.Append(string.Format("at {0}, there is {1} second down.", each.Item2.ToString(), each.Item1));
                    downtimeSum = downtimeSum + each.Item1; // in secs
                }

                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Error: Alert for {0} pingdom service Down", checkAlias),
                    Details      = string.Format("Pingdom service {0} down time exceeded threshold: {1} second, in last {2} hours, there are {3} down happened, detail is {4}", checkAlias, thresholdValues.PingdomServiceDistruptionErrorThresholdInSeconds, LastNhour, downRecord.Count, sb.ToString()),
                    AlertName    = string.Format("Error: Pingdom Micro Service: {0}", checkAlias),
                    Component    = "Pingdom Service",
                    Level        = "Error"
                }.ExecuteCommand();
            }

            int serviceUpTime = overallTime - downtimeSum; // in secs

            ReportHelpers.AppendDatatoBlob(StorageAccount, checkAlias + string.Format("{0:MMdd}", DateTime.Now) + "outageReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), serviceUpTime.ToString()), 24, ContainerName);
        }
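        // UnixTimeStampUtility is used above but not included in this snippet. A minimal sketch of
        // the two helpers it is assumed to provide (names taken from the call sites; the
        // implementation below is illustrative).
        public static class UnixTimeStampUtility
        {
            private static readonly DateTime Epoch = new DateTime(1970, 1, 1, 0, 0, 0, DateTimeKind.Utc);

            public static long GetUnixTimestampSeconds(DateTime utcTime)
            {
                return (long)utcTime.Subtract(Epoch).TotalSeconds;
            }

            public static DateTime DateTimeFromUnixTimestampSeconds(long seconds)
            {
                return Epoch.AddSeconds(seconds);
            }
        }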
        public override void ExecuteCommand()
        {
            AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
            int             diff            = GetTotalPackageCountFromDatabase() - GetTotalPackageCountFromLucene();

            if (diff > thresholdValues.LuceneIndexLagAlertErrorThreshold || diff < -200) //Increasing the value for negative lag due to bug https://github.com/NuGet/NuGetGallery/issues/2328/. TBD : Make the threshold configurable.
            {
                new SendAlertMailTask
                {
                    AlertSubject = "Error: Search Service Alert activated for Lucene index lag",
                    Details      = string.Format("Delta between the packages between in database and lucene index is {0}. Error Threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertErrorThreshold),
                    AlertName    = "Error: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Error"
                }.ExecuteCommand();
            }
            else if (diff > thresholdValues.LuceneIndexLagAlertWarningThreshold)
            {
                new SendAlertMailTask
                {
                    AlertSubject = "Warning: Search Service Alert activated for Lucene index lag",
                    Details      = string.Format("Delta between the packages between in database and lucene index is {0}. Warning Threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertWarningThreshold),
                    AlertName    = "Warning: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Warning"
                }.ExecuteCommand();
            }

            ReportHelpers.AppendDatatoBlob(StorageAccount, "IndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), diff.ToString()), 24, ContainerName);
        }
        public override void ExecuteCommand()
        {
            sqlQueryForDbAge = string.Format("select create_date from sys.databases where name = '{0}'", DbName);
            thresholdValues  = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
            List <Tuple <string, string> > jobOutputs = new List <Tuple <string, string> >();

            jobOutputs.Add(new Tuple <string, string>("SyncPackagesToFailoverDC", CheckLagBetweenDBAndBlob()));
            jobOutputs.Add(new Tuple <string, string>("ImportCompletionStatus", CheckForInCompleteDBImport()));
            JArray reportObject = ReportHelpers.GetJson(jobOutputs);

            ReportHelpers.CreateBlob(StorageAccount, "RunBackgroundCheckForFailoverDCReport.json", ContainerName, "application/json", ReportHelpers.ToStream(reportObject));
        }
        public override void ExecuteCommand()
        {
            //Get the logs for the previous Hour as the current one is being used by Azure.
            string        DeploymentID  = new JavaScriptSerializer().Deserialize <string>(ReportHelpers.Load(StorageAccount, "DeploymentId_" + ServiceName + ".json", ContainerName));
            string        latestLogName = "u_ex" + string.Format("{0:yyMMddHH}", DateTime.UtcNow.AddHours(-1)) + ".log";
            DirectoryInfo info          = new System.IO.DirectoryInfo(Path.Combine(Environment.CurrentDirectory, latestLogName));

            //Download the logs for the last hour.
            while (RetryCount-- > 0)
            {
                try
                {
                    if (!Directory.Exists(info.FullName))
                    {
                        Directory.CreateDirectory(info.FullName);
                    }

                    int instanceCount = GetCurrentInstanceCountInGallery(); //get current instance count.
                    for (int i = 0; i < instanceCount; i++)
                    {
                        ReportHelpers.DownloadBlobToLocalFile(IISStorageAccount, DeploymentID + "/NuGetGallery/NuGetGallery_IN_" + i.ToString() + "/Web/W3SVC1273337584/" + latestLogName, Path.Combine(info.FullName, "IN" + i.ToString() + ".log"), "wad-iis-requestlogs");
                    }
                    break; // break if the operation succeeds without doing any retry.
                }
                catch (Exception e)
                {
                    Console.WriteLine(string.Format("Exception thrown while trying to create report : {0}", e.Message));
                }
            }
            //Create reports.
            CreateOverviewReport(info);
            CreateUriStemDetailedReport(info);
        }
        public override void ExecuteCommand()
        {
            var thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(
                ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            var totalPackageCountInDatabase = GetTotalPackageCountFromDatabase();
            var luceneDetails = GetTotalPackageCountFromLucene();

            var difference = totalPackageCountInDatabase - luceneDetails.Item1 - 2;

            if (difference > thresholdValues.LuceneIndexLagAlertErrorThreshold)
            {
                new SendAlertMailTask
                {
                    AlertSubject            = string.Format("Consolidated Lucene index for {0} lagging behind database by {1} packages", SearchEndPoint, difference),
                    Details                 = string.Format("Delta between the packages between in database and Lucene index is {0}. Allowed Threshold lag : {1} packages", difference, thresholdValues.LuceneIndexLagAlertErrorThreshold),
                    AlertName               = "Error: Alert for LuceneIndexLag",
                    Component               = "SearchService",
                    Level                   = "Error",
                    DisableIncidentCreation = DisableIncidentCreation,
                    DisableNotification     = DisableNotification
                }.ExecuteCommand();
            }
            else if (difference > thresholdValues.LuceneIndexLagAlertWarningThreshold)
            {
                new SendAlertMailTask
                {
                    AlertSubject            = "Warning: Search Service Alert activated for Consolidated Lucene index lag",
                    Details                 = string.Format("Delta between the packages between in database and Lucene index is {0}. Warning Threshold lag : {1} packages", difference, thresholdValues.LuceneIndexLagAlertWarningThreshold),
                    AlertName               = "Warning: Alert for LuceneIndexLag",
                    Component               = "SearchService",
                    Level                   = "Warning",
                    DisableIncidentCreation = DisableIncidentCreation,
                    DisableNotification     = DisableNotification
                }.ExecuteCommand();
            }

            ReportHelpers.AppendDatatoBlob(StorageAccount, "ConsolidatedIndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json",
                                           new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), difference.ToString()), 24 * 12, ContainerName);

            var lastActivityTime      = GetLastCreatedOrEditedActivityTimeFromDatabase();
            var luceneCommitTimeStamp = luceneDetails.Item2;
            var indexLagInMinutes     = lastActivityTime.Subtract(luceneCommitTimeStamp).TotalMinutes;

            if (indexLagInMinutes > AllowedLagInMinutesSev1)
            {
                new SendAlertMailTask
                {
                    AlertSubject            = string.Format("Error: Consolidated Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(indexLagInMinutes, 2)),
                    Details                 = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but Lucene is updated @ {2}", Math.Round(indexLagInMinutes, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
                    AlertName               = "Error: Alert for LuceneIndexLag",
                    Component               = "SearchService",
                    Level                   = "Error",
                    EscPolicy               = "Sev1",
                    DisableIncidentCreation = DisableIncidentCreation,
                    DisableNotification     = DisableNotification
                }.ExecuteCommand();
            }
            else if (indexLagInMinutes > AllowedLagInMinutesSev2)
            {
                new SendAlertMailTask
                {
                    AlertSubject            = string.Format("Warning: Consolidated Lucene index for {0} out of date  by {1} minutes", SearchEndPoint, Math.Round(indexLagInMinutes, 2)),
                    Details                 = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but Lucene is updated @ {2}", Math.Round(indexLagInMinutes, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
                    AlertName               = "Warning: Alert for LuceneIndexLag",
                    Component               = "SearchService",
                    Level                   = "Error",
                    DisableIncidentCreation = DisableIncidentCreation,
                    DisableNotification     = DisableNotification
                }.ExecuteCommand();
            }

            ReportHelpers.AppendDatatoBlob(StorageAccount, "ConsolidatedIndexingLagCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json",
                                           new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), indexLagInMinutes.ToString(CultureInfo.InvariantCulture)), 24 * 12, ContainerName);
        }
        public override void ExecuteCommand()
        {
            int lastNhour = 24;
            List <WorkInstanceDetail>     jobDetail       = new List <WorkInstanceDetail>();
            List <WorkJobInstanceDetails> instanceDetails = getWorkjobInstance();

            foreach (WorkJobInstanceDetails job in instanceDetails)
            {
                int    invocationCount = 0;
                double totalRunTime    = 0;
                int    faultCount      = 0;
                int    faultRate       = 0;
                int    runtime         = 0;
                Dictionary <string, List <string> > ErrorList = new Dictionary <string, List <string> >();
                string AdminKey = WorkServiceAdminKey;

                if (job.url.Contains("api-work-1"))
                {
                    AdminKey = WorkServiceFailoverAdminKey;
                }
                NetworkCredential nc = new NetworkCredential(WorkServiceUserName, AdminKey);
                //get all invocations in last 24 hours or last 10 invocations
                int no = (lastNhour * 60) / job.FrequencyInMinutes;
                if (no < 10)
                {
                    no = 10;
                }
                WebRequest request = WebRequest.Create(string.Format("{0}/instances/{1}?limit={2}", job.url, job.JobInstanceName, no));
                request.Credentials     = nc;
                request.PreAuthenticate = true;
                request.Method          = "GET";
                WebResponse response = request.GetResponse();
                using (var reader = new StreamReader(response.GetResponseStream()))
                {
                    JavaScriptSerializer js = new JavaScriptSerializer();
                    js.MaxJsonLength = Int32.MaxValue;
                    var objects = js.Deserialize <List <WorkJobInvocation> >(reader.ReadToEnd());
                    WorkJobInvocation lastJob;
                    bool   alert         = false;
                    string lastCompleted = string.Empty;
                    if (objects.Any(item => item.status.Equals("Executed") && item.result.Equals("Completed")))
                    {
                        lastJob = objects.Where(item => item.status.Equals("Executed") && item.result.Equals("Completed")).ToList().FirstOrDefault();
                    }
                    else
                    {
                        lastJob = objects.FirstOrDefault();
                    }

                    if (lastJob != null)
                    {
                        lastCompleted = string.Format("{0} mins ago", Convert.ToInt32(DateTime.Now.Subtract(lastJob.completedAt).TotalMinutes));
                    }
                    else
                    {
                        lastCompleted = "N/A";
                    }

                    foreach (WorkJobInvocation each in objects)
                    {
                        if (each.result.Equals("Incomplete", StringComparison.OrdinalIgnoreCase))
                        {
                            continue;
                        }
                        if (each.completedAt >= DateTime.Now.AddHours(-1))
                        {
                            alert = true; // an invocation completed within the last hour, so the threshold alerts below are applicable
                        }
                        invocationCount++;
                        totalRunTime += each.completedAt.Subtract(each.queuedAt).TotalSeconds;
                        if (each.result.Equals("Faulted"))
                        {
                            faultCount++;
                            string message = getResultMessage(each.resultMessage);
                            if (ErrorList.ContainsKey(message))
                            {
                                if (ErrorList[message].Count < 5)
                                {
                                    ErrorList[message].Add(each.logUrl);
                                }
                            }

                            else
                            {
                                List <string> LogUrl = new List <string>();
                                LogUrl.Add(each.logUrl);
                                ErrorList.Add(message, LogUrl);
                            }
                        }
                    }
                    if (invocationCount != 0)
                    {
                        faultRate = (faultCount * 100 / invocationCount);
                        runtime   = ((int)(totalRunTime / invocationCount));
                    }
                    jobDetail.Add(new WorkInstanceDetail(job.JobInstanceName, job.FrequencyInMinutes + "mins", lastCompleted, runtime.ToString() + "s", invocationCount.ToString(), faultCount.ToString(), faultRate, ErrorList));
                    AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
                    string[]        ignoredJobs     = new JavaScriptSerializer().Deserialize <string[]>(ReportHelpers.Load(StorageAccount, "Configuration.WorkerJobToBeIgnored.json", ContainerName));
                    if (ignoredJobs.Contains(job.JobInstanceName, StringComparer.OrdinalIgnoreCase))
                    {
                        continue;
                    }
                    if (faultRate > thresholdValues.WorkJobErrorThreshold && alert)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = string.Format("Error: Alert for Nuget Work Service : {0} failure", job.JobInstanceName),
                            Details      = string.Format("Rate of failure exceeded Error threshold for {0}. Threshold count : {1}%, failure in last 24 hour : {2}", job.JobInstanceName, thresholdValues.WorkJobErrorThreshold, faultCount),
                            AlertName    = string.Format("Error: Nuget Work Service {0}", job.JobInstanceName),
                            Component    = "Nuget Work Service",
                            Level        = "Error"
                        }.ExecuteCommand();
                    }
                    else if (faultRate > thresholdValues.WorkJobWarningThreshold && alert)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = string.Format("Warning: Alert for Nuget Work Service: {0} failure", job.JobInstanceName),
                            Details      = string.Format("Rate of failure exceeded Warning threshold for {0}. Threshold count : {1}%, failure in last 24 hour : {2}", job.JobInstanceName, thresholdValues.WorkJobWarningThreshold, faultCount),
                            AlertName    = string.Format("Warning: Nuget Work Service {0}", job.JobInstanceName),
                            Component    = "Nuget Work Service",
                            Level        = "Warning"
                        }.ExecuteCommand();
                    }
                }
                //Check that the job is being queued and invoked as often as its schedule expects (detects scheduler failures).
                if (invocationCount < ((lastNhour * 60 / job.FrequencyInMinutes) / 2))
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = string.Format("Error: Alert for Nuget Work Service : {0} failure", job.JobInstanceName),
                        Details      = string.Format("In last 24 hours, invocation of {0} is only {1}, it's less than half of scheduled jobs", job.JobInstanceName, invocationCount),
                        AlertName    = string.Format("Error: Nuget Work Service {0}", job.JobInstanceName),
                        Component    = "Nuget Work Service",
                        Level        = "Error"
                    }.ExecuteCommand();
                }
            }

            List <WorkServiceAdmin> allkey = new List <WorkServiceAdmin>();

            allkey.Add(new WorkServiceAdmin(WorkServiceUserName, WorkServiceAdminKey));
            allkey.Add(new WorkServiceAdmin(WorkServiceUserName, WorkServiceFailoverAdminKey));
            var json = new JavaScriptSerializer().Serialize(jobDetail);
            var key  = new JavaScriptSerializer().Serialize(allkey);

            ReportHelpers.CreateBlob(StorageAccount, "WorkJobDetail.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
            ReportHelpers.CreateBlob(StorageAccount, "WorkServiceAdminKey.json", ContainerName, "application/json", ReportHelpers.ToStream(key));
        }
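        // A minimal sketch of the WorkJobInvocation DTO assumed by the deserialization above.
        // Property names are taken from the call sites (status, result, resultMessage, logUrl,
        // queuedAt, completedAt); the types are assumptions.
        public class WorkJobInvocation
        {
            public string status { get; set; }
            public string result { get; set; }
            public string resultMessage { get; set; }
            public string logUrl { get; set; }
            public DateTime queuedAt { get; set; }
            public DateTime completedAt { get; set; }
        }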
        public override void ExecuteCommand()
        {
            thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
            List <Tuple <string, string> > jobOutputs = new List <Tuple <string, string> >();

            jobOutputs.Add(new Tuple <string, string>("PackageStatics", CheckoutForPackageStatics()));
            //jobOutputs.Add(new Tuple<string, string>("PurgePackageStatistics", CheckForPurgePackagStatisticsJob()));
            jobOutputs.Add(new Tuple <string, string>("HandleQueuedPackageEdits", CheckForHandleQueuedPackageEditJob()));
            // jobOutputs.Add(new Tuple<string, string>("BackupPackages", CheckForBackupPackagesJob())); commenting out this check temporarily as ListBlobs on ng-backups container is giving error.
            JArray reportObject = ReportHelpers.GetJson(jobOutputs);

            ReportHelpers.CreateBlob(StorageAccount, "RunBackGroundChecksForWorkerJobsReport.json", ContainerName, "application/json", ReportHelpers.ToStream(reportObject));
        }
        private void CreateReportForOperationTask()
        {
            DateTime date = DateTime.UtcNow.AddDays(-LastNDays);

            string[] agentVersion = new JavaScriptSerializer().Deserialize <string[]>(ReportHelpers.Load(StorageAccount, "agentVersion.json", ContainerName));
            string[] Operation    = new JavaScriptSerializer().Deserialize <string[]>(ReportHelpers.Load(StorageAccount, "OperationType.json", ContainerName));


            using (var sqlConnection = new SqlConnection(ConnectionString.ConnectionString))
            {
                using (var dbExecutor = new SqlExecutor(sqlConnection))
                {
                    sqlConnection.Open();

                    foreach (string opt in Operation)
                    {
                        List <agentRequest> result = new List <agentRequest>();
                        foreach (string version in agentVersion)
                        {
                            string major = version[0].ToString();
                            string minor = version[2].ToString();
                            try
                            {
                                var requests = dbExecutor.Query <Int32>(string.Format(sqlQueryForOperation, major, minor, opt, date.ToString("yyyy-MM-dd"))).SingleOrDefault();
                                result.Add(new agentRequest(version, requests));
                            }

                            catch
                            {
                                result.Add(new agentRequest(version, 0));
                            }
                        }


                        var json = new JavaScriptSerializer().Serialize(result);
                        ReportHelpers.CreateBlob(StorageAccount, opt + LastNDays.ToString() + "Day.json", ContainerName, "application/json", ReportHelpers.ToStream(json));
                    }
                }
            }
        }
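        // sqlQueryForOperation is referenced above but not defined in this snippet. A hypothetical
        // four-placeholder format string matching the call ({0} = client major version, {1} = client
        // minor version, {2} = operation type, {3} = cut-off date); the table and column names are
        // assumptions about the warehouse schema.
        private const string sqlQueryForOperationExample =
            @"SELECT COUNT(*)
              FROM PackageStatistics
              WHERE UserAgent LIKE 'NuGet%/{0}.{1}%'
                AND Operation = '{2}'
                AND [Timestamp] >= '{3}'";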
 public override void ExecuteCommand()
 {
     thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
     //Check last activity in DB and last activity in catalog doesn't vary more than allowed threshold.
     CheckLagBetweenDBAndCatalog();
     //Check all newly uploaded packages in DB are present in catalog.
     DoIntegrityCheckBetweenDBAndCatalog();
 }
        public override void ExecuteCommand()
        {
            AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
            int             diff            = GetTotalPackageCountFromDatabase() - GetTotalPackageCountFromLucene();

            if (diff > thresholdValues.LuceneIndexLagAlertErrorThreshold || diff < -200) //Increasing the value for negative lag due to bug https://github.com/NuGet/NuGetGallery/issues/2328/. TBD : Make the threshold configurable.
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Lucene index for {0} lagging behind database by {1} packages", SearchEndPoint, diff),
                    Details      = string.Format("Delta between the package count in the database and the lucene index is {0}. Allowed threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertErrorThreshold),
                    AlertName    = "Error: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Error"
                }.ExecuteCommand();
            }
            else if (diff > thresholdValues.LuceneIndexLagAlertWarningThreshold)
            {
                new SendAlertMailTask
                {
                    AlertSubject = "Warning: Search Service Alert activated for Lucene index lag",
                    Details      = string.Format("Delta between the package count in the database and the lucene index is {0}. Warning threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertWarningThreshold),
                    AlertName    = "Warning: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Warning"
                }.ExecuteCommand();
            }

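            // Append the current package-count diff to the day's hourly report blob, keyed by HH-mm; the
            // 24 * 12 capacity allows one data point every 5 minutes over 24 hours.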
            ReportHelpers.AppendDatatoBlob(StorageAccount, "IndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), diff.ToString()), 24 * 12, ContainerName);

            DateTime lastActivityTime      = GetLastCreatedOrEditedActivityTimeFromDB();
            DateTime luceneCommitTimeStamp = GetCommitTimeStampFromLucene();
            double   lag = lastActivityTime.Subtract(luceneCommitTimeStamp).TotalMinutes;

            if (lag > AllowedLagInMinutesSev1)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Error: Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(lag, 2)),
                    Details      = string.Format("Search index for endpoint {3} was last updated {0} minutes ago. Last activity (create/edit) in the DB is at {1}, but lucene was last updated at {2}", Math.Round(lag, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
                    AlertName    = "Error: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Error",
                    EscPolicy    = "Sev1"
                }.ExecuteCommand();
            }

            else if (lag > AllowedLagInMinutesSev2)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Warning: Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(lag, 2)),
                    Details      = string.Format("Search index for endpoint {3} was last updated {0} minutes ago. Last activity (create/edit) in the DB is at {1}, but lucene was last updated at {2}", Math.Round(lag, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
                    AlertName    = "Warning: Alert for LuceneIndexLag",
                    Component    = "SearchService",
                    Level        = "Warning"
                }.ExecuteCommand();
            }

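            // Append the current index lag (in minutes) to the day's hourly report blob, using the same
            // HH-mm key and 24 * 12 (5-minute) capacity as the diff report above.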
            ReportHelpers.AppendDatatoBlob(StorageAccount, "IndexingLagCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple <string, string>(string.Format("{0:HH-mm}", DateTime.Now), lag.ToString()), 24 * 12, ContainerName);
        }
        private void SendAlerts(long Upload, long Download, long Search, int Catalog, double ResolverBlobs)
        {
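            // Compare each traffic/lag metric against its configured threshold and raise an alert for any value that exceeds it.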
            AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

            if (Upload > thresholdValues.UploadPackageThreshold)
            {
                CreateAlert(Upload.ToString(), thresholdValues.UploadPackageThreshold.ToString(), "Upload", "Upload API");
            }

            if (Download > thresholdValues.DownloadPackageThreshold)
            {
                CreateAlert(Download.ToString(), thresholdValues.DownloadPackageThreshold.ToString(), "Download", "Download API");
            }

            if (Search > thresholdValues.SearchPackageThreshold)
            {
                CreateAlert(Search.ToString(), thresholdValues.SearchPackageThreshold.ToString(), "Search", "Search API");
            }

            if (Catalog > thresholdValues.CatalogLagThreshold)
            {
                CreateAlert(Catalog.ToString(), thresholdValues.CatalogLagThreshold.ToString(), "Catalog Lag", "Database to Catalog");
            }

            if (ResolverBlobs > thresholdValues.ResolverLagThreshold)
            {
                CreateAlert(ResolverBlobs.ToString(), thresholdValues.ResolverLagThreshold.ToString(), "Resolver Lag", "Catalog to Resolver Blobs");
            }
        }
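        // For reference, a minimal sketch of what the "Configuration.AlertThresholds.json" blob might contain,
        // inferred only from the threshold properties read in this file; the exact field set and the values
        // shown here are assumptions:
        // {
        //     "UploadPackageThreshold": 500,
        //     "DownloadPackageThreshold": 100000,
        //     "SearchPackageThreshold": 50000,
        //     "CatalogLagThreshold": 100,
        //     "ResolverLagThreshold": 100,
        //     "LuceneIndexLagAlertWarningThreshold": 50,
        //     "LuceneIndexLagAlertErrorThreshold": 200,
        //     "WorkJobErrorThreshold": 10
        // }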
        public override void ExecuteCommand()
        {
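            // Enumerate the top-level virtual directories of the log container (one per VM job) and, for each
            // job, inspect recent "ended.txt" markers to count successful and failed runs.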
            CloudStorageAccount LogStorageAccount = CloudStorageAccount.Parse(LogStorageUri);
            CloudBlobClient     blobClient        = LogStorageAccount.CreateCloudBlobClient();
            CloudBlobContainer  container         = blobClient.GetContainerReference(LogStorageContainerName);
            var blobs = container.ListBlobs(null, false, BlobListingDetails.None);
            var dirs  = blobs.OfType <CloudBlobDirectory>().ToList();

            foreach (CloudBlobDirectory task in dirs)
            {
                ErrorCount = 0;
                Successed  = 0;
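                // Blob names look like "<task prefix>/yyyy/MM/dd/hh/mm/ss/xxxxxxx/NUGET-PROD-JOBS/<file>";
                // len + adjust is the number of characters to skip so that only the trailing file name (e.g. "ended.txt") remains.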
                int    len    = task.Prefix.Length;
                int    adjust = "yyyy/MM/dd/hh/mm/ss/xxxxxxx/NUGET-PROD-JOBS/".Length;
                string date   = string.Format("{0:yyyy/MM/dd/}", DateTime.UtcNow);
                errorFile = new StringBuilder();
                runs      = new List <string>();

                if (!task.Prefix.ToString().Contains("Ng"))
                {
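                    // Non-Ng tasks: look at today's runs first and, if none are found, walk back one day at a
                    // time (up to 5 days); the 20 most recent "ended.txt" markers of the matched day are verified.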
                    int days = 1;
                    while (true)
                    {
                        var allblobs = container.ListBlobs(prefix: task.Prefix + date, useFlatBlobListing: true, blobListingDetails: BlobListingDetails.None).OrderByDescending(e => (e as CloudBlockBlob).Name).Where(e => (e as CloudBlockBlob).Name.Substring(len + adjust).Equals("ended.txt")).Take(20);

                        int count = 0;
                        date = string.Format("{0:yyyy/MM/dd/}", DateTime.UtcNow.AddDays(-days));
                        foreach (var blob in allblobs)
                        {
                            count++;
                            bool result = EndfileCheck((blob as CloudBlockBlob).Name.ToString(), container, task.Prefix.Replace(@"\", ""));
                        }
                        if (count != 0 || days > 5)
                        {
                            break;
                        }
                        days++;
                    }
                }
                else
                {
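                    // Ng tasks: collect every recorded run for this task, verify each run's "ended.txt" marker,
                    // and count how many runs happened today so excessive restarts can be alerted on below.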
                    CollectJobRun(task);
                    int runToday = 0;
                    foreach (string run in runs)
                    {
                        bool result = EndfileCheck(run + @"ended.txt", container, task.Prefix.Replace(@"\", ""));
                        if (run.Contains(date))
                        {
                            runToday++;
                        }
                    }

                    if (runToday >= 5)
                    {
                        new SendAlertMailTask
                        {
                            AlertSubject = string.Format("Warning: Alert for VM jobs : {0} ", task.Prefix.Replace(@"\", "")),
                            Details      = string.Format("{0} task has run 5 or more times today; we should check it.", task.Prefix.Replace(@"\", "")),
                            AlertName    = string.Format("Error: Vm jobs {0}", task.Prefix.Replace(@"\", "")),
                            Component    = "Vm jobs",
                            Level        = "Error"
                        }.ExecuteCommand();
                    }
                }

                AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize <AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

                if (Successed != 0 && ErrorCount * 100 / (Successed + ErrorCount) > thresholdValues.WorkJobErrorThreshold)
                {
                    new SendAlertMailTask
                    {
                        AlertSubject = string.Format("Error: Alert for VM jobs : {0} failures", task.Prefix.Replace(@"\", "")),
                        Details      = string.Format("{0} failure rate exceeded the error threshold of {1}%. {2} of the last 20 runs failed: {3}", task.Prefix.Replace(@"\", ""), thresholdValues.WorkJobErrorThreshold, ErrorCount, errorFile.ToString()),
                        AlertName    = string.Format("Error: Vm jobs {0}", task.Prefix.Replace(@"\", "")),
                        Component    = "Vm jobs",
                        Level        = "Error"
                    }.ExecuteCommand();
                }
            }
        }