public override void ExecuteCommand()
{
    // Pull the configured alert thresholds from blob storage.
    var serializer = new JavaScriptSerializer();
    var thresholds = serializer.Deserialize<AlertThresholds>(
        ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));

    // Positive lag means the database is ahead of the Lucene index.
    int lag = GetTotalPackageCountFromDatabase() - GetTotalPackageCountFromLucene();

    // Negative-lag bound of -200 per https://github.com/NuGet/NuGetGallery/issues/2328/. TBD : Make the threshold configurable.
    bool errorLag = lag > thresholds.LuceneIndexLagAlertErrorThreshold || lag < -200;
    if (errorLag)
    {
        new SendAlertMailTask
        {
            AlertSubject = "Error: Search Service Alert activated for Lucene index lag",
            Details = string.Format("Delta between the packages between in database and lucene index is {0}. Error Threshold lag : {1} packages", lag.ToString(), thresholds.LuceneIndexLagAlertErrorThreshold),
            AlertName = "Error: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Error"
        }.ExecuteCommand();
    }
    else if (lag > thresholds.LuceneIndexLagAlertWarningThreshold)
    {
        new SendAlertMailTask
        {
            AlertSubject = "Warning: Search Service Alert activated for Lucene index lag",
            Details = string.Format("Delta between the packages between in database and lucene index is {0}. Warning Threshold lag : {1} packages", lag.ToString(), thresholds.LuceneIndexLagAlertWarningThreshold),
            AlertName = "Warning: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Warning"
        }.ExecuteCommand();
    }

    // Record the data point in the daily report blob (24 hourly entries).
    ReportHelpers.AppendDatatoBlob(
        StorageAccount,
        "IndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json",
        new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), lag.ToString()),
        24,
        ContainerName);
}
/// <summary>
/// Builds the per-user-agent IIS report: for each configured user agent,
/// counts requests and (when non-zero) computes the average time-taken from
/// the day's IIS logs via LogParser, then appends one JSON entry per agent
/// to the daily report blob.
/// </summary>
/// <param name="info">Directory that holds the IIS log files.</param>
private void CreateUserAgentReport(DirectoryInfo info)
{
    // FIX: removed unused locals (standardError/standardOutput) and the
    // redundant double-initialization of the userAgents list.
    var content = ReportHelpers.Load(StorageAccount, "Configuration.IISUserAgent.json", ContainerName);
    List<IISUserAgentDetails> userAgents = new JavaScriptSerializer().Deserialize<List<IISUserAgentDetails>>(content);
    List<IISUserAgentDetails> userAgentDetails = new List<IISUserAgentDetails>();

    foreach (IISUserAgentDetails agent in userAgents)
    {
        string query = string.Format(@"select count(*) from {0}\*{1}*.log WHERE cs(User-Agent) LIKE '{2}'", info.FullName, ReportDate, agent.UserAgent);
        int requestCount = InvokeLogParserProcessForUserAgent(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF");

        // Average time is only meaningful when the agent saw traffic.
        int avgTime = 0;
        if (requestCount > 0)
        {
            query = string.Format(@"select avg(time-taken) from {0}\*{1}*.log WHERE cs(User-Agent) LIKE '{2}'", info.FullName, ReportDate, agent.UserAgent);
            avgTime = InvokeLogParserProcessForUserAgent(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF");
        }

        userAgentDetails.Add(new IISUserAgentDetails(agent.UserAgentName, agent.UserAgent, avgTime, requestCount));
    }

    // Persist one JSON entry per user agent to the daily report blob.
    string blobName = "IISUserAgentDetails" + ReportDate + ".json";
    int count = 0;
    foreach (IISUserAgentDetails detail in userAgentDetails)
    {
        var json = new JavaScriptSerializer().Serialize(detail);
        ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple<string, string>(count.ToString(), json), userAgentDetails.Count, ContainerName);
        count++;
    }
}
/// <summary>
/// Appends the current Azure role instance count for this service to the
/// daily hourly-report blob (24 data points).
/// </summary>
/// <param name="doc">Azure deployment configuration document.</param>
private void CreateInstanceCountReport(XmlDocument doc)
{
    XmlNodeList roleInstanceNodes = doc.GetElementsByTagName("RoleInstance", "http://schemas.microsoft.com/windowsazure");
    Console.WriteLine(roleInstanceNodes.Count);

    string blobName = ServiceName + "InstanceCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json";
    Tuple<string, string> dataPoint = new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), roleInstanceNodes.Count.ToString());
    ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, dataPoint, 24, ContainerName);
}
/// <summary>
/// Creates report for Over all RequestsPerHour
/// </summary>
/// <param name="info">Directory that holds the IIS log files.</param>
private void CreateOverviewReport(DirectoryInfo info)
{
    // "%" matches every uri-stem, i.e. all requests in the previous hour's logs.
    int requestCount = GetDataForUriStem("%", "count (*)", info.FullName);

    DateTime previousHour = DateTime.Now.AddHours(-1);
    string blobName = "IISRequests" + string.Format("{0:MMdd}", previousHour) + ".json";
    Tuple<string, string> datapoint = new Tuple<string, string>(string.Format("{0:HH:00}", previousHour), requestCount.ToString());
    ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, datapoint, 50, ContainerName);
}
public override void ExecuteCommand()
{
    // FIX: the credentials were passed through string.Format with no format
    // arguments — pointless, and a FormatException risk if the connection
    // string ever contains '{'. Pass the value through directly.
    TableErrorLog log = new TableErrorLog(ElmahAccountCredentials);
    List<ErrorLogEntry> entities = new List<ErrorLogEntry>();
    log.GetErrors(0, 500, entities); //retrieve n * LastNHours errors assuming a max of 500 errors per hour.

    // Number of errors whose timestamp falls within the last hour
    // (Count(predicate) replaces the original Where(...).ToList().Count).
    int count = entities.Count(entity =>
        entity.Error.Time.ToUniversalTime() > DateTime.UtcNow.AddHours(-1) &&
        entity.Error.Time.ToUniversalTime() < DateTime.UtcNow);

    // Append the hourly error rate to the daily report blob.
    ReportHelpers.AppendDatatoBlob(StorageAccount, "ErrorRate" + string.Format("{0:MMdd}", DateTime.Now) + ".json", new Tuple<string, string>(String.Format("{0:HH:mm}", DateTime.Now), count.ToString()), 50, ContainerName);
}
public override void ExecuteCommand()
{
    // End-to-end performance probe: pushes a freshly-versioned test package,
    // then measures upload, download, search visibility, and catalog/resolver
    // lag, appending each timing to its own daily report blob
    // (48 data points per day, i.e. one per half hour).
    StopWatches timer = new StopWatches();
    DateTime today = DateTime.Today;
    string day = string.Format("{0:yyyy-MM-dd}", today);
    string version = string.Empty;
    string file = Path.Combine(Environment.CurrentDirectory, TestPackageName + ".nupkg");
    string newPackage = GetNewPackage(file, out version);
    //upload
    Console.WriteLine("Pushing :{0}", newPackage);
    long UploadTimeElapsed = UploadPackage(timer, newPackage, Source, ApiKey);
    ReportHelpers.AppendDatatoBlob(StorageAccount, ("UploadPackageTimeElapsed" + day + ".json"), new Tuple<string, string>(string.Format("{0:HH:mm}", DateTime.Now), UploadTimeElapsed.ToString()), 48, ContainerName);
    // NOTE(review): deletes a relative "backup" file — presumably a temp file
    // left behind by GetNewPackage; confirm against that helper.
    File.Delete("backup");
    //download
    long DownloadTimeElapsed = -1;
    Task<string> result = null;
    result = DownloadPackageFromFeed(timer, TestPackageName, version, out DownloadTimeElapsed);
    Console.WriteLine(result.Status);
    // NOTE(review): the out value from DownloadPackageFromFeed is immediately
    // overwritten with the stopwatch reading — confirm which one is intended.
    DownloadTimeElapsed = timer.DownloadTimeElapsed.ElapsedMilliseconds;
    ReportHelpers.AppendDatatoBlob(StorageAccount, ("DownloadPackageTimeElapsed" + day + ".json"), new Tuple<string, string>(string.Format("{0:HH:mm}", DateTime.Now), DownloadTimeElapsed.ToString()), 48, ContainerName);
    //search
    long SearchTimeElapsed = -1;
    //SearchPackage is called until the uploaded package is seen in the search result
    while (SearchTimeElapsed == -1)
    {
        SearchTimeElapsed = SearchPackage(timer, TestPackageName, version);
    }
    ReportHelpers.AppendDatatoBlob(StorageAccount, ("SearchPackageTimeElapsed" + day + ".json"), new Tuple<string, string>(string.Format("{0:HH:mm}", DateTime.Now), SearchTimeElapsed.ToString()), 48, ContainerName);
    //catalog lag
    JToken timeStampCatalog;
    int CatalogLag = DBToCatalogLag(timer, TestPackageName, out timeStampCatalog);
    ReportHelpers.AppendDatatoBlob(StorageAccount, ("CatalogLag" + day + ".json"), new Tuple<string, string>(string.Format("{0:HH:mm}", DateTime.Now), CatalogLag.ToString()), 48, ContainerName);
    // Persist the last observed catalog timestamp for the next run's lag computation.
    ReportHelpers.CreateBlob(StorageAccount, ("LastCatalogTimeStamp.json"), ContainerName, "SqlDateTime", ReportHelpers.ToStream(timeStampCatalog));
    //resolver lag
    JToken timeStampResolver;
    double ResolverLag = CatalogToResolverLag(out timeStampResolver);
    ReportHelpers.AppendDatatoBlob(StorageAccount, ("ResolverLag" + day + ".json"), new Tuple<string, string>(string.Format("{0:HH:mm}", DateTime.Now), ResolverLag.ToString()), 48, ContainerName);
    ReportHelpers.CreateBlob(StorageAccount, ("LastResolverTimeStamp.json"), ContainerName, "SqlDateTime", ReportHelpers.ToStream(timeStampResolver));
    // Raise alerts if any of the measured values exceed configured thresholds.
    SendAlerts(UploadTimeElapsed, DownloadTimeElapsed, SearchTimeElapsed, CatalogLag, ResolverLag);
}
/// <summary>
/// Runs a scalar SQL query (e.g. connection/request count), raises an
/// Error/Warning alert mail when the value exceeds the given thresholds,
/// and appends the value to the daily report blob named after
/// <paramref name="blobName"/>.
/// </summary>
/// <param name="sqlQuery">Query returning a single Int32 value.</param>
/// <param name="blobName">Report name; also selects the suspended-requests detail path.</param>
/// <param name="error">Error threshold.</param>
/// <param name="warning">Warning threshold.</param>
private void GetCurrentValueAndAlert(string sqlQuery, string blobName, int error, int warning)
{
    // FIX: removed unused local connectionCountDataPoints (written nowhere, read nowhere).
    // For the suspended-requests report, include each request's wait type in the alert body.
    StringBuilder message = new StringBuilder();
    if (blobName.Equals("DBSuspendedRequests"))
    {
        message.Append("all requests wait_type are");
        foreach (DatabaseRequest rq in GetDBRequest())
        {
            message.Append(rq.Wait_Type + "\n");
        }
    }

    using (var sqlConnection = new SqlConnection(ConnectionString.ConnectionString))
    {
        using (var dbExecutor = new SqlExecutor(sqlConnection))
        {
            sqlConnection.Open();
            var connectionCount = dbExecutor.Query<Int32>(sqlQuery).SingleOrDefault();
            if (connectionCount > error)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Error: SQL Azure database alert activated for {0}", blobName),
                    Details = string.Format("Number of {0} exceeded the Error threshold value. Threshold value {1}, Current value : {2}.{3}", blobName, error, connectionCount, message),
                    AlertName = "Error: SQL Azure DB alert for connections/requests count",
                    Component = "SQL Azure database",
                    Level = "Error"
                }.ExecuteCommand();
            }
            else if (connectionCount > warning)
            {
                new SendAlertMailTask
                {
                    AlertSubject = string.Format("Warning: SQL Azure database alert activated for {0}", blobName),
                    Details = string.Format("Number of {0} exceeded the Warning threshold value. Threshold value {1}, Current value : {2}.{3}", blobName, warning, connectionCount, message),
                    AlertName = "Warning: SQL Azure DB alert for connections/requests count",
                    Component = "SQL Azure database",
                    Level = "Warning"
                }.ExecuteCommand();
            }
            // Always record the current value (up to 50 data points per day).
            ReportHelpers.AppendDatatoBlob(StorageAccount, blobName + string.Format("{0:MMdd}", DateTime.Now) + ".json", new Tuple<string, string>(String.Format("{0:HH:mm}", DateTime.Now), connectionCount.ToString()), 50, ContainerName);
        }
    }
}
/// <summary>
/// Builds the per-client-IP IIS report (request count and average
/// time-taken per IP) for the report date and appends one JSON entry per IP
/// to the daily report blob. Skips the blob entirely when no rows matched.
/// </summary>
/// <param name="info">Directory that holds the IIS log files.</param>
private void CreateIPDetailsReport(DirectoryInfo info)
{
    // FIX: removed unused locals standardError/standardOutput.
    string query = string.Format(@"select c-ip, avg(time-taken), count(*) from {0}\*{1}*.log GROUP BY c-ip", info.FullName, ReportDate);
    List<IISIPDetails> ipDetails = InvokeLogParserProcessForIPDetails(@"-i:IISW3C -o:CSV " + @"""" + query + @"""" + " -stats:OFF", 3);
    if (ipDetails.Count > 0)
    {
        string blobName = "IISIPDetails" + ReportDate + ".json";
        int count = 0;
        foreach (IISIPDetails detail in ipDetails)
        {
            var json = new JavaScriptSerializer().Serialize(detail);
            ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple<string, string>(count.ToString(), json), ipDetails.Count, ContainerName);
            count++;
        }
    }
}
/// <summary>
/// Creates report for count and avg time taken for individual scenarios.
/// </summary>
/// <param name="info">Directory that holds the IIS log files.</param>
private void CreateUriStemDetailedReport(DirectoryInfo info)
{
    // NOTE(review): "Configration" is misspelled but must match the actual
    // blob name in storage — do not "fix" without renaming the blob.
    var content = ReportHelpers.Load(StorageAccount, "Configration.IISRequestStems.json", ContainerName);
    // FIX: removed the redundant double-initialization of UriStems.
    List<IISRequestDetails> UriStems = new JavaScriptSerializer().Deserialize<List<IISRequestDetails>>(content);
    List<IISRequestDetails> requestDetails = new List<IISRequestDetails>();
    foreach (IISRequestDetails stem in UriStems)
    {
        int requestCount = GetDataForUriStem(stem.UriStem, "count (*)", info.FullName);
        // Average time is only meaningful when the scenario saw traffic.
        int avgTime = 0;
        if (requestCount > 0)
        {
            avgTime = GetDataForUriStem(stem.UriStem, "avg (time-taken)", info.FullName);
        }
        requestDetails.Add(new IISRequestDetails(stem.ScenarioName, stem.UriStem, avgTime, requestCount));
    }
    // Append the whole scenario list as one JSON entry keyed by the previous hour.
    var json = new JavaScriptSerializer().Serialize(requestDetails);
    string blobName = "IISRequestDetails" + string.Format("{0:MMdd}", DateTime.Now.AddHours(-1)) + ".json";
    ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, new Tuple<string, string>(string.Format("{0:HH:00}", DateTime.Now.AddHours(-1)), json), 50, ContainerName);
}
public override void ExecuteCommand()
{
    // Resolve the current deployment id so we can locate the WAD
    // performance-counter table for this service.
    string DeployId = new JavaScriptSerializer().Deserialize<string>(ReportHelpers.Load(StorageAccount, "DeploymentId_" + ServiceName + ".json", ContainerName));
    CloudStorageAccount storageAccount = CloudStorageAccount.Parse(PerfCounterTableStorageAccount);
    CloudTableClient tableClient = storageAccount.CreateCloudTableClient();
    CloudTable table = tableClient.GetTableReference("WAD" + DeployId + "PT5MRTable");

    // Average the counter samples recorded in the last frequencyInMin minutes.
    int count = 0;
    double sum = 0;
    TableQuery<dataEntity> rangeQuery = new TableQuery<dataEntity>().Where(TableQuery.CombineFilters(
        TableQuery.GenerateFilterConditionForDate("Timestamp", QueryComparisons.GreaterThan, DateTime.UtcNow.AddMinutes(-frequencyInMin)),
        TableOperators.And,
        TableQuery.GenerateFilterCondition("CounterName", QueryComparisons.Equal, PerfCounterName)));
    foreach (dataEntity entity in table.ExecuteQuery(rangeQuery))
    {
        count++;
        sum += entity.Total / entity.Count;
    }

    // FIX: guard against an empty sample window — the original sum / count
    // produced NaN when no rows matched and wrote "NaN" into the report blob.
    double average = count > 0 ? sum / count : 0;
    ReportHelpers.AppendDatatoBlob(StorageAccount, ServiceName + PerfCounterName + string.Format("{0:MMdd}", DateTime.Now) + ".json", new Tuple<string, string>(String.Format("{0:HH:mm}", DateTime.Now), average.ToString("F")), 24 * 60 / frequencyInMin, ContainerName);
}
/// <summary>
/// Runs the given count query over the previous hour (UTC window) and
/// appends the result to the "&lt;reportName&gt;HourlyReport.json" blob.
/// </summary>
/// <param name="connectionString">Database to query.</param>
/// <param name="sqlQuery">Format string taking the window start/end timestamps.</param>
/// <param name="reportName">Prefix of the report blob name.</param>
private void CreateWeeklyStatReportFor(string connectionString, string sqlQuery, string reportName)
{
    // Report window: the previous hour, expressed in UTC.
    // (The old "initialize to day 01 of the given month" comment was stale.)
    startingTime = DateTime.Now.AddHours(-1).ToUniversalTime();
    DateTime endTime = DateTime.Now.ToUniversalTime();
    using (var sqlConnection = new SqlConnection(connectionString))
    {
        using (var dbExecutor = new SqlExecutor(sqlConnection))
        {
            sqlConnection.Open();
            try
            {
                var count = dbExecutor.Query<Int32>(string.Format(sqlQuery, startingTime.ToString("yyyy-MM-dd HH:mm:ss"), endTime.ToString("yyyy-MM-dd HH:mm:ss"))).SingleOrDefault();
                ReportHelpers.AppendDatatoBlob(StorageAccount, reportName + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH:mm}", endTime.ToLocalTime()), count.ToString()), 50, ContainerName);
            }
            catch (NullReferenceException)
            {
                // FIX: the original appended ("0", "0") to a local list that was
                // never read — dead code removed; this hour's data point is skipped.
                // NOTE(review): catching NullReferenceException is suspect — confirm
                // what actually throws here and catch that type instead.
            }
        }
    }
}
public override void ExecuteCommand()
{
    // Poll the search service diagnostics endpoint, alert on CPU/memory
    // thresholds, and append hourly usage data points to the report blobs.
    NetworkCredential nc = new NetworkCredential(SearchAdminUserName, SearchAdminKey);
    WebRequest request = WebRequest.Create(SearchEndPoint);
    AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
    request.Credentials = nc;
    request.PreAuthenticate = true;
    request.Method = "GET";
    WebResponse respose = request.GetResponse();
    using (var reader = new StreamReader(respose.GetResponseStream()))
    {
        JavaScriptSerializer js = new JavaScriptSerializer();
        var objects = js.Deserialize<dynamic>(reader.ReadToEnd());
        var process_info = objects["process"];
        double cpusecond = (double)process_info["cpuSeconds"];
        long memory = (long)process_info["virtualMemorySize"];

        // BUG FIX: cpuUsage and memUsage were initialized to 0 and never
        // assigned, so none of the alerts below could ever fire. Memory usage
        // now comes from the service response. cpuSeconds is cumulative, so a
        // usage percentage cannot be derived from a single sample — the CPU
        // alerts remain effectively disabled.
        // TODO(review): compute real CPU usage from two consecutive samples.
        int cpuUsage = 0;
        long memUsage = memory;
        if (cpuUsage > thresholdValues.SearchCpuPercentErrorThreshold)
        {
            new SendAlertMailTask
            {
                AlertSubject = "Error: Search Service Alert activated for cpu usage",
                Details = string.Format("Search service process cpu usage is above Error threshold: {0}% , it's {1}% ", thresholdValues.SearchCpuPercentErrorThreshold, cpuUsage.ToString()),
                AlertName = "Error: Alert for Serach CPU Usage",
                Component = "SearchService",
                Level = "Error"
            }.ExecuteCommand();
        }
        else if (cpuUsage > thresholdValues.SearchCpuPercentWarningThreshold)
        {
            new SendAlertMailTask
            {
                AlertSubject = "Warning: Search Service Alert activated for cpu usage",
                Details = string.Format("Search service process cpu usage is above Warning threshold: {0}% , it's {1}% ", thresholdValues.SearchCpuPercentWarningThreshold, cpuUsage.ToString()),
                AlertName = "Warning: Alert for Serach CPU Usage",
                Component = "SearchService",
                Level = "Warning"
            }.ExecuteCommand();
        }

        // BUG FIX: (1 << 30) is an int multiply and overflows for thresholds
        // of 2 GB or more — use 1L << 30 (bytes per GB) instead.
        if (memUsage > thresholdValues.SearchMemErrorThresholdInGb * (1L << 30))
        {
            new SendAlertMailTask
            {
                AlertSubject = "Error: Search Service Alert activated for memory usage",
                Details = string.Format("Search service process memory usage is above Error threshold: {0}% GB, it's {1}% Byte ", thresholdValues.SearchMemErrorThresholdInGb, memUsage.ToString()),
                AlertName = "Error: Alert for Serach Memory Usage",
                Component = "SearchService",
                Level = "Error"
            }.ExecuteCommand();
        }
        else if (memUsage > thresholdValues.SearchMemWarningThresholdInGb * (1L << 30))
        {
            new SendAlertMailTask
            {
                AlertSubject = "Warning: Search Service Alert activated for memory usage",
                Details = string.Format("Search service process memory usage is above Warning threshold {0}% GB, it's {1}% Byte ", thresholdValues.SearchMemWarningThresholdInGb, memUsage.ToString()),
                AlertName = "Warning: Alert for Serach Memory Usage",
                Component = "SearchService",
                Level = "Warning"
            }.ExecuteCommand();
        }
        // Record the raw cumulative-CPU-seconds and memory values (24 hourly points).
        ReportHelpers.AppendDatatoBlob(StorageAccount, "SearchCpuUsage" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), cpusecond.ToString()), 24, ContainerName);
        ReportHelpers.AppendDatatoBlob(StorageAccount, "SearchMemUsage" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), memory.ToString()), 24, ContainerName);
    }
}
public override void ExecuteCommand()
{
    // Compare the database package count against the consolidated Lucene
    // index, alerting on count lag and on index-update time lag.
    var thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(
        ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
    var totalPackageCountInDatabase = GetTotalPackageCountFromDatabase();
    var luceneDetails = GetTotalPackageCountFromLucene();
    // NOTE(review): the "- 2" offset is unexplained — presumably accounts for
    // entries intentionally excluded from the index; confirm and document.
    var difference = totalPackageCountInDatabase - luceneDetails.Item1 - 2;
    if (difference > thresholdValues.LuceneIndexLagAlertErrorThreshold)
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Consolidated Lucene index for {0} lagging behind database by {1} packages", SearchEndPoint, difference),
            Details = string.Format("Delta between the packages between in database and Lucene index is {0}. Allowed Threshold lag : {1} packages", difference, thresholdValues.LuceneIndexLagAlertErrorThreshold),
            AlertName = "Error: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Error",
            DisableIncidentCreation = DisableIncidentCreation,
            DisableNotification = DisableNotification
        }.ExecuteCommand();
    }
    else if (difference > thresholdValues.LuceneIndexLagAlertWarningThreshold)
    {
        new SendAlertMailTask
        {
            AlertSubject = "Warning: Search Service Alert activated for Consolidated Lucene index lag",
            Details = string.Format("Delta between the packages between in database and Lucene index is {0}. Warning Threshold lag : {1} packages", difference, thresholdValues.LuceneIndexLagAlertWarningThreshold),
            AlertName = "Warning: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Warning",
            DisableIncidentCreation = DisableIncidentCreation,
            DisableNotification = DisableNotification
        }.ExecuteCommand();
    }
    // 24 * 12 entries: one data point every 5 minutes for a day.
    ReportHelpers.AppendDatatoBlob(StorageAccount, "ConsolidatedIndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), difference.ToString()), 24 * 12, ContainerName);

    // Time-based lag: minutes between the last DB create/edit activity and
    // the Lucene index's commit timestamp.
    var lastActivityTime = GetLastCreatedOrEditedActivityTimeFromDatabase();
    var luceneCommitTimeStamp = luceneDetails.Item2;
    var indexLagInMinutes = lastActivityTime.Subtract(luceneCommitTimeStamp).TotalMinutes;
    if (indexLagInMinutes > AllowedLagInMinutesSev1)
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Error: Consolidated Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(indexLagInMinutes, 2)),
            Details = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but Lucene is updated @ {2}", Math.Round(indexLagInMinutes, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
            AlertName = "Error: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Error",
            EscPolicy = "Sev1",
            DisableIncidentCreation = DisableIncidentCreation,
            DisableNotification = DisableNotification
        }.ExecuteCommand();
    }
    else if (indexLagInMinutes > AllowedLagInMinutesSev2)
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Warning: Consolidated Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(indexLagInMinutes, 2)),
            Details = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but Lucene is updated @ {2}", Math.Round(indexLagInMinutes, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
            AlertName = "Warning: Alert for LuceneIndexLag",
            Component = "SearchService",
            // BUG FIX: this warning-level (Sev2) branch previously sent Level = "Error".
            Level = "Warning",
            DisableIncidentCreation = DisableIncidentCreation,
            DisableNotification = DisableNotification
        }.ExecuteCommand();
    }
    ReportHelpers.AppendDatatoBlob(StorageAccount, "ConsolidatedIndexingLagCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), indexLagInMinutes.ToString(CultureInfo.InvariantCulture)), 24 * 12, ContainerName);
}
/// <summary>
/// Queries Pingdom's outage summary for the given check over the last
/// LastNhour hours, sends an Error alert when any single outage exceeded the
/// configured threshold, and appends the computed uptime (in seconds) to the
/// daily outage report blob.
/// </summary>
/// <param name="checkAlias">Human-readable name of the Pingdom check.</param>
/// <param name="CheckId">Pingdom check id.</param>
private void GetMicroServiceReportForCheck(string checkAlias, int CheckId)
{
    // FIX: removed unused local summaryValues (never read or written).
    DateTime startingTime = DateTime.Now.AddHours(-LastNhour);
    string serviceStatus = "up";
    int overallTime = 60 * 60 * LastNhour; // in sec
    int downtimeSum = 0; // in secs
    long fromTime = UnixTimeStampUtility.GetUnixTimestampSeconds(startingTime.ToUniversalTime());
    long toTime = UnixTimeStampUtility.GetUnixTimestampSeconds(DateTime.Now.ToUniversalTime());
    NetworkCredential nc = new NetworkCredential(UserName, Password);
    WebRequest request = WebRequest.Create(string.Format("https://api.pingdom.com/api/2.0/summary.outage/{0}?from={1}&to={2}", CheckId, fromTime, toTime));
    request.Credentials = nc;
    request.Headers.Add(AppKey);
    request.PreAuthenticate = true;
    request.Method = "GET";
    WebResponse respose = request.GetResponse();
    List<Tuple<int, DateTime>> downRecord = new List<Tuple<int, DateTime>>();
    AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
    using (var reader = new StreamReader(respose.GetResponseStream()))
    {
        JavaScriptSerializer js = new JavaScriptSerializer();
        var summaryObject = js.Deserialize<dynamic>(reader.ReadToEnd());
        foreach (var summary in summaryObject["summary"])
        {
            foreach (var states in summary.Value)
            {
                if (states["status"] == "down")
                {
                    DateTime start = UnixTimeStampUtility.DateTimeFromUnixTimestampSeconds(states["timefrom"]).ToLocalTime();
                    DateTime end = UnixTimeStampUtility.DateTimeFromUnixTimestampSeconds(states["timeto"]).ToLocalTime();
                    int downtime = (int)end.Subtract(start).TotalSeconds;
                    // Only outages longer than the configured threshold count as "down".
                    if (downtime > thresholdValues.PingdomServiceDistruptionErrorThresholdInSeconds)
                    {
                        serviceStatus = "down";
                        downRecord.Add(new Tuple<int, DateTime>(downtime, DateTime.Now));
                    }
                }
            }
        }
    }
    if (serviceStatus.Equals("down"))
    {
        StringBuilder sb = new StringBuilder();
        foreach (Tuple<int, DateTime> each in downRecord)
        {
            sb.Append(string.Format("at {0}, there is {1} second down.", each.Item2.ToString(), each.Item1));
            downtimeSum = downtimeSum + each.Item1; // in secs
        }
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Error: Alert for {0} pingdom service Down", checkAlias),
            Details = string.Format("Pingdom service {0} down time exceeded threshold: {1} second, in last {2} hours, there are {3} down happened, detail is {4}", checkAlias, thresholdValues.PingdomServiceDistruptionErrorThresholdInSeconds, LastNhour, downRecord.Count, sb.ToString()),
            AlertName = string.Format("Error: Pingdom Micro Service: {0}", checkAlias),
            Component = "Pingdom Service",
            Level = "Error"
        }.ExecuteCommand();
    }
    int serviceUpTime = overallTime - downtimeSum; // in secs
    ReportHelpers.AppendDatatoBlob(StorageAccount, checkAlias + string.Format("{0:MMdd}", DateTime.Now) + "outageReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), serviceUpTime.ToString()), 24, ContainerName);
}
public override void ExecuteCommand()
{
    // Compare the database package count against the Lucene index, alerting
    // on count lag and on index-update time lag.
    AlertThresholds thresholdValues = new JavaScriptSerializer().Deserialize<AlertThresholds>(ReportHelpers.Load(StorageAccount, "Configuration.AlertThresholds.json", ContainerName));
    int diff = GetTotalPackageCountFromDatabase() - GetTotalPackageCountFromLucene();
    if (diff > thresholdValues.LuceneIndexLagAlertErrorThreshold || diff < -200) //Increasing the value for negative lag due to bug https://github.com/NuGet/NuGetGallery/issues/2328/. TBD : Make the threshold configurable.
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Lucene index for {0} lagging behind database by {1} packages", SearchEndPoint, diff),
            Details = string.Format("Delta between the packages between in database and lucene index is {0}. Allowed Threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertErrorThreshold),
            AlertName = "Error: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Error"
        }.ExecuteCommand();
    }
    else if (diff > thresholdValues.LuceneIndexLagAlertWarningThreshold)
    {
        new SendAlertMailTask
        {
            AlertSubject = "Warning: Search Service Alert activated for Lucene index lag",
            Details = string.Format("Delta between the packages between in database and lucene index is {0}. Warning Threshold lag : {1} packages", diff.ToString(), thresholdValues.LuceneIndexLagAlertWarningThreshold),
            AlertName = "Warning: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Warning"
        }.ExecuteCommand();
    }
    // 24 * 12 entries: one data point every 5 minutes for a day.
    ReportHelpers.AppendDatatoBlob(StorageAccount, "IndexingDiffCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), diff.ToString()), 24 * 12, ContainerName);

    // Time-based lag: minutes between the last DB create/edit activity and
    // the Lucene index commit timestamp.
    DateTime lastActivityTime = GetLastCreatedOrEditedActivityTimeFromDB();
    DateTime luceneCommitTimeStamp = GetCommitTimeStampFromLucene();
    double lag = lastActivityTime.Subtract(luceneCommitTimeStamp).TotalMinutes;
    if (lag > AllowedLagInMinutesSev1)
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Error: Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(lag, 2)),
            Details = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but lucene is update @ {2}", Math.Round(lag, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
            AlertName = "Error: Alert for LuceneIndexLag",
            Component = "SearchService",
            Level = "Error",
            EscPolicy = "Sev1"
        }.ExecuteCommand();
    }
    else if (lag > AllowedLagInMinutesSev2)
    {
        new SendAlertMailTask
        {
            AlertSubject = string.Format("Warning: Lucene index for {0} out of date by {1} minutes", SearchEndPoint, Math.Round(lag, 2)),
            Details = string.Format("Search Index for endpoint {3} last updated {0} minutes back. Last activity (create/edit) in DB is at {1}, but lucene is update @ {2}", Math.Round(lag, 2), lastActivityTime, luceneCommitTimeStamp, SearchEndPoint),
            AlertName = "Warning: Alert for LuceneIndexLag",
            Component = "SearchService",
            // BUG FIX: this warning-level (Sev2) branch previously sent Level = "Error".
            Level = "Warning"
        }.ExecuteCommand();
    }
    ReportHelpers.AppendDatatoBlob(StorageAccount, "IndexingLagCount" + string.Format("{0:MMdd}", DateTime.Now) + "HourlyReport.json", new Tuple<string, string>(string.Format("{0:HH-mm}", DateTime.Now), lag.ToString()), 24 * 12, ContainerName);
}
/// <summary>
/// Serializes the current database requests and appends them as one JSON
/// data point to today's DBRequestDetails report blob (up to 50 entries).
/// </summary>
private void CreateReportForRequestDetails()
{
    string serialized = new JavaScriptSerializer().Serialize(GetDBRequest());
    string blobName = "DBRequestDetails" + string.Format("{0:MMdd}", DateTime.Now) + ".json";
    Tuple<string, string> dataPoint = new Tuple<string, string>(String.Format("{0:HH:mm}", DateTime.Now), serialized);
    ReportHelpers.AppendDatatoBlob(StorageAccount, blobName, dataPoint, 50, ContainerName);
}