public PagedResult<LogMessage> GetLogs(string orderDirection = "Descending", int pageNumber = 1, string filterExpression = null, [FromUri] string[] logLevels = null)
{
    // We will need to stop the request if trying to do this on a 1GB file
    if (CanViewLogs() == false)
    {
        throw new HttpResponseException(Request.CreateNotificationValidationErrorResponse("Unable to view logs, due to size"));
    }

    var direction = orderDirection == "Descending" ? Direction.Descending : Direction.Ascending;

    return _logViewer.GetLogs(
        startDate: DateTime.Now.AddDays(-1),
        endDate: DateTime.Now,
        filterExpression: filterExpression,
        pageNumber: pageNumber,
        orderDirection: direction,
        logLevels: logLevels);
}
public ActionResult<PagedResult<LogMessage>> GetLogs(string orderDirection = "Descending", int pageNumber = 1, string filterExpression = null, [FromQuery(Name = "logLevels[]")] string[] logLevels = null, [FromQuery] DateTime? startDate = null, [FromQuery] DateTime? endDate = null)
{
    var logTimePeriod = GetTimePeriod(startDate, endDate);

    // We will need to stop the request if trying to do this on a 1GB file
    if (CanViewLogs(logTimePeriod) == false)
    {
        return ValidationProblem("Unable to view logs, due to size");
    }

    var direction = orderDirection == "Descending" ? Direction.Descending : Direction.Ascending;

    return _logViewer.GetLogs(
        logTimePeriod,
        filterExpression: filterExpression,
        pageNumber: pageNumber,
        orderDirection: direction,
        logLevels: logLevels);
}
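Both controller actions above call private helpers that are not part of these snippets: GetTimePeriod builds the date range and CanViewLogs guards against opening very large files (the first, older Web API variant uses a parameterless CanViewLogs() overload, which is not sketched here). The following is a minimal sketch only, assuming Umbraco's LogTimePeriod type and an ILogViewer.CheckCanOpenLogs method; the default 24-hour window and the exact member names are assumptions, not the confirmed implementation.

// Sketch only: builds a LogTimePeriod, falling back to the last 24 hours when no dates are supplied.
private LogTimePeriod GetTimePeriod(DateTime? startDate, DateTime? endDate)
{
    var now = DateTime.Now;
    return new LogTimePeriod(startDate ?? now.AddDays(-1), endDate ?? now);
}

// Sketch only: asks the log viewer whether the log files for this period are small enough to open.
private bool CanViewLogs(LogTimePeriod logTimePeriod)
    => _logViewer.CheckCanOpenLogs(logTimePeriod);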
public void Logs_Can_Be_Queried()
{
    var sw = new Stopwatch();
    sw.Start();

    // Should get the most recent 100 log entries, using default overloads for the remaining params
    PagedResult<LogMessage> allLogs = _logViewer.GetLogs(_logTimePeriod, pageNumber: 1);
    sw.Stop();

    // Check we get 100 results back for a page & the total item counts are correct
    Assert.AreEqual(100, allLogs.Items.Count());
    Assert.AreEqual(102, allLogs.TotalItems);
    Assert.AreEqual(2, allLogs.TotalPages);

    // Check all items in the collection are the same object type
    CollectionAssert.AllItemsAreInstancesOfType(allLogs.Items, typeof(LogMessage));

    // Check the first item is the newest
    LogMessage newestItem = allLogs.Items.First();
    DateTimeOffset.TryParse("2018-11-12T08:39:18.1971147Z", out DateTimeOffset newDate);
    Assert.AreEqual(newDate, newestItem.Timestamp);

    // Call the method again with a smaller page size & in ascending order
    PagedResult<LogMessage> smallQuery = _logViewer.GetLogs(_logTimePeriod, pageNumber: 1, pageSize: 10, orderDirection: Direction.Ascending);
    Assert.AreEqual(10, smallQuery.Items.Count());
    Assert.AreEqual(11, smallQuery.TotalPages);

    // Check the first item is the oldest
    LogMessage oldestItem = smallQuery.Items.First();
    DateTimeOffset.TryParse("2018-11-12T08:34:45.8371142Z", out DateTimeOffset oldDate);
    Assert.AreEqual(oldDate, oldestItem.Timestamp);

    // Check invalid log levels
    // Rather than expecting 0 items, we get all items back & the invalid levels are ignored
    string[] invalidLogLevels = { "Invalid", "NotALevel" };
    PagedResult<LogMessage> queryWithInvalidLevels = _logViewer.GetLogs(_logTimePeriod, pageNumber: 1, logLevels: invalidLogLevels);
    Assert.AreEqual(102, queryWithInvalidLevels.TotalItems);

    // Check we can call the method with an array of log levels (Warning & Error)
    string[] logLevels = { "Warning", "Error" };
    PagedResult<LogMessage> queryWithLevels = _logViewer.GetLogs(_logTimePeriod, pageNumber: 1, logLevels: logLevels);
    Assert.AreEqual(7, queryWithLevels.TotalItems);

    // Query @Level='Warning' BUT pass in an array of log levels for Debug & Information (expect 0 results)
    string[] logLevelMismatch = { "Debug", "Information" };
    PagedResult<LogMessage> filterLevelQuery = _logViewer.GetLogs(_logTimePeriod, pageNumber: 1, filterExpression: "@Level='Warning'", logLevels: logLevelMismatch);
    Assert.AreEqual(0, filterLevelQuery.TotalItems);
}
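The test depends on _logViewer and _logTimePeriod fields created elsewhere in the fixture. The following is an illustrative sketch only, assuming an NUnit fixture and a LogTimePeriod wide enough to cover the 102 sample entries dated 2018-11-12; the concrete ILogViewer implementation and how it is wired to the sample log file are not shown in the original snippet.

// Sketch of the fixture state the test assumes (field names match the snippet; values are illustrative).
private ILogViewer _logViewer;        // assumed: created in the real fixture against a sample log file
private LogTimePeriod _logTimePeriod; // must cover the sample entries written on 2018-11-12

[SetUp]
public void Setup()
{
    // Assumption: a one-day window around the sample data's timestamps.
    _logTimePeriod = new LogTimePeriod(new DateTime(2018, 11, 12), new DateTime(2018, 11, 13));
}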
public HttpResponseMessage Export(string orderDirection = "Descending", string filterExpression = null, [FromUri] string[] logLevels = null, [FromUri] DateTime? startDate = null, [FromUri] DateTime? endDate = null)
{
    // Note: the randomised Thread.Sleep below is commented out, so 'sleeps' and 'rand' are unused
    var sleeps = new[] { 1000, 2000, 3000, 4000, 5000, 6000 };
    var rand = new Random();
    //System.Threading.Thread.Sleep(sleeps[rand.Next(sleeps.Length)]);

    try
    {
        var logTimePeriod = GetTimePeriod(startDate, endDate);

        if (CanViewLogs(logTimePeriod) == false)
        {
            throw new HttpResponseException(Request.CreateNotificationValidationErrorResponse("Unable to export logs, due to size"));
        }

        var direction = orderDirection == "Descending" ? Direction.Descending : Direction.Ascending;

        // Page through the full result set, 2000 entries at a time, and aggregate the items
        var items = new List<LogMessage>();
        var pageNumber = 1;
        const int pageSize = 2000;

        var results = _logViewer.GetLogs(logTimePeriod, pageNumber, pageSize, direction, filterExpression, logLevels);
        items.AddRange(results.Items);

        while (pageNumber < results.TotalPages)
        {
            pageNumber++;
            results = _logViewer.GetLogs(logTimePeriod, pageNumber, pageSize, direction, filterExpression, logLevels);
            items.AddRange(results.Items);
        }

        if (!items.Any())
        {
            throw new HttpResponseException(Request.CreateNotificationValidationErrorResponse("Unable to export logs, no messages were found"));
        }

        // Build the spreadsheet in memory and return it as a file download
        var stream = new MemoryStream();
        var filename = $@"log-export-{DateTime.Now:yyyyMMddHHmmss}.xlsx";
        _logExportBuilder.ProcessData(stream, logTimePeriod, items);
        stream.Position = 0;

        var result = new HttpResponseMessage(HttpStatusCode.OK)
        {
            Content = new StreamContent(stream)
        };
        result.Content.Headers.Add("Access-Control-Expose-Headers", "Content-Disposition");
        result.Content.Headers.ContentType = new MediaTypeHeaderValue("application/vnd.openxmlformats-officedocument.spreadsheetml.sheet");
        result.Content.Headers.ContentDisposition = new ContentDispositionHeaderValue("attachment")
        {
            FileName = filename
        };

        return result;
    }
    catch (Exception ex)
    {
        _logger.Error<LogExporterController>("Failed to export", ex);
        throw new HttpResponseException(Request.CreateNotificationValidationErrorResponse("Unable to export logs, internal server error"));
    }
}
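For completeness, a minimal, hypothetical client-side sketch of downloading the exported spreadsheet and saving it to disk. The route string and helper names are assumptions for illustration only; the controller's real routing attributes are not part of the snippet, and the caller must supply an authenticated HttpClient.

using System;
using System.IO;
using System.Net.Http;
using System.Threading.Tasks;

public static class LogExportClient
{
    // Assumption: the Export action is reachable at this relative URL; adjust to the real route.
    private const string ExportUrl = "backoffice/api/LogExporter/Export?orderDirection=Descending";

    public static async Task DownloadAsync(HttpClient client, string targetDirectory)
    {
        using (var response = await client.GetAsync(ExportUrl))
        {
            response.EnsureSuccessStatusCode();

            // The Content-Disposition header carries the generated name, e.g. log-export-20181112083918.xlsx
            var fileName = response.Content.Headers.ContentDisposition?.FileName?.Trim('"') ?? "log-export.xlsx";

            using (var file = File.Create(Path.Combine(targetDirectory, fileName)))
            {
                await response.Content.CopyToAsync(file);
            }
        }
    }
}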