public void Data_StreamLogs_SerialVsNonSerial_StartOffset()
{
    FilterTests.Retry(retry =>
    {
        var parameters = new LogParameters(null, RecordAge.All, 3000)
        {
            Start = 502
        };

        Logger.LogTestDetail("Retrieving normal logs");
        var normalNoSkip = OrderLogs(client.StreamLogs(RecordAge.All, 3000, true)).ToList();

        Logger.LogTestDetail("Retrieving parallel logs");
        var normal = OrderLogs(client.StreamLogs(parameters)).ToList();

        Logger.LogTestDetail("Retrieving serial logs");
        var serial = OrderLogs(client.StreamLogs(parameters, true)).ToList();

        AssertLogStreamsEqual(normal, serial, true);

        //Sometimes this might fail due to the order of entries at a given datetime being nondeterministic
        try
        {
            //Start is 1-based, so the first record of the offset stream should be record 502,
            //i.e. index 501 of the full, unskipped list
            Assert.IsTrue(PrtgAPIHelpers.LogEqualityComparer().Equals(normalNoSkip[501], serial.First()));
        }
        catch (AssertFailedException)
        {
            //Fall back to accepting any record that shares the first serial record's timestamp
            var time = serial.First().DateTime;
            var matchingTime = normalNoSkip.Where(l => l.DateTime == time).ToList();

            Assert.IsTrue(matchingTime.Any(l => PrtgAPIHelpers.LogEqualityComparer().Equals(l, serial.First())));
        }
    });
}
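
//The OrderLogs helper used throughout these tests is not shown in this section. Below is a minimal
//sketch of what such a helper might look like, assuming it merely imposes a deterministic ordering
//so that parallel and serial streams can be compared; the real implementation may differ.
private IEnumerable<Log> OrderLogs(IEnumerable<Log> logs)
{
    //Order by timestamp, then by object name so that records sharing a timestamp
    //compare in a stable order regardless of how the server interleaved them
    return logs.OrderByDescending(l => l.DateTime).ThenBy(l => l.Name);
}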
public void Data_StreamLogs_SerialVsNonSerial_Normal()
{
    FilterTests.Retry(retry =>
    {
        var normal = OrderLogs(client.StreamLogs(RecordAge.All, 3000)).ToList();
        var serial = OrderLogs(client.StreamLogs(RecordAge.All, 3000, true)).ToList();

        AssertLogStreamsEqual(normal, serial, retry);
    });
}
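
//AssertLogStreamsEqual is a shared helper whose body is also not shown here. A plausible sketch,
//inferred from its call sites, is below: it compares the two ordered streams using the same log
//equality comparer the other tests use. The real helper may differ.
private void AssertLogStreamsEqual(List<Log> normal, List<Log> serial, bool retry)
{
    AssertEx.AreEqualLists(normal, serial, PrtgAPIHelpers.LogEqualityComparer(), "Normal logs were not equal to serial logs", retry);
}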
public void Data_StreamLogs_WithIncorrectPageSize()
{
    FilterTests.Retry(retry =>
    {
        var correctParameters = new LogParameters(null, RecordAge.Today, 15) { PageSize = 5 };
        var automaticParameters = new LogParameters(null, RecordAge.Today, 15) { Start = 0, PageSize = 5 };
        var manualParameters = new LogParameters(null, RecordAge.Today, 5) { Start = 0 };

        //The real logs that exist on the server. This is what all other requests compare against
        var correctLogs = client.GetLogs(correctParameters);

        //What we get when we make the same request with a starting index of 0. We expect GetLogs to return
        //something equivalent to a normal request, but StreamLogs to contain a duplicate at indices 4 and 5
        var automaticLogs = client.GetLogs(automaticParameters);
        var automaticStreamLogs = client.StreamLogs(automaticParameters, true).ToList();

        //What we get when we manually increment the pages of a stream. We expect to end up with a list
        //identical to our streamed list
        var firstManualLogs = client.GetLogs(manualParameters);
        manualParameters.Page++;
        var secondManualLogs = client.GetLogs(manualParameters);
        manualParameters.Page++;
        var thirdManualLogs = client.GetLogs(manualParameters);

        var allManualLogs = new List<Log>();
        allManualLogs.AddRange(firstManualLogs);
        allManualLogs.AddRange(secondManualLogs);
        allManualLogs.AddRange(thirdManualLogs);

        var comparer = PrtgAPIHelpers.LogEqualityComparer();

        AssertEx.AreEqualLists(correctLogs, automaticLogs, comparer, "Correct logs were not equal to off by one logs", retry);
        AssertEx.AreEqualLists(automaticStreamLogs, allManualLogs, comparer, "Streamed off by one logs were not equal to manual logs", retry);

        //The pages overlap by one record, so the last record of the first page reappears
        //as the first record of the second
        Assert.IsTrue(comparer.Equals(automaticStreamLogs[4], automaticStreamLogs[5]));
        Assert.IsTrue(comparer.Equals(allManualLogs[4], allManualLogs[5]));

        //Now check that none of the other elements are equal to each other
        var automaticDiff = automaticStreamLogs.Where((l, i) => i != 4 && i != 5).ToList();
        var manualDiff = allManualLogs.Where((l, i) => i != 4 && i != 5).ToList();

        AssertEx.AllListElementsUnique(automaticDiff, comparer);
        AssertEx.AllListElementsUnique(manualDiff, comparer);
    });
}
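
//The manual paging pattern above (request a page, increment Page, request again) can be generalized.
//The hypothetical helper below is a sketch of that pattern, not part of the real test suite: it
//retrieves a fixed number of pages using whatever page size is already set on the parameters.
private List<Log> GetLogsPaged(LogParameters parameters, int pages)
{
    var results = new List<Log>();

    for (var i = 0; i < pages; i++)
    {
        //Each GetLogs call returns a single page; advancing Page moves to the next one
        results.AddRange(client.GetLogs(parameters));
        parameters.Page++;
    }

    return results;
}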
public void Data_StreamLogs_WithCorrectPageSize()
{
    FilterTests.Retry(retry =>
    {
        Stream_WithCorrectPageSize(
            () => client.GetLogs(RecordAge.Today, 15),
            () => client.StreamLogs(RecordAge.Today, 15, true),
            p => client.GetLogs(p),
            PrtgAPIHelpers.LogEqualityComparer(),
            new LogParameters(null, RecordAge.Today, 5),
            1,
            retry
        );
    });
}
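
//Stream_WithCorrectPageSize is a shared helper whose body is not shown in this section. The sketch
//below illustrates, against the same APIs, the behavior that helper is presumably verifying: when
//the record count divides evenly into pages, manually paged results match a single request exactly,
//with no duplicate records at page boundaries. The helper name and structure here are illustrative.
private void AssertCorrectPageSizeHasNoDuplicates(bool retry)
{
    var all = client.GetLogs(RecordAge.Today, 15);

    //Three pages of five records should reconstruct the full set of fifteen
    var parameters = new LogParameters(null, RecordAge.Today, 5);
    var paged = new List<Log>();

    for (var i = 0; i < 3; i++)
    {
        paged.AddRange(client.GetLogs(parameters));
        parameters.Page++;
    }

    var comparer = PrtgAPIHelpers.LogEqualityComparer();

    AssertEx.AreEqualLists(all, paged, comparer, "Paged logs were not equal to logs retrieved in a single request", retry);
    AssertEx.AllListElementsUnique(paged, comparer);
}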
public void Data_Stream_Parallel_Start_AndStartOffset_MultiplePages()
{
    FilterTests.Retry(retry =>
    {
        var parameters = new LogParameters(Settings.UpSensor, RecordAge.Today, null);

        var expected = client.GetLogs(parameters);

        var start = 130;
        parameters.Start = start;
        parameters.PageSize = 50;

        //Start is 1-based, so skip start - 1 records from the full result set
        StreamLogs(parameters, expected.Skip(start - 1).ToList(), retry);
    });
}
public void Data_Stream_Parallel_Count_AndStartOffset_LessThanAvailable_SinglePage()
{
    FilterTests.Retry(retry =>
    {
        var parameters = new LogParameters(Settings.UpSensor, RecordAge.All, null);

        var expected = client.GetLogs(parameters);

        var count = 100;
        AssertEx.IsTrue(count < expected.Count, $"Expected {count} to be less than {expected.Count}");

        parameters.Count = count;

        StreamLogs(parameters, expected.Take(count).ToList());
    });
}
public void Data_Stream_Serially_Count_AndStartOffset_LessThanAvailable_MultiplePages()
{
    FilterTests.Retry(retry =>
    {
        var parameters = new LogParameters(Settings.UpSensor, RecordAge.All, null);

        var expected = client.GetLogs(parameters);

        var count = 100;
        AssertEx.IsTrue(count < expected.Count, $"Expected {count} to be less than {expected.Count}");

        parameters.Count = count;
        parameters.PageSize = 30;

        StreamLogsSerial(parameters, expected.Take(count).ToList());
    });
}
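
//StreamLogs and StreamLogsSerial are shared helpers whose bodies are not shown in this section.
//Plausible sketches, inferred from their call sites, are below: each streams logs with the given
//parameters (in parallel or serially), orders the results and compares them against the expected
//records. The real helpers may differ.
private void StreamLogs(LogParameters parameters, List<Log> expected, bool retry = false)
{
    var streamed = OrderLogs(client.StreamLogs(parameters)).ToList();

    AssertEx.AreEqualLists(OrderLogs(expected).ToList(), streamed, PrtgAPIHelpers.LogEqualityComparer(), "Streamed logs were not equal to expected logs", retry);
}

private void StreamLogsSerial(LogParameters parameters, List<Log> expected, bool retry = false)
{
    var streamed = OrderLogs(client.StreamLogs(parameters, true)).ToList();

    AssertEx.AreEqualLists(OrderLogs(expected).ToList(), streamed, PrtgAPIHelpers.LogEqualityComparer(), "Serially streamed logs were not equal to expected logs", retry);
}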