/// <summary>
/// Optionally prints the machine specifications, then writes the HTML latency report
/// and appends the worst 99th-percentile value across all runs to a daily CSV totals file.
/// </summary>
/// <param name="options">Command-line options controlling printing, report generation and opening.</param>
public void Report(Program.Options options)
{
    var computerSpecifications = new ComputerSpecifications();
    if (options.ShouldPrintComputerSpecifications)
    {
        Console.WriteLine();
        Console.Write(computerSpecifications.ToString());
    }

    if (!options.ShouldGenerateReport)
    {
        return;
    }

    // HH (24-hour clock) instead of hh: the 12-hour pattern has no AM/PM designator,
    // so a 13:00 report would collide with a 01:00 report and filenames would not sort chronologically.
    var path = Path.Combine(Environment.CurrentDirectory, _perfTestType.Name + "-" + DateTime.Now.ToString("yyyy-MM-dd HH-mm-ss") + ".html");
    File.WriteAllText(path, BuildReport(computerSpecifications));

    var totalsPath = Path.Combine(Environment.CurrentDirectory, $"Totals-{DateTime.Now:yyyy-MM-dd}.csv");
    // Invariant culture keeps the CSV machine-readable regardless of host locale,
    // consistent with the throughput session's Report method.
    File.AppendAllText(totalsPath, FormattableString.Invariant($"{DateTime.Now:HH:mm:ss},{_perfTestType.Name},{_results.Max(x => x.Histogram.GetValueAtPercentile(99))}\n"));

    if (options.ShouldOpenReport)
    {
        Process.Start(path);
    }
}
/// <summary>
/// Optionally prints the machine specifications, then writes the HTML throughput report
/// and appends the average operations-per-second across all runs to a daily CSV totals file.
/// </summary>
/// <param name="options">Command-line options controlling printing, report generation and opening.</param>
public void Report(Program.Options options)
{
    var computerSpecifications = new ComputerSpecifications();
    if (options.ShouldPrintComputerSpecifications)
    {
        Console.WriteLine();
        Console.Write(computerSpecifications.ToString());
    }

    if (!options.ShouldGenerateReport)
    {
        return;
    }

    // HH (24-hour clock) instead of hh: the 12-hour pattern has no AM/PM designator,
    // so a 13:00 report would collide with a 01:00 report and filenames would not sort chronologically.
    // NOTE(review): the filename uses DateTime.UtcNow while the CSV rows below use local
    // DateTime.Now — confirm whether this mix is intentional.
    var path = Path.Combine(Environment.CurrentDirectory, _perfTestType.Name + "-" + DateTime.UtcNow.ToString("yyyy-MM-dd HH-mm-ss") + ".html");
    File.WriteAllText(path, BuildReport(computerSpecifications));

    var totalsPath = Path.Combine(Environment.CurrentDirectory, $"Totals-{DateTime.Now:yyyy-MM-dd}.csv");
    var average = _results.Average(x => x.TotalOperationsInRun / x.Duration.TotalSeconds);
    // Invariant culture keeps the CSV machine-readable regardless of host locale.
    File.AppendAllText(totalsPath, FormattableString.Invariant($"{DateTime.Now:HH:mm:ss},{_perfTestType.Name},{average}\n"));

    if (options.ShouldOpenReport)
    {
        Process.Start(path);
    }
}
/// <summary>
/// Executes the throughput test the configured number of times (default 7),
/// recording the operation count, elapsed time and per-generation GC collection
/// counts for each run into <c>_results</c>.
/// </summary>
/// <param name="options">Command-line options; a non-null RunCount overrides the default.</param>
public void Run(Program.Options options)
{
    _runCount = options.RunCount ?? 7;
    Console.WriteLine($"Throughput Test to run => {_perfTestType.FullName}, Runs => {_runCount}");

    _test = (IThroughputTest)Activator.CreateInstance(_perfTestType);
    CheckProcessorsRequirements(_test);

    Console.WriteLine("Starting");

    var context = new ThroughputSessionContext();

    for (var run = 0; run < _runCount; run++)
    {
        // Start each run from a clean heap so the per-run GC deltas are comparable.
        GC.Collect();
        GC.WaitForPendingFinalizers();
        context.Reset();

        var gen0Before = GC.CollectionCount(0);
        var gen1Before = GC.CollectionCount(1);
        var gen2Before = GC.CollectionCount(2);

        long operationCount = 0;
        Exception error = null;
        try
        {
            operationCount = _test.Run(context);
        }
        catch (Exception ex)
        {
            error = ex;
        }

        // A failed run is recorded as a result carrying the exception; a successful
        // run captures the GC collection deltas observed during the test.
        var result = error == null
            ? new ThroughputTestSessionResult(
                operationCount,
                context.Stopwatch.Elapsed,
                GC.CollectionCount(0) - gen0Before,
                GC.CollectionCount(1) - gen1Before,
                GC.CollectionCount(2) - gen2Before,
                context)
            : new ThroughputTestSessionResult(error);

        Console.WriteLine(result);
        _results.Add(result);
    }
}
/// <summary>
/// Executes the latency test the configured number of times (default 3),
/// recording the latency histogram, elapsed time and per-generation GC collection
/// counts for each run into <c>_results</c>.
/// </summary>
/// <param name="options">Command-line options; a non-null RunCount overrides the default.</param>
public void Run(Program.Options options)
{
    _runCount = options.RunCount ?? 3;
    Console.WriteLine($"Latency Test to run => {_perfTestType.FullName}, Runs => {_runCount}");

    _test = (ILatencyTest)Activator.CreateInstance(_perfTestType);
    CheckProcessorsRequirements(_test);

    Console.WriteLine("Starting");

    var stopwatch = new Stopwatch();
    // Single histogram reused across runs (values up to 10^10, 4 significant digits);
    // presumably the recorded values are in stopwatch ticks or nanoseconds — confirm against ILatencyTest implementations.
    var histogram = new LongHistogram(10000000000L, 4);

    for (var run = 0; run < _runCount; run++)
    {
        stopwatch.Reset();
        histogram.Reset();

        // Start each run from a clean heap so the per-run GC deltas are comparable.
        GC.Collect();
        GC.WaitForPendingFinalizers();

        var gen0Before = GC.CollectionCount(0);
        var gen1Before = GC.CollectionCount(1);
        var gen2Before = GC.CollectionCount(2);

        Exception error = null;
        try
        {
            _test.Run(stopwatch, histogram);
        }
        catch (Exception ex)
        {
            error = ex;
        }

        // A failed run is recorded as a result carrying the exception; a successful
        // run captures the GC collection deltas observed during the test.
        var result = error == null
            ? new LatencyTestSessionResult(
                histogram,
                stopwatch.Elapsed,
                GC.CollectionCount(0) - gen0Before,
                GC.CollectionCount(1) - gen1Before,
                GC.CollectionCount(2) - gen2Before)
            : new LatencyTestSessionResult(error);

        Console.WriteLine(result);
        _results.Add(result);
    }
}