Пример #1
0
        /// <summary>
        /// Creates a <see cref="TestContext"/> bound to a message logger and
        /// publishes it as the current context via the static context slot.
        /// </summary>
        /// <param name="messageLogger">Logger the context writes messages to.</param>
        /// <param name="settings">Adapter settings the context is created with.</param>
        /// <returns>The newly started context.</returns>
        public static TestContext Start(IMessageLogger messageLogger, AdapterSettings settings)
        {
            var started = new TestContext(messageLogger, settings);
            TestContext.context.Value = started;
            return started;
        }
Пример #2
0
        /// <summary>
        /// Creates a <see cref="TestContext"/> for a single performance test case
        /// and publishes it as the current context via the static context slot.
        /// </summary>
        /// <param name="testCase">The test case this context belongs to.</param>
        /// <param name="settings">Adapter settings the context is created with.</param>
        /// <returns>The newly started context.</returns>
        public static TestContext Start(PerformanceTestCase testCase, AdapterSettings settings)
        {
            var started = new TestContext(testCase, settings);
            TestContext.context.Value = started;
            return started;
        }
Пример #3
0
 /// <summary>
 /// Builds a logger-backed context: rents a StringBuilder from the shared
 /// pool, copies the test properties from the settings and stores the logger.
 /// </summary>
 /// <param name="messageLogger">Logger used for context output.</param>
 /// <param name="settings">Source of the test properties.</param>
 private TestContext(IMessageLogger messageLogger, AdapterSettings settings)
 {
     this.messageLogger = messageLogger;
     this.Properties = settings.TestProperties;
     this.sb = stringBuilderPool.Get();
     IsWarmup = false;
 }
Пример #4
0
        /// <summary>
        /// Builds a test-case-backed context: rents a StringBuilder from the
        /// shared pool, copies the test properties and stores the test case.
        /// </summary>
        /// <param name="testCase">The test case this context belongs to.</param>
        /// <param name="settings">Source of the test properties.</param>
        private TestContext(PerformanceTestCase testCase, AdapterSettings settings)
        {
            TestCase = testCase;
            Properties = settings.TestProperties;
            IsWarmup = false;
            sb = stringBuilderPool.Get();
        }
Пример #5
0
 /// <summary>
 /// Stores the adapter settings and caches their verbosity level on the logger.
 /// </summary>
 /// <param name="settings">Settings to apply to this logger.</param>
 /// <returns>This logger, so the call can be chained.</returns>
 public TestLogger InitSettings(AdapterSettings settings)
 {
     this.adapterSettings = settings;
     this.Verbosity = settings.Verbosity;
     return this;
 }
Пример #6
0
        /// <summary>
        /// Executes the selected performance tests, records outcomes through the
        /// framework handle, and writes a per-iteration CSV report under
        /// "&lt;cwd&gt;/TestResults/&lt;user&gt;_&lt;machine&gt;_&lt;timestamp&gt;.csv".
        /// </summary>
        /// <param name="tests">Test cases selected by the runner; enumerated exactly once.</param>
        /// <param name="runContext">Provides the run-settings XML.</param>
        /// <param name="frameworkHandle">Callback sink for start/end/result notifications.</param>
        public void RunTests(IEnumerable <TestCase> tests, IRunContext runContext, IFrameworkHandle frameworkHandle)
        {
            // Materialize once up front: `tests` was previously enumerated three
            // times, which is unsafe for a lazily produced sequence.
            var testList = tests.ToList();

            var dir    = Directory.GetCurrentDirectory();
            var folder = Path.Combine(dir, "TestResults");

            Directory.CreateDirectory(folder);
            var file = Path.Combine(folder, $"{Environment.UserName}_{Environment.MachineName}_{DateTime.Now:yyyy-MM-dd_HH_mm_ss}.csv");

            // File.Create truncates any existing file; File.OpenWrite would leave
            // stale trailing bytes if a longer report already existed at this path.
            using (var fs = File.Create(file))
                using (var tw = new StreamWriter(fs))
                {
                    tw.WriteCsvLine("Test Name", "Iteration", "Is Warmup", "Duration", "Iteration Status", "Run Status");

                    var logger   = new TestLogger(frameworkHandle);
                    var settings = new AdapterSettings(logger);
                    settings.Load(runContext.RunSettings.SettingsXml);
                    logger.InitSettings(settings);

                    frameworkHandle.EnableShutdownAfterTestRun = true;
                    var toRun = Convert(testList);

                    // Any selected case that could not be mapped to a runnable test
                    // is reported as NotFound so the runner does not hang on it.
                    var missing = testList.Except(toRun.Select(x => x.testCase));
                    foreach (var m in missing)
                    {
                        frameworkHandle.RecordEnd(m, TestOutcome.NotFound);
                    }

                    var lifecycleEvents = TestLifeCyclesCallbacks(testList.Select(x => x.Source).Distinct().ToList());
                    var beforeAll       = lifecycleEvents.OfType <ITestLifecycleBeforeAllTests>().ToArray();
                    var afterAll        = lifecycleEvents.OfType <ITestLifecycleAfterAllTests>().ToArray();

                    // Run the "before all" hooks inside their own (global) context.
                    using (var globalCtx = TestContext.Start(frameworkHandle, settings))
                    {
                        foreach (var evnt in beforeAll)
                        {
                            evnt.BeforeAllTests(globalCtx).GetAwaiter().GetResult();
                        }
                    }

                    foreach (var t in toRun)
                    {
                        var testResult = new TestResult(t.testCase);
                        if (t.perfTest.Skipped)
                        {
                            testResult.Outcome = TestOutcome.Skipped;
                            frameworkHandle.RecordResult(testResult);

                            // Six values to match the six-column header; the previous
                            // version wrote only five, shifting "Skipped" into the
                            // "Iteration Status" column.
                            tw.WriteCsvLine(t.perfTest.Name, "-", "-", "-", "-", "Skipped");
                            continue;
                        }
                        frameworkHandle.RecordStart(t.testCase);
                        using (var context = TestContext.Start(t.perfTest, settings))
                        {
                            var sw   = Stopwatch.StartNew();
                            var task = t.perfTest.ExecuteAsync(context);

                            Task.WaitAll(task);
                            sw.Stop();
                            var result = task.Result;

                            // Per-iteration errors are aggregated into a single
                            // message/stack-trace pair on the test result.
                            var errors = result.Select(x => x.Error).Where(x => x != null).ToList();
                            if (errors.Any())
                            {
                                testResult.ErrorStackTrace = string.Join("\n\n-------\n\n", errors.Select(x => x.StackTrace));
                                testResult.ErrorMessage    = string.Join("\n\n-------\n\n", errors.Select(x => x.Message));

                                testResult.Outcome = TestOutcome.Failed;
                            }
                            else
                            {
                                testResult.Outcome = TestOutcome.Passed;
                            }

                            // CSV rows: warmup iterations first, then measured runs,
                            // each group numbered from 1.
                            int counter = 0;
                            foreach (var r in result.Where(x => x.IsWarmup))
                            {
                                tw.WriteCsvLine(t.perfTest.Name, ++counter, r.IsWarmup, r.Duration.TotalSeconds, r.Error == null ? TestOutcome.Passed : TestOutcome.Failed, testResult.Outcome);
                            }
                            counter = 0;
                            foreach (var r in result.Where(x => !x.IsWarmup))
                            {
                                tw.WriteCsvLine(t.perfTest.Name, ++counter, r.IsWarmup, r.Duration.TotalSeconds, r.Error == null ? TestOutcome.Passed : TestOutcome.Failed, testResult.Outcome);
                            }

                            // Wall-clock duration for the whole test (all iterations).
                            testResult.Duration = sw.Elapsed;

                            var runs    = result.Where(x => x.IsWarmup == false).Select(x => x.Duration);
                            var warmups = result.Where(x => x.IsWarmup == true).Select(x => x.Duration);

                            var mean = TimeSpanStatistics.Mean(runs);
                            var standardDeviation = TimeSpanStatistics.StandardDeviation(runs);

                            // Human-readable summary attached to the test's stdout.
                            var msg = $@"Warm up Count : {warmups.Count()}
Warm up Duration : {new TimeSpan(warmups.Sum(x => x.Ticks))}
Executed : {runs.Count()}
Mean Duration: {mean}
Standard Deviation Duration: {standardDeviation}
";

                            testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, msg));
                            testResult.Messages.Add(new TestResultMessage(TestResultMessage.StandardOutCategory, context.Output));

                            foreach (var r in result.Where(x => !string.IsNullOrWhiteSpace(x.Output)))
                            {
                                testResult.Messages.Add(new TestResultMessage(TestResultMessage.AdditionalInfoCategory, r.Output));
                            }

                            frameworkHandle.RecordResult(testResult);
                        }
                    }

                    // Run the "after all" hooks inside a fresh global context.
                    using (var globalCtx = TestContext.Start(frameworkHandle, settings))
                    {
                        foreach (var evnt in afterAll)
                        {
                            evnt.AfterAllTests(globalCtx).GetAwaiter().GetResult();
                        }
                    }
                }
        }