/// <summary>
/// Initializes the builder and resolves, via reflection, the optional lifecycle
/// methods declared on the tester type: the run descriptor, set-up, and tear-down.
/// Each is matched by attribute AND exact signature; a non-matching candidate is
/// silently skipped (FirstOrDefault yields null).
/// </summary>
/// <param name="testSuiteInfo">Suite metadata; must not be null.</param>
/// <exception cref="ArgumentNullException">When <paramref name="testSuiteInfo"/> is null.</exception>
public TestSuiteBuilder(TestSuiteInfo testSuiteInfo)
{
    if (testSuiteInfo == null)
    {
        // nameof keeps the parameter name refactor-safe (was a string literal).
        throw new ArgumentNullException(nameof(testSuiteInfo));
    }

    this.testSuiteInfo = testSuiteInfo;

    // Run descriptor: double Method(int) — maps an iteration index to a plotted x-value.
    this.runDescriptor = testSuiteInfo.TesterType
        .MethodsWith(Flags.AllMembers, typeof(PerfRunDescriptorAttribute))
        .FirstOrDefault(
            m => m.ReturnType == typeof(double)
                 && m.HasParameterSignature(new[] { typeof(int) }));

    // Set-up: void Method(int, <tested abstraction>) — runs before each experiment step.
    this.setUp = testSuiteInfo.TesterType
        .MethodsWith(Flags.AllMembers, typeof(PerfSetUpAttribute))
        .FirstOrDefault(
            m => m.ReturnType == typeof(void)
                 && m.HasParameterSignature(new[] { typeof(int), testSuiteInfo.TestedAbstraction }));

    // Tear-down: void Method(<tested abstraction>) — runs after each experiment step.
    this.tearDown = testSuiteInfo.TesterType
        .MethodsWith(Flags.AllMembers, typeof(PerfTearDownAttribute))
        .FirstOrDefault(
            m => m.ReturnType == typeof(void)
                 && m.HasParameterSignature(new[] { testSuiteInfo.TestedAbstraction }));
}
/// <summary>
/// Creates the tree node for one test method: it collects every test in the
/// suite with that method name and exposes each as a child (tested-type) node.
/// The node is enabled only when at least one child exists.
/// </summary>
public TestNodeViewModel(TesterNodeViewModel parent, PerfLab perfLab, TestSuiteInfo testSuite, string testMethodName)
    : base(parent)
{
    this.PerfLab = perfLab;
    this.TestSuite = testSuite;
    this.Name = testMethodName;

    // All tests that share this method name (one per tested type).
    this.Tests = testSuite.Tests.Where(x => x.TestMethodName == testMethodName).ToArray();

    // One child node per tested type, unchecked by default.
    var childNodes = this.Tests.Select(
        testInfo => new TestedTypeNodeViewModel(this, perfLab, testInfo) { IsChecked = false });
    this.Children.AddRange(childNodes);

    this.IsEnabled = this.Children.Count > 0;
}
/// <summary>
/// Creates the root node for a tester type: the display name falls back to the
/// tester's full type name when the suite has no description, and one child
/// node is created per distinct test method name in the suite.
/// </summary>
public TesterNodeViewModel(PerfLab lab, TestSuiteInfo testSuiteInfo) : base(null)
{
    // Prefer the human-readable suite description; fall back to the CLR type name.
    var descr = testSuiteInfo.TestSuiteDescription;
    this.Name = string.IsNullOrEmpty(descr) ? testSuiteInfo.TesterType.FullName : descr;
    this.TestSuiteInfo = testSuiteInfo;
    this.Lab = lab;
    // Group by method name so each test method becomes a single child node,
    // regardless of how many tested types it runs against.
    this.Children.AddRange(
        testSuiteInfo.Tests.GroupBy(x => x.TestMethodName)
            .Select(x => new TestNodeViewModel(this, lab, testSuiteInfo, x.Key)));
    // NOTE(review): this subscription is never disposed here — if these nodes are
    // ever created/destroyed repeatedly, the MessageBus keeps them alive; confirm
    // the intended lifetime (root nodes living for the app's duration would be fine).
    MessageBus.Current.Listen<ChartRemoved>().Subscribe(OnChartRemoved);
    this.IsEnabled = this.Children.Count > 0;
}
/// <summary>
/// Runs every test in the suite over the feature range [start..end] with the
/// given step. When <paramref name="parallel"/> is false each experiment process
/// is awaited before the next starts; when true they run concurrently.
/// </summary>
public static IObservable<PerfTestResult> Run(TestSuiteInfo testSuiteInfo, int start, int step, int end, PerfTestConfiguration configuration, bool parallel = false)
{
    // No filtering: every test in the suite participates.
    Predicate<TestInfo> acceptAll = _ => true;
    // Sequential mode waits for each process to exit (last argument).
    Action<ExperimentProcess> launch = process => process.Start(start, step, end, !parallel);
    return CreateRunObservable(testSuiteInfo, acceptAll, launch, configuration, parallel);
}
/// <summary>
/// Validates a candidate test method and materializes its <c>TestInfo</c>.
/// The method must carry <c>PerfTestAttribute</c>, return void, and take exactly
/// one parameter of the tested abstraction type. A <c>PerfIgnoreAttribute</c>
/// yields a <c>TestInfoIgnored</c> carrying the ignore message instead.
/// </summary>
/// <param name="id">Unique id assigned to this test.</param>
/// <param name="method">Candidate test method on the tester type.</param>
/// <param name="testedAbstraction">Closed abstraction type (e.g. IList&lt;int&gt;).</param>
/// <param name="suiteInfo">Owning suite metadata.</param>
/// <param name="testedType">Concrete implementation type, possibly an open generic.</param>
/// <exception cref="ArgumentNullException">When the method lacks PerfTestAttribute.</exception>
/// <exception cref="ArgumentException">When the method's signature is not void(testedAbstraction).</exception>
private static TestInfo GetTestInfo(Guid id, MethodInfo method, Type testedAbstraction, TestSuiteInfo suiteInfo, Type testedType)
{
    CheckTestability(suiteInfo.TesterType, testedType);

    var testAttribute = method.Attribute<PerfTestAttribute>();
    if (testAttribute == null)
    {
        // Exception type kept for caller compatibility; nameof makes the
        // parameter name refactor-safe (was a string literal).
        throw new ArgumentNullException(nameof(method));
    }

    // A perf test must be: void TestMethod(<tested abstraction>).
    if (method.ReturnType != typeof(void) || !method.HasParameterSignature(new[] { testedAbstraction }))
    {
        throw new ArgumentException("Incorrect parameter signature", nameof(method));
    }

    if (testedType.IsGenericType)
    {
        // Fill the generic type arguments of the loaded generic type
        // with the tested abstraction interface actual generic type arguments.
        // Example: tested abstraction = IList<int>, tested type = List<T>
        // This line converts List<T> in List<int>
        testedType = testedType.MakeGenericType(testedAbstraction.GetGenericArguments());
    }

    TestInfo result;
    var ignoreAttribute = method.Attribute<PerfIgnoreAttribute>();
    if (ignoreAttribute == null)
    {
        result = new TestInfo
        {
            TestId = id,
            TestDescription = testAttribute.Description,
            TestMethodName = method.Name,
            TestedType = testedType,
            Suite = suiteInfo
        };
    }
    else
    {
        // Ignored tests keep full metadata plus the reason, so UIs can show why.
        result = new TestInfoIgnored
        {
            TestId = id,
            TestDescription = testAttribute.Description,
            TestMethodName = method.Name,
            IgnoreMessage = ignoreAttribute.Message,
            TestedType = testedType,
            Suite = suiteInfo
        };
    }

    return result;
}
/// <summary>
/// Builds a cold observable that, on subscription: compiles the suite into a
/// temporary assembly, spawns one <c>ExperimentProcess</c> per test passing
/// <paramref name="testFilter"/>, and streams their results. Sequential mode
/// concatenates per-experiment listeners (one finishes before the next starts);
/// parallel mode merges them via a nested from-query. Disposing the
/// subscription tears down processes and deletes the temporary assembly.
/// </summary>
/// <param name="testSuiteInfo">Suite whose tests are run.</param>
/// <param name="testFilter">Selects which tests participate.</param>
/// <param name="startProcess">Callback that actually launches each experiment process.</param>
/// <param name="configuration">Per-experiment configuration passed to every process.</param>
/// <param name="parallel">False: run experiments one after another; true: concurrently.</param>
private static IObservable<PerfTestResult> CreateRunObservable(TestSuiteInfo testSuiteInfo, Predicate<TestInfo> testFilter, Action<ExperimentProcess> startProcess, PerfTestConfiguration configuration, bool parallel = false)
{
    return Observable.Create<PerfTestResult>(
        observer =>
        {
            // Compile the suite once; every experiment process loads this assembly.
            var assemblyLocation = BuildTestSuiteAssembly(testSuiteInfo);
            var processes = new MultiExperimentProcess(
                (from testMethod in testSuiteInfo.Tests
                 where testFilter(testMethod)
                 select new ExperimentProcess(
                     // Display name, e.g. "ListTester.Add(List`1)".
                     string.Format(
                         "{0}.{1}({2})",
                         testSuiteInfo.TesterType.Name,
                         testMethod.TestMethodName,
                         testMethod.TestedType.Name),
                     assemblyLocation,
                     TestSuiteCodeBuilder.TestSuiteClassName,
                     testSuiteInfo.TesterType,
                     testMethod.TestedType,
                     testMethod.TestMethodName,
                     configuration)).ToArray());

            var listeners = Observable.Empty<PerfTestResult>();
            if (!parallel)
            {
                // Sequential: chain listeners with Concat so each experiment only
                // starts after the previous one completes.
                listeners = processes.Experiments.Aggregate(
                    listeners,
                    (current, experiment) => current.Concat(
                        new SingleExperimentListener(experiment, startProcess)));
            }
            else
            {
                // Parallel: subscribe to all listeners at once and merge results.
                listeners = from experiment in processes.Experiments.ToObservable()
                            from result in new SingleExperimentListener(experiment, startProcess)
                            select result;
            }

            IDisposable subscription = null;
            subscription = listeners.SubscribeSafe(observer);

            // Teardown order matters: stop forwarding results, kill processes,
            // then delete the temp assembly (no longer locked by any process).
            return Disposable.Create(
                () =>
                {
                    if (subscription != null)
                    {
                        subscription.Dispose();
                        subscription = null;
                    }
                    processes.Dispose();
                    if (!string.IsNullOrEmpty(assemblyLocation))
                    {
                        File.Delete(assemblyLocation);
                    }
                });
        });
}
/// <summary>
/// Generates and builds the test-suite assembly for the given suite,
/// returning the location of the produced assembly.
/// </summary>
private static string BuildTestSuiteAssembly(TestSuiteInfo testSuiteInfo)
{
    return new TestSuiteBuilder(testSuiteInfo).Build();
}
/// <summary>
/// Wires up the chart view-model for one suite: two OxyPlot models (time and
/// memory vs. the suite's feature axis), the Start/Stop reactive commands with
/// error handling, axis-type switching (linear/log), and a MessageBus listener
/// that appends incoming experiment results as points or reports errors.
/// </summary>
public ChartViewModel(PerfLab lab, TestSuiteInfo suiteInfo)
{
    this.perfTestConfiguration = new PerfTestConfiguration(Settings.Default.IgnoreFirstRunDueToJITting, Settings.Default.TriggerGCBeforeEachTest);
    this.Title = suiteInfo.TestSuiteDescription;
    Lab = lab;
    TestSuiteInfo = suiteInfo;

    // Default experiment range: 1 .. suite default, step 1.
    this.StartValue = 1;
    this.EndValue = suiteInfo.DefaultTestCount;
    this.StepValue = 1;

    // Two plots sharing the same x-axis (the suite's feature description).
    SpeedPlotModel = new PlotModel(string.Format("\"{0}\": Time characteristics", suiteInfo.TestSuiteDescription));
    SpeedPlotModel.Axes.Clear();
    SpeedPlotModel.Axes.Add(new LinearAxis(AxisPosition.Bottom, suiteInfo.FeatureDescription));
    MemoryPlotModel = new PlotModel(string.Format("\"{0}\": Memory usage", suiteInfo.TestSuiteDescription));
    MemoryPlotModel.Axes.Clear();
    MemoryPlotModel.Axes.Add(new LinearAxis(AxisPosition.Bottom, suiteInfo.FeatureDescription));

    // One line series per test, created lazily as results arrive.
    memorySeries = new Dictionary<TestInfo, LineSeries>();
    speedSeries = new Dictionary<TestInfo, LineSeries>();
    IsLinear = true;
    IsStarted = false;

    var errorHandler = IoC.Instance.Resolve<ErrorHandler>();

    // Start commands are enabled while stopped; Stop while started.
    var whenStarted = this.WhenAny(x => x.IsStarted, x => x.Value);
    this.StartSequential = new ReactiveAsyncCommand(whenStarted.Select(x => !x));
    StartSequential.RegisterAsyncAction(OnStartSequential, RxApp.DeferredScheduler);
    errorHandler.HandleErrors(this.StartSequential);
    this.StartParallel = new ReactiveAsyncCommand(whenStarted.Select(x => !x));
    StartParallel.RegisterAsyncAction(OnStartParallel, RxApp.DeferredScheduler);
    errorHandler.HandleErrors(this.StartParallel);
    this.Stop = new ReactiveAsyncCommand(whenStarted);
    Stop.RegisterAsyncAction(OnStop, RxApp.DeferredScheduler);
    errorHandler.HandleErrors(this.Stop);

    // Toggle both plots between linear and logarithmic axes.
    this.WhenAny(x => x.IsLinear, x => x.Value ?
    EAxisType.Linear : EAxisType.Logarithmic)
        .Subscribe(SetAxisType);

    // Route incoming results for OUR tests only (matched by TestId) to the plots.
    // NOTE(review): 'tests' is a field populated elsewhere (presumably by
    // ConnectIterationAndDescriptors or test selection) — confirm it is set
    // before results arrive. This subscription is never disposed here; confirm
    // the view-model's lifetime makes that acceptable.
    MessageBus.Current.Listen<PerfTestResult>()
        .Where(x => tests.FirstOrDefault(t => t.TestId.Equals(x.TestId)) != null)
        .Subscribe(
            res =>
            {
                var nextRes = res as NextResult;
                var errorRes = res as ExperimentError;
                if (nextRes != null)
                {
                    // Successful data point: add to both memory and speed series.
                    AddPoint(memorySeries, MemoryPlotModel, tests.First(x => x.TestId.Equals(res.TestId)), nextRes.Descriptor, nextRes.MemoryUsage);
                    AddPoint(speedSeries, SpeedPlotModel, tests.First(x => x.TestId.Equals(res.TestId)), nextRes.Descriptor, nextRes.Duration);
                }
                if (errorRes != null)
                {
                    // Report experiment failure off the UI path.
                    var test = tests.FirstOrDefault(x => x.TestId.Equals(errorRes.TestId));
                    Task.Factory.StartNew(() => errorHandler.ReportExperimentError(errorRes, test));
                }
            },
            errorHandler.ReportException);

    ConnectIterationAndDescriptors();
}