Example #1
        public static BenchmarkSettings CreateSettingsForBenchmark(BenchmarkClassMetadata benchmarkClass)
        {
            var allBenchmarkMethodAttributes = benchmarkClass.Run.InvocationMethod.GetCustomAttributes().ToList();

            var performanceTestAttribute =
                allBenchmarkMethodAttributes.Single(a => a is PerfBenchmarkAttribute) as
                    PerfBenchmarkAttribute;
            Contract.Assert(performanceTestAttribute != null);

            var memorySettings =
                allBenchmarkMethodAttributes.Where(a => a is MemoryMeasurementAttribute)
                    .Cast<MemoryMeasurementAttribute>()
                    .Select(CreateBenchmarkSetting)
                    .ToList();
            var counterBenchmarkSettings =
                allBenchmarkMethodAttributes.Where(a => a is CounterMeasurementAttribute)
                    .Cast<CounterMeasurementAttribute>()
                    .Select(CreateBenchmarkSetting)
                    .ToList();
            var gcBenchmarkSettings =
                allBenchmarkMethodAttributes.Where(a => a is GcMeasurementAttribute)
                    .Cast<GcMeasurementAttribute>()
                    .Select(CreateBenchmarkSetting)
                    .ToList();

            return new BenchmarkSettings(performanceTestAttribute.TestMode, performanceTestAttribute.RunMode,
                performanceTestAttribute.NumberOfIterations, performanceTestAttribute.RunTimeMilliseconds,
                gcBenchmarkSettings, memorySettings, counterBenchmarkSettings, performanceTestAttribute.Description,
                performanceTestAttribute.Skip);
        }
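For context, the factory above looks only at the attributes applied to the benchmark method itself. A minimal sketch of a method it would pick up might look like the following; the PerfBenchmark properties mirror the ones read above, while the concrete metric enums passed to the measurement attributes (MemoryMetric.TotalBytesAllocated, GcMetric.TotalCollections, GcGeneration.AllGc) are assumptions based on NBench's public attribute API rather than anything shown in this example:

using NBench;

public class SampleBenchmarks
{
    // Hypothetical benchmark method; the attribute types match the ones
    // filtered for in CreateSettingsForBenchmark above.
    [PerfBenchmark(NumberOfIterations = 3, RunMode = RunMode.Iterations,
        TestMode = TestMode.Measurement, Description = "Illustrative benchmark")]
    [MemoryMeasurement(MemoryMetric.TotalBytesAllocated)]
    [GcMeasurement(GcMetric.TotalCollections, GcGeneration.AllGc)]
    public void Run()
    {
        System.Threading.Thread.SpinWait(10000);
    }
}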
Example #2
        public static BenchmarkSettings CreateSettingsForBenchmark(BenchmarkClassMetadata benchmarkClass)
        {
            var allBenchmarkMethodAttributes = benchmarkClass.Run.InvocationMethod.GetCustomAttributes().ToList();

            var performanceTestAttribute =
                allBenchmarkMethodAttributes.Single(a => a is PerfBenchmarkAttribute) as
                PerfBenchmarkAttribute;

            Contract.Assert(performanceTestAttribute != null);

            var memorySettings =
                allBenchmarkMethodAttributes.Where(a => a is MemoryMeasurementAttribute)
                .Cast<MemoryMeasurementAttribute>()
                .Select(CreateBenchmarkSetting)
                .ToList();
            var counterBenchmarkSettings =
                allBenchmarkMethodAttributes.Where(a => a is CounterMeasurementAttribute)
                .Cast<CounterMeasurementAttribute>()
                .Select(CreateBenchmarkSetting)
                .ToList();
            var gcBenchmarkSettings =
                allBenchmarkMethodAttributes.Where(a => a is GcMeasurementAttribute)
                .Cast<GcMeasurementAttribute>()
                .Select(CreateBenchmarkSetting)
                .ToList();

            return new BenchmarkSettings(performanceTestAttribute.TestMode, performanceTestAttribute.RunMode,
                performanceTestAttribute.NumberOfIterations, performanceTestAttribute.RunTimeMilliseconds,
                gcBenchmarkSettings, memorySettings, counterBenchmarkSettings, performanceTestAttribute.Description,
                performanceTestAttribute.Skip);
        }
Example #3
        public BenchmarkSettings CreateSettingsForBenchmark(BenchmarkClassMetadata benchmarkClass)
        {
            var allBenchmarkMethodAttributes = benchmarkClass.Run.InvocationMethod.GetCustomAttributes().ToList();

            var performanceTestAttribute =
                allBenchmarkMethodAttributes.Single(a => a is PerfBenchmarkAttribute) as
                PerfBenchmarkAttribute;

            Contract.Assert(performanceTestAttribute != null);

            var allMeasurementAttributes =
                allBenchmarkMethodAttributes.Where(a => MeasurementAttributeType.IsInstanceOfType(a)).Cast<MeasurementAttribute>();

            var measurements = new List<IBenchmarkSetting>();
            var collectors   = new Dictionary<MetricName, MetricsCollectorSelector>();

            foreach (var measurement in allMeasurementAttributes)
            {
                var configurator = GetConfiguratorForMeasurement(measurement.GetType());
                if (configurator is MeasurementConfigurator.EmptyConfigurator)
                {
                    Output.Warning($"Unable to find valid configurator for {measurement} - skipping...");
                    continue;
                }

                var benchmarkSettings = configurator.GetBenchmarkSettings(measurement);
                var selector          = configurator.GetMetricsProvider(measurement);
                foreach (var setting in benchmarkSettings)
                {
                    var name = setting.MetricName;
                    measurements.Add(setting);
                    collectors[name] = selector;
                }
            }

            // TODO: need to start packing more of these settings in as properties rather than constructor arguments;
            // it's becoming unsustainable given the number of different things we need to pass in here
            return new BenchmarkSettings(performanceTestAttribute.TestMode, performanceTestAttribute.RunMode,
                performanceTestAttribute.NumberOfIterations, performanceTestAttribute.RunTimeMilliseconds,
                measurements, collectors, performanceTestAttribute.Description,
                performanceTestAttribute.Skip, Trace, RunnerSettings.ConcurrentModeEnabled)
            {
                SkipWarmups = performanceTestAttribute.SkipWarmups
            };
        }
Example #4
        public BenchmarkSettings CreateSettingsForBenchmark(BenchmarkClassMetadata benchmarkClass)
        {
            var allBenchmarkMethodAttributes = benchmarkClass.Run.InvocationMethod.GetCustomAttributes().ToList();

            var performanceTestAttribute =
                allBenchmarkMethodAttributes.Single(a => a is PerfBenchmarkAttribute) as
                PerfBenchmarkAttribute;

            Contract.Assert(performanceTestAttribute != null);

            var allMeasurementAttributes =
                allBenchmarkMethodAttributes.Where(a => MeasurementAttributeType.IsInstanceOfType(a)).Cast<MeasurementAttribute>();

            var measurements = new List<IBenchmarkSetting>();
            var collectors   = new Dictionary<MetricName, MetricsCollectorSelector>();

            foreach (var measurement in allMeasurementAttributes)
            {
                var configurator = GetConfiguratorForMeasurement(measurement.GetType());
                if (configurator is MeasurementConfigurator.EmptyConfigurator)
                {
                    Output.Warning($"Unable to find valid configurator for {measurement} - skipping...");
                    continue;
                }

                var benchmarkSettings = configurator.GetBenchmarkSettings(measurement);
                var selector          = configurator.GetMetricsProvider(measurement);
                foreach (var setting in benchmarkSettings)
                {
                    var name = setting.MetricName;
                    measurements.Add(setting);
                    collectors[name] = selector;
                }
            }

            return new BenchmarkSettings(performanceTestAttribute.TestMode, performanceTestAttribute.RunMode,
                performanceTestAttribute.NumberOfIterations, performanceTestAttribute.RunTimeMilliseconds,
                measurements, collectors, performanceTestAttribute.Description,
                performanceTestAttribute.Skip);
        }
Example #5
 public static IBenchmarkInvoker CreateInvokerForBenchmark(BenchmarkClassMetadata benchmarkClass)
 {
     return new ReflectionBenchmarkInvoker(benchmarkClass);
 }
Example #6
 public static IBenchmarkInvoker CreateInvokerForBenchmark(BenchmarkClassMetadata benchmarkClass)
 {
     return new ReflectionBenchmarkInvoker(benchmarkClass);
 }
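Taken together, the two factory methods are what a runner needs in order to execute a single benchmark: one turns the discovered metadata into settings, the other into an invoker that can call the method. A rough sketch of how a host runner might wire them up follows; the Benchmark type, its Run/Finish members, and the IBenchmarkOutput writer are assumptions standing in for whatever the hosting runner actually uses:

 public static void RunSingleBenchmark(BenchmarkClassMetadata benchmarkClass, IBenchmarkOutput output)
 {
     // Factories shown in the examples above.
     var settings = CreateSettingsForBenchmark(benchmarkClass);
     var invoker  = CreateInvokerForBenchmark(benchmarkClass);

     // "Benchmark" is assumed here to be the type that pairs settings with an
     // invoker and drives warmups, iterations, and reporting.
     var benchmark = new Benchmark(settings, invoker, output);
     benchmark.Run();
     benchmark.Finish();
 }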
Example #7
        public BenchmarkSettings CreateSettingsForBenchmark(BenchmarkClassMetadata benchmarkClass)
        {
            var allBenchmarkMethodAttributes = benchmarkClass.Run.InvocationMethod.GetCustomAttributes().ToList();

            var performanceTestAttribute =
                allBenchmarkMethodAttributes.Single(a => a is PerfBenchmarkAttribute) as
                    PerfBenchmarkAttribute;
            Contract.Assert(performanceTestAttribute != null);

            var allMeasurementAttributes =
                allBenchmarkMethodAttributes.Where(a => MeasurementAttributeType.IsInstanceOfType(a)).Cast<MeasurementAttribute>();

            var measurements = new List<IBenchmarkSetting>();
            var collectors = new Dictionary<MetricName, MetricsCollectorSelector>();

            foreach (var measurement in allMeasurementAttributes)
            {
                var configurator = GetConfiguratorForMeasurement(measurement.GetType());
                if (configurator is MeasurementConfigurator.EmptyConfigurator)
                {
                    Output.Warning($"Unable to find valid configurator for {measurement} - skipping...");
                    continue;
                }

                var benchmarkSettings = configurator.GetBenchmarkSettings(measurement);
                var selector = configurator.GetMetricsProvider(measurement);
                foreach (var setting in benchmarkSettings)
                {
                    var name = setting.MetricName;
                    measurements.Add(setting);
                    collectors[name] = selector;
                }
            }

            // TODO: need to start packing more of these settings in as properties rather than constructor arguments;
            // it's becoming unsustainable given the number of different things we need to pass in here
            return new BenchmarkSettings(performanceTestAttribute.TestMode, performanceTestAttribute.RunMode,
                performanceTestAttribute.NumberOfIterations, performanceTestAttribute.RunTimeMilliseconds,
                measurements, collectors, performanceTestAttribute.Description,
                performanceTestAttribute.Skip, Trace, RunnerSettings.ConcurrentModeEnabled)
            {
                SkipWarmups = performanceTestAttribute.SkipWarmups
            };
        }
Example #8
 public ReflectionBenchmarkInvoker(BenchmarkClassMetadata metadata)
 {
     _metadata = metadata;
     BenchmarkName = $"{metadata.BenchmarkClass.FullName}+{metadata.Run.InvocationMethod.Name}";
 }
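The constructor above only captures the class metadata and composes the display name in the form "FullName+MethodName". The invocation side of ReflectionBenchmarkInvoker is not shown in these examples, but an InvokeRun member roughly along the following lines would fit the stored state; the exact signature, the BenchmarkContext parameter handling, and the assumption that _metadata.BenchmarkClass is a System.Type are illustrative guesses, not the library's verbatim code:

 public void InvokeRun(BenchmarkContext context)
 {
     // Assumed shape: create one instance of the benchmark class and call the
     // discovered method via reflection, passing the context only if it is requested.
     var instance = Activator.CreateInstance(_metadata.BenchmarkClass);
     var method = _metadata.Run.InvocationMethod;
     var args = method.GetParameters().Length == 0
         ? new object[0]
         : new object[] { context };
     method.Invoke(instance, args);
 }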