/// <summary>
/// When summaries are joined, all benchmarks must share a single Orderer and a single SummaryStyle;
/// reports an error for each setting that has more than one distinct non-default value.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    // JoinSummary requires one shared Orderer across all joined configurations.
    var distinctOrderers = validationParameters.Benchmarks
        .Where(benchmark => benchmark.Config.Orderer != Order.DefaultOrderer.Instance)
        .Select(benchmark => benchmark.Config.Orderer)
        .Distinct()
        .ToList();

    if (distinctOrderers.Count > 1)
        yield return new ValidationError(true, "You use JoinSummary options, but provided configurations cannot be joined. Only one Orderer per benchmark cases is allowed.");

    // The same constraint applies to SummaryStyle: at most one non-default value.
    var distinctStyles = validationParameters.Benchmarks
        .Where(benchmark => benchmark.Config.SummaryStyle != SummaryStyle.Default && benchmark.Config.SummaryStyle != null) // Paranoid
        .Select(benchmark => benchmark.Config.SummaryStyle)
        .Distinct()
        .ToList();

    if (distinctStyles.Count > 1)
        yield return new ValidationError(true, "You use JoinSummary options, but provided configurations cannot be joined. Only one SummaryStyle per benchmark cases is allowed.");
}
public bool TreatsWarningsAsErrors => true; // it is a must!

/// <summary>
/// Checks every logical group of benchmarks: within one group at most one benchmark method
/// and at most one job may be marked as the baseline.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    var allBenchmarks = input.Benchmarks.ToImmutableArray();
    var orderProvider = input.Config.Orderer;

    // Compute the logical group key for every benchmark exactly once, index-aligned with allBenchmarks.
    var groupKeys = allBenchmarks
        .Select(benchmark => orderProvider.GetLogicalGroupKey(allBenchmarks, benchmark))
        .ToArray();

    foreach (string groupKey in groupKeys.Distinct().ToArray())
    {
        var groupMembers = allBenchmarks.Where((benchmark, index) => groupKeys[index] == groupKey).ToArray();

        // Distinct() before counting: the same descriptor/job may appear in several benchmark cases.
        int methodBaselineCount = groupMembers.Select(b => b.Descriptor).Distinct().Count(it => it.Baseline);
        int jobBaselineCount = groupMembers.Select(b => b.Job).Distinct().Count(it => it.Meta.Baseline);
        string className = groupMembers.First().Descriptor.Type.Name;

        if (methodBaselineCount > 1)
            yield return CreateError("benchmark method", "Baseline = true", groupKey, className, methodBaselineCount.ToString());

        if (jobBaselineCount > 1)
            yield return CreateError("job", "Baseline = true", groupKey, className, jobBaselineCount.ToString());
    }
}
/// <summary>
/// Warns (non-critical) about configurations that would silently produce no visible output:
/// missing loggers, exporters, or column providers, plus an invalid artifacts path.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var config = validationParameters.Config;

    if (config.GetLoggers().IsEmpty())
    {
        const string errorMessage = "No loggers defined, you will not see any progress!";
        // With no loggers configured, the console is the only way to surface this warning.
        ConsoleLogger.Default.WriteLineError(errorMessage);
        yield return new ValidationError(false, errorMessage);
    }

    if (config.GetExporters().IsEmpty())
        yield return new ValidationError(false, "No exporters defined, results will not be persisted.");

    if (config.GetColumnProviders().IsEmpty())
        yield return new ValidationError(false, "No column providers defined, result table will be empty.");

    var pathValidation = ValidateArtifactsPath(config.ArtifactsPath);
    if (pathValidation != null)
        yield return pathValidation;
}
/// <summary>
/// Scans each benchmark-defining assembly and its references for assemblies built with
/// JIT optimizations disabled (i.e. DEBUG builds), yielding an error per offender.
/// Severity is controlled by <c>TreatsWarningsAsErrors</c>.
/// </summary>
/// <param name="validationParameters">Benchmarks to validate, grouped here by declaring assembly.</param>
/// <returns>One <c>ValidationError</c> per non-optimized assembly found.</returns>
public IEnumerable <ValidationError> Validate(ValidationParameters validationParameters)
{
    // Group by assembly so each assembly (and its reference list) is inspected only once.
    foreach (var group in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Descriptor.Type.GetTypeInfo().Assembly))
    {
        foreach (var referencedAssemblyName in group.Key.GetReferencedAssemblies())
        {
            // NOTE(review): Assembly.Load here eagerly loads every referenced assembly into the
            // current context solely to inspect its optimization flags.
            var referencedAssembly = Assembly.Load(referencedAssemblyName);

            // LINQPad.exe is non-optimized on purpose, see https://github.com/dotnet/BenchmarkDotNet/issues/580#issuecomment-345484889 for more details
            // we don't warn about non-optimized dependency to LINQPad
            // but we give extra hint if the dll with benchmark itself was build without optimization by LINQPad
            if (referencedAssembly.IsJitOptimizationDisabled().IsTrue() && !referencedAssembly.IsLinqPad())
            {
                yield return(new ValidationError(
                    TreatsWarningsAsErrors,
                    $"Assembly {group.Key.GetName().Name} which defines benchmarks references non-optimized {referencedAssemblyName.Name}"
                    // The remediation hint is only appended when this finding is treated as an error.
                    + (TreatsWarningsAsErrors
                        ? $"{Environment.NewLine}\tIf you own this dependency, please, build it in RELEASE."
                          + $"{Environment.NewLine}\tIf you don't, you can create custom config with {nameof(JitOptimizationsValidator)}.{nameof(DontFailOnError)} to disable our custom policy and allow this benchmark to run."
                        : string.Empty)));
            }
        }

        // The benchmark assembly itself being non-optimized gets its own, more direct message.
        if (group.Key.IsJitOptimizationDisabled().IsTrue())
        {
            yield return(new ValidationError(
                TreatsWarningsAsErrors,
                $"Assembly {group.Key.GetName().Name} which defines benchmarks is non-optimized"
                + Environment.NewLine
                + "Benchmark was built without optimization enabled (most probably a DEBUG configuration). Please, build it in RELEASE."
                // Extra guidance for LINQPad users, whose builds are non-optimized by default.
                + (group.Key.IsLinqPad()
                    ? Environment.NewLine + "Please enable optimizations in your LINQPad. Go to Preferences -> Query and select \"compile with /optimize+\""
                    : string.Empty)));
        }
    }
}
/// <summary>
/// Smoke-executes each benchmark type once: instantiates it, applies Params fields/properties,
/// invokes Setup, then runs the benchmarks, collecting any failures as validation errors.
/// </summary>
/// <param name="validationParameters">Benchmarks to validate, grouped here by declaring type.</param>
/// <returns>All errors collected across all benchmark types.</returns>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var errors = new List<ValidationError>();

    foreach (var typeGroup in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Target.Type))
    {
        // Each stage must succeed before the next makes sense; on failure, skip to the next type.
        // (Inline `out var` replaces the previous redundant `object ... = null` pre-declaration,
        // matching the style used by the other execution validators in this file.)
        if (!TryCreateBenchmarkTypeInstance(typeGroup.Key, errors, out var benchmarkTypeInstance))
            continue;
        if (!TryToSetParamsFields(benchmarkTypeInstance, errors))
            continue;
        if (!TryToSetParamsProperties(benchmarkTypeInstance, errors))
            continue;
        if (!TryToCallSetup(benchmarkTypeInstance, errors))
            continue;

        ExecuteBenchmarks(benchmarkTypeInstance, typeGroup, errors);
    }

    return errors;
}
public bool TreatsWarningsAsErrors => true; // it is a must!

/// <summary>
/// Validates baseline usage per logical group: at most one baseline benchmark method,
/// at most one baseline job, and not both a method baseline and a job baseline at once.
/// </summary>
/// <param name="input">Benchmarks plus the config whose orderer defines the logical groups.</param>
/// <returns>One error per violated baseline rule per logical group.</returns>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    var allBenchmarks = input.Benchmarks.ToArray();
    var orderProvider = input.Config.GetOrderer() ?? DefaultOrderer.Instance;

    // Compute each benchmark's logical group key once, index-aligned with allBenchmarks.
    var benchmarkLogicalGroups = allBenchmarks
        .Select(benchmark => orderProvider.GetLogicalGroupKey(input.Config, allBenchmarks, benchmark))
        .ToArray();
    var logicalGroups = benchmarkLogicalGroups.Distinct().ToArray();

    foreach (var logicalGroup in logicalGroups)
    {
        var benchmarks = allBenchmarks.Where((benchmark, index) => benchmarkLogicalGroups[index] == logicalGroup).ToArray();
        var methodBaselineCount = benchmarks.Count(b => b.Descriptor.Baseline);
        var jobBaselineCount = benchmarks.Count(b => b.Job.Meta.Baseline);
        var className = benchmarks.First().Descriptor.Type.Name;

        if (methodBaselineCount > 1)
            yield return CreateError("benchmark method", "Baseline = true", logicalGroup, className, methodBaselineCount.ToString());

        if (jobBaselineCount > 1)
            yield return CreateError("job", "Baseline = true", logicalGroup, className, jobBaselineCount.ToString());

        // BUG FIX: was `jobBaselineCount > 1`, which made this check redundant with the error above
        // and missed the actual ambiguity: one method baseline AND one job baseline in the same group.
        if (methodBaselineCount > 0 && jobBaselineCount > 0)
            yield return CreateError("job-benchmark pair", "Baseline property", logicalGroup, className, "both");
    }
}
/// <summary>
/// Validates per-benchmark run settings: UnrollFactor must be positive, InvocationCount (when set)
/// must be a multiple of UnrollFactor, and the iteration / warmup-iteration min-max ranges must be valid.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var resolver = new CompositeResolver(EnvironmentResolver.Instance, EngineResolver.Instance); // TODO: use specified resolver.

    foreach (var benchmark in validationParameters.Benchmarks)
    {
        var run = benchmark.Job.Run;
        int unrollFactor = run.ResolveValue(RunMode.UnrollFactorCharacteristic, resolver);

        if (unrollFactor <= 0)
        {
            yield return new ValidationError(true, $"Specified UnrollFactor ({unrollFactor}) must be greater than zero", benchmark);
        }
        else if (run.HasValue(RunMode.InvocationCountCharacteristic))
        {
            // InvocationCount is only meaningful when it divides evenly by UnrollFactor.
            int invocationCount = run.InvocationCount;
            if (invocationCount % unrollFactor != 0)
                yield return new ValidationError(true, $"Specified InvocationCount ({invocationCount}) must be a multiple of UnrollFactor ({unrollFactor})", benchmark);
        }

        // Delegate range checking of both iteration-count pairs to the shared helper.
        var rangeErrors = ValidateMinMax(run, resolver, benchmark, RunMode.MinIterationCountCharacteristic, RunMode.MaxIterationCountCharacteristic)
            .Concat(ValidateMinMax(run, resolver, benchmark, RunMode.MinWarmupIterationCountCharacteristic, RunMode.MaxWarmupIterationCountCharacteristic));

        foreach (var validationError in rangeErrors)
            yield return validationError;
    }
}
/// <summary>
/// Smoke-executes each benchmark type once: instantiates it, applies Params fields/properties,
/// invokes GlobalSetup, then runs the benchmarks, collecting any failures as validation errors.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var errors = new List<ValidationError>();

    foreach (var typeGroup in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Descriptor.Type))
    {
        // Short-circuit chain: any failing stage records its errors and skips execution for this type.
        bool prepared =
            TryCreateBenchmarkTypeInstance(typeGroup.Key, errors, out var benchmarkTypeInstance)
            && TryToSetParamsFields(benchmarkTypeInstance, errors)
            && TryToSetParamsProperties(benchmarkTypeInstance, errors)
            && TryToCallGlobalSetup(benchmarkTypeInstance, errors);

        if (prepared)
            ExecuteBenchmarks(benchmarkTypeInstance, typeGroup, errors);
    }

    return errors;
}
/// <summary>
/// Scans each benchmark-defining assembly and its references for assemblies built with
/// JIT optimizations disabled, skipping LINQPad references (non-optimized by design).
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    foreach (var group in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Descriptor.Type.GetTypeInfo().Assembly))
    {
        var benchmarkAssembly = group.Key;

        foreach (var referencedAssemblyName in benchmarkAssembly.GetReferencedAssemblies())
        {
            var referencedAssembly = Assembly.Load(referencedAssemblyName);

            // LINQPad references are intentionally non-optimized; don't warn about them.
            if (!referencedAssembly.IsJitOptimizationDisabled().IsTrue() || IsLinqPad(referencedAssembly))
                continue;

            // Remediation hint is appended only when the finding is treated as an error.
            string hint = TreatsWarningsAsErrors
                ? $"{Environment.NewLine}\tIf you own this dependency, please, build it in RELEASE." +
                  $"{Environment.NewLine}\tIf you don't, you can create custom config with {nameof(DontFailOnError)} to disable our custom policy and allow this benchmark to run."
                : string.Empty;

            yield return new ValidationError(
                TreatsWarningsAsErrors,
                $"Assembly {benchmarkAssembly.GetName().Name} which defines benchmarks references non-optimized {referencedAssemblyName.Name}" + hint);
        }

        if (benchmarkAssembly.IsJitOptimizationDisabled().IsTrue())
        {
            // Extra guidance for LINQPad users, whose builds are non-optimized by default.
            string linqPadHint = IsLinqPad(benchmarkAssembly)
                ? Environment.NewLine + "Please enable optimizations in your LINQPad. Go to Preferences -> Query and select \"compile with /optimize+\""
                : string.Empty;

            yield return new ValidationError(
                TreatsWarningsAsErrors,
                $"Assembly {benchmarkAssembly.GetName().Name} which defines benchmarks is non-optimized" + Environment.NewLine +
                "Benchmark was built without optimization enabled (most probably a DEBUG configuration). Please, build it in RELEASE." + linqPadHint);
        }
    }
}
/// <summary>
/// Validates every member annotated with [ParamsAllValues] across all benchmark types,
/// yielding an error for each member whose type is not supported.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    var annotatedMembers = input.Benchmarks
        .Select(benchmark => benchmark.Descriptor.Type)
        .Distinct()
        .SelectMany(type => type.GetTypeMembersWithGivenAttribute<ParamsAllValuesAttribute>(ReflectionFlags))
        .Distinct();

    foreach (var member in annotatedMembers)
    {
        var error = GetErrorOrDefault(member.ParameterType);
        if (error != null)
            yield return error;
    }
}
/// <summary>
/// Reports (non-critically) every generic benchmark type that could not be constructed
/// because of wrong type arguments or argument count.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var failedBuilds = validationParameters.Benchmarks
        .Select(benchmark => benchmark.Descriptor.Type.Assembly)
        .Distinct()
        .SelectMany(assembly => assembly.GetRunnableBenchmarks())
        .SelectMany(GenericBenchmarksBuilder.BuildGenericsIfNeeded)
        .Where(result => !result.isSuccess);

    foreach (var failed in failedBuilds)
        yield return new ValidationError(false, $"Generic type {failed.result.Name} failed to build due to wrong type argument or arguments count, ignoring.");
}
/// <summary>
/// Detects benchmark assemblies that run from the temp directory — the signature of xUnit
/// shadow copying, which is unsupported — and reports a non-critical error for each.
/// </summary>
/// <param name="validationParameters">Benchmarks whose declaring assemblies are checked.</param>
/// <returns>One warning per distinct assembly located under the temp path.</returns>
public IEnumerable <ValidationError> Validate(ValidationParameters validationParameters)
    => validationParameters
       .Benchmarks
       .Select(benchmark => benchmark.Target.Type.Assembly)
       .Distinct()
       // Ordinal comparison: file-system paths must not be compared culture-sensitively (CA1310);
       // the parameterless StartsWith uses the current culture and can mismatch on some locales.
       .Where(assembly => assembly.Location.StartsWith(Path.GetTempPath(), StringComparison.Ordinal))
       .Select(
            assembly => new ValidationError(
                false,
                $"Assembly {assembly} is located in temp. If you are running benchmarks from xUnit you need to disable shadow copy. It's not supported by design."));
/// <summary>
/// Flags benchmark methods whose names are not valid C# identifiers, one error per method.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var offenders = validationParameters.Benchmarks
        .Where(benchmark => !IsValidCSharpIdentifier(benchmark.Target.Method.Name))
        .Distinct(BenchmarkMethodEqualityComparer.Instance); // we might have multiple jobs targeting same method. Single error should be enough ;)

    foreach (var benchmark in offenders)
    {
        yield return new ValidationError(
            true,
            $"Benchmarked method `{benchmark.Target.Method.Name}` contains illegal character(s). Please use `[<Benchmark(Description = \"Custom name\")>]` to set custom display name.",
            benchmark);
    }
}
public bool TreatsWarningsAsErrors => true; // it is a must!

/// <summary>
/// Ensures each benchmark class marks at most one [Benchmark] method with Baseline = true.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    foreach (var groupByType in input.Benchmarks.GroupBy(benchmark => benchmark.Target.Type))
    {
        // Count methods carrying a BenchmarkAttribute with Baseline set.
        int baselineCount = groupByType.Key
            .GetAllMethods()
            .Count(method => method
                .GetCustomAttributes(false)
                .OfType<BenchmarkAttribute>()
                .Any(benchmarkAttribute => benchmarkAttribute.Baseline));

        if (baselineCount > 1)
        {
            yield return new ValidationError(
                TreatsWarningsAsErrors,
                $"Only 1 [Benchmark] in a class can have \"Baseline = true\" applied to it, class {groupByType.Key.Name} has {baselineCount}");
        }
    }
}
public bool TreatsWarningsAsErrors => true; // it is a must!

/// <summary>
/// Ensures each benchmark class marks at most one [Benchmark] method with Baseline = true.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    foreach (var groupByType in input.Benchmarks.GroupBy(benchmark => benchmark.Target.Type))
    {
        var type = groupByType.Key;

        // How many methods in this class declare themselves as the baseline?
        var count = type.GetAllMethods()
            .Count(method => method.GetCustomAttributes(false)
                .OfType<BenchmarkAttribute>()
                .Any(benchmarkAttribute => benchmarkAttribute.Baseline));

        if (count > 1)
        {
            yield return new ValidationError(
                TreatsWarningsAsErrors,
                $"Only 1 [Benchmark] in a class can have \"Baseline = true\" applied to it, class {type.Name} has {count}");
        }
    }
}
public bool TreatsWarningsAsErrors => true; // it is a must!

/// <summary>
/// Validates the setup/cleanup attribute usage ([GlobalSetup], [GlobalCleanup],
/// [IterationSetup], [IterationCleanup]) on every benchmark class.
/// </summary>
/// <param name="input">Benchmarks to validate, grouped here by declaring type.</param>
/// <returns>All attribute-usage errors collected across all benchmark types.</returns>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
{
    var validationErrors = new List<ValidationError>();

    foreach (var groupByType in input.Benchmarks.GroupBy(benchmark => benchmark.Descriptor.Type))
    {
        var allMethods = groupByType.Key.GetAllMethods().ToArray();
        validationErrors.AddRange(ValidateAttributes<GlobalSetupAttribute>(groupByType.Key.Name, allMethods));
        validationErrors.AddRange(ValidateAttributes<GlobalCleanupAttribute>(groupByType.Key.Name, allMethods));
        validationErrors.AddRange(ValidateAttributes<IterationSetupAttribute>(groupByType.Key.Name, allMethods));
        // BUG FIX: was a duplicated IterationSetupAttribute check, so IterationCleanup was never validated.
        validationErrors.AddRange(ValidateAttributes<IterationCleanupAttribute>(groupByType.Key.Name, allMethods));
    }

    return validationErrors;
}
/// <summary>
/// Smoke-executes each benchmark type: instantiates it and runs its benchmarks,
/// collecting any failures as validation errors.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var errors = new List<ValidationError>();

    foreach (var typeGroup in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Target.Type))
    {
        // Instantiation failures are recorded by the Try helper; only execute on success.
        if (TryCreateBenchmarkTypeInstance(typeGroup.Key, errors, out var benchmarkTypeInstance))
            ExecuteBenchmarks(benchmarkTypeInstance, typeGroup, errors);
    }

    return errors;
}
/// <summary>
/// Validates per-benchmark run settings: UnrollFactor must be positive, InvocationCount (when set)
/// must be a multiple of UnrollFactor, and the min/max target iteration counts must form a valid range.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var resolver = new CompositeResolver(EnvResolver.Instance, EngineResolver.Instance); // TODO: use specified resolver.

    foreach (var benchmark in validationParameters.Benchmarks)
    {
        var run = benchmark.Job.Run;

        int unrollFactor = run.ResolveValue(RunMode.UnrollFactorCharacteristic, resolver);
        if (unrollFactor <= 0)
        {
            yield return new ValidationError(true, $"Specified UnrollFactor ({unrollFactor}) must be greater than zero", benchmark);
        }
        else if (run.HasValue(RunMode.InvocationCountCharacteristic))
        {
            // InvocationCount is only meaningful when it divides evenly by UnrollFactor.
            int invocationCount = run.InvocationCount;
            if (invocationCount % unrollFactor != 0)
                yield return new ValidationError(true, $"Specified InvocationCount ({invocationCount}) must be a multiple of UnrollFactor ({unrollFactor})", benchmark);
        }

        int minTargetCount = run.ResolveValue(RunMode.MinTargetIterationCountCharacteristic, resolver);
        int maxTargetCount = run.ResolveValue(RunMode.MaxTargetIterationCountCharacteristic, resolver);

        if (minTargetCount <= 0)
            yield return new ValidationError(true, $"{nameof(RunMode.MinTargetIterationCount)} must be greater than zero (was {minTargetCount})", benchmark);

        if (maxTargetCount <= 0)
            yield return new ValidationError(true, $"{nameof(RunMode.MaxTargetIterationCount)} must be greater than zero (was {maxTargetCount})", benchmark);

        // NOTE(review): min == max is also rejected here — confirm a fixed iteration count is indeed unsupported.
        if (minTargetCount >= maxTargetCount)
            yield return new ValidationError(true, $"{nameof(RunMode.MaxTargetIterationCount)} must be greater than {nameof(RunMode.MinTargetIterationCount)} (was {maxTargetCount} and {minTargetCount})", benchmark);
    }
}
/// <summary>
/// Validates that UnrollFactor is positive and that, when explicitly set,
/// InvocationCount is a multiple of UnrollFactor for every benchmark.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var resolver = EnvResolver.Instance; // TODO: use specified resolver.

    foreach (var benchmark in validationParameters.Benchmarks)
    {
        var run = benchmark.Job.Run;
        int unrollFactor = run.ResolveValue(RunMode.UnrollFactorCharacteristic, resolver);

        // A non-positive unroll factor is fatal for this benchmark; nothing else to check.
        if (unrollFactor <= 0)
        {
            yield return new ValidationError(true, $"Specified UnrollFactor ({unrollFactor}) must be greater than zero", benchmark);
            continue;
        }

        // InvocationCount is only validated when explicitly specified.
        if (!run.HasValue(RunMode.InvocationCountCharacteristic))
            continue;

        int invocationCount = run.InvocationCount;
        if (invocationCount % unrollFactor != 0)
            yield return new ValidationError(true, $"Specified InvocationCount ({invocationCount}) must be a multiple of UnrollFactor ({unrollFactor})", benchmark);
    }
}
/// <summary>
/// Validates that UnrollFactor is positive and that, when explicitly set,
/// InvocationCount is a multiple of UnrollFactor for every benchmark.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var resolver = EnvResolver.Instance; // TODO: use specified resolver.

    foreach (var benchmark in validationParameters.Benchmarks)
    {
        var runMode = benchmark.Job.Run;
        int unrollFactor = runMode.ResolveValue(RunMode.UnrollFactorCharacteristic, resolver);

        if (unrollFactor <= 0)
        {
            yield return new ValidationError(true, $"Specified UnrollFactor ({unrollFactor}) must be greater than zero", benchmark);
        }
        else if (runMode.HasValue(RunMode.InvocationCountCharacteristic))
        {
            // Only an explicitly specified InvocationCount is validated against the unroll factor.
            int invocationCount = runMode.InvocationCount;
            if (invocationCount % unrollFactor != 0)
                yield return new ValidationError(true, $"Specified InvocationCount ({invocationCount}) must be a multiple of UnrollFactor ({unrollFactor})", benchmark);
        }
    }
}
/// <summary>
/// Reports benchmark-defining assemblies — and any of their references — that were
/// built with JIT optimizations disabled (i.e. DEBUG builds).
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    foreach (var group in validationParameters.Benchmarks.GroupBy(benchmark => benchmark.Target.Type.GetTypeInfo().Assembly))
    {
        var benchmarkAssembly = group.Key;

        foreach (var referencedAssemblyName in benchmarkAssembly.GetReferencedAssemblies())
        {
            // Loading is required to inspect the referenced assembly's optimization flags.
            var referencedAssembly = Assembly.Load(referencedAssemblyName);
            if (referencedAssembly.IsJitOptimizationDisabled().IsTrue())
            {
                yield return new ValidationError(
                    TreatsWarningsAsErrors,
                    $"Assembly {benchmarkAssembly.GetName().Name} which defines benchmarks references non-optimized {referencedAssemblyName.Name}");
            }
        }

        if (benchmarkAssembly.IsJitOptimizationDisabled().IsTrue())
        {
            yield return new ValidationError(
                TreatsWarningsAsErrors,
                $"Assembly {benchmarkAssembly.GetName().Name} which defines benchmarks is non-optimized");
        }
    }
}
/// <summary>
/// Delegates validation to every configured diagnoser and flattens their results.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    foreach (var diagnoser in validationParameters.Config.GetDiagnosers())
    {
        foreach (var error in diagnoser.Validate(validationParameters))
            yield return error;
    }
}
/// <summary>
/// Runs all naming checks (C# naming rules, naming conflicts, access modifiers)
/// and returns their combined, de-duplicated results.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    var benchmarks = validationParameters.Benchmarks;
    return ValidateCSharpNaming(benchmarks)
        .Union(ValidateNamingConflicts(benchmarks))
        .Union(ValidateAccessModifiers(benchmarks));
}
/// <summary>
/// Always produces exactly one critical error, regardless of input.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters input)
    => new[] { new ValidationError(true, "It just fails") };
/// <summary>
/// Runs every contained validator against the parameters and flattens all their errors.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
    => Validators.SelectMany(validator => validator.Validate(validationParameters));
/// <summary>
/// Runs every contained validator against the parameters and flattens all their errors.
/// </summary>
public IEnumerable<ValidationError> Validate(ValidationParameters validationParameters)
{
    foreach (var validator in Validators)
    {
        foreach (var error in validator.Validate(validationParameters))
            yield return error;
    }
}