// Serializes the summary to indented JSON and writes it to the xunit output helper.
// Only a curated subset of the summary is serialized: asking the serializer to handle
// the entire "summary" object throws several errors (plus some fields/properties aren't relevant).
public static void Log(Summary summary, ITestOutputHelper output)
{
    var logger = new OutputLogger(output);

    var benchmarks = summary.Reports.Select(report =>
    {
        // ResultStatistics can be null when the benchmark failed to run;
        // guard so logging the summary does not throw a NullReferenceException.
        double? mean = report.ResultStatistics?.Mean;

        var data = new Dictionary <string, object>
        {
            // We don't need Benchmark.ShortInfo, that info is available via Benchmark.Parameters below
            { "DisplayInfo", report.BenchmarkCase.DisplayInfo },
            { "Namespace", report.BenchmarkCase.Descriptor.Type.Namespace },
            { "Type", GetTypeName(report.BenchmarkCase.Descriptor.Type) },
            { "Method", report.BenchmarkCase.Descriptor.WorkloadMethod.Name },
            { "MethodTitle", report.BenchmarkCase.Descriptor.WorkloadMethodDisplayInfo },
            { "Parameters", report.BenchmarkCase.Parameters.PrintInfo },
            { "FullName", FullNameProvider.GetBenchmarkName(report.BenchmarkCase) }, // do NOT remove this property, it is used for xunit-performance migration
            { "ExecutionTime", mean.HasValue ? mean.Value.ToTimeStr(TimeUnit.GetBestTimeUnit(mean.Value)) : "NA" },
        };

        // We show MemoryDiagnoser's results only if it is being used
        if (report.BenchmarkCase.Config.HasMemoryDiagnoser())
        {
            data.Add("Memory", report.GcStats);
        }

        return data;
    });

    logger.WriteLine(JsonConvert.SerializeObject(new Dictionary <string, object>
    {
        { "Title", summary.Title },
        { "Benchmarks", benchmarks }
    }, Formatting.Indented));
}
// Builds the name -> value map serialized for a single benchmark report.
// Overridable so derived exporters can add or replace entries.
protected virtual IReadOnlyDictionary <string, object> GetDataToSerialize(BenchmarkReport report)
{
    var benchmark = new Dictionary <string, object>
    {
        // We don't need Benchmark.ShortInfo, that info is available via Benchmark.Parameters below
        { "DisplayInfo", report.BenchmarkCase.DisplayInfo },
        { "Namespace", report.BenchmarkCase.Descriptor.Type.Namespace },
        { "Type", FullNameProvider.GetTypeName(report.BenchmarkCase.Descriptor.Type) },
        { "Method", report.BenchmarkCase.Descriptor.WorkloadMethod.Name },
        { "MethodTitle", report.BenchmarkCase.Descriptor.WorkloadMethodDisplayInfo },
        { "Parameters", report.BenchmarkCase.Parameters.PrintInfo },
        { "FullName", FullNameProvider.GetBenchmarkName(report.BenchmarkCase) }, // do NOT remove this property, it is used for xunit-performance migration
        // Hardware Intrinsics can be disabled using env vars, that is why they might be different per benchmark and are not exported as part of HostEnvironmentInfo
        { "HardwareIntrinsics", report.GetHardwareIntrinsicsInfo() ?? "" },
        // { "Properties", r.Benchmark.Job.ToSet().ToDictionary(p => p.Name, p => p.Value) }, // TODO
        { "Statistics", report.ResultStatistics }
    };

    // We show MemoryDiagnoser's results only if it is being used
    if (report.BenchmarkCase.Config.HasMemoryDiagnoser())
    {
        // Anonymous type selects just the GC fields worth exporting.
        benchmark.Add("Memory", new
        {
            report.GcStats.Gen0Collections,
            report.GcStats.Gen1Collections,
            report.GcStats.Gen2Collections,
            report.GcStats.TotalOperations,
            BytesAllocatedPerOperation = report.GcStats.GetBytesAllocatedPerOperation(report.BenchmarkCase)
        });
    }

    if (ExcludeMeasurements == false)
    {
        // We construct Measurements manually, so that we can have the IterationMode enum as text, rather than an integer
        benchmark.Add("Measurements", report.AllMeasurements.Select(m => new
        {
            IterationMode = m.IterationMode.ToString(),
            IterationStage = m.IterationStage.ToString(),
            m.LaunchIndex,
            m.IterationIndex,
            m.Operations,
            m.Nanoseconds
        }));

        // Metrics are only exported together with measurements (same ExcludeMeasurements switch).
        if (report.Metrics.Any())
        {
            benchmark.Add("Metrics", report.Metrics.Values);
        }
    }

    return(benchmark);
}
// Returns the ETW session name for the given benchmark: the full benchmark name
// when it fits the limit, otherwise a short hash-based fallback.
protected static string GetSessionName(BenchmarkCase benchmarkCase)
{
    string fullName = FullNameProvider.GetBenchmarkName(benchmarkCase);

    if (fullName.Length > MaxSessionNameLength)
    {
        // session name is not really used by humans, we can just give it the hashcode value
        return $"BenchmarkDotNet.EtwProfiler.Session_{Hashing.HashString(fullName)}";
    }

    return fullName;
}
// Returns a benchmark name short enough to be passed to Process.Start as an argument.
// Each OS limits the max argument length: Windows limit is 32767 chars, Unix is 128*1024,
// but we use 1024 as a common sense limit.
private static string GetBenchmarkName(BenchmarkCase benchmark)
{
    var fullName = FullNameProvider.GetBenchmarkName(benchmark);
    if (fullName.Length < 1024)
    {
        return fullName;
    }

    // Too long: fall back to Type.Method(paramsHash), which stays unique but short.
    string typeName = FullNameProvider.GetTypeName(benchmark.Descriptor.Type);
    string methodName = benchmark.Descriptor.WorkloadMethod.Name;
    string paramsHash = benchmark.HasParameters
        ? $"paramsHash_{Hashing.HashString(FullNameProvider.GetMethodName(benchmark))}"
        : string.Empty;

    return $"{typeName}.{methodName}({paramsHash})";
}
// Builds an artifact file path that is guaranteed to stay within the given length limit,
// throwing NotSupportedException with guidance when it cannot.
private static string GetLimitedFilePath(DiagnoserActionParameters details, DateTime creationTime, string fileExtension, int limit)
{
    var benchmarkCase = details.BenchmarkCase;

    string shortTypeName = FolderNameHelper.ToFolderName(benchmarkCase.Descriptor.Type, includeNamespace: false);
    string methodName = benchmarkCase.Descriptor.WorkloadMethod.Name;
    // Parameter lists can be arbitrarily long, so they are replaced with a stable hash.
    string parameters = benchmarkCase.HasParameters
        ? $"-hash{Hashing.HashString(FullNameProvider.GetMethodName(benchmarkCase))}"
        : string.Empty;

    string fileName = $@"{shortTypeName}.{methodName}{parameters}";
    string finalResult = GetFilePath(fileName, details, creationTime, fileExtension);

    if (finalResult.Length > limit)
    {
        throw new NotSupportedException(
            $"The full benchmark name: \"{fileName}\" combined with artifacts path: \"{details.Config.ArtifactsPath}\" is too long. " +
            $"Please set the value of {nameof(details.Config)}.{nameof(details.Config.ArtifactsPath)} to shorter path or rename the type or method.");
    }

    return finalResult;
}
// Builds the output path as:
// <artifacts>/<timestamp>-<pid>[/<toolchain>][/<namespace as folders>]/<type>/<method><FileExtension>
private string GetFilePath(DiagnoserActionParameters details, DateTime creationTime)
{
    string folderPath = Path.Combine(
        details.Config.ArtifactsPath,
        $"{creationTime:yyyyMMdd-hhmm}-{Process.GetCurrentProcess().Id}");

    // if we run for more than one toolchain, the output file name should contain the name too so we can differ net461 vs netcoreapp2.1 etc
    bool multipleToolchains = details.Config.GetJobs().Select(job => job.Infrastructure.Toolchain).Distinct().Count() > 1;
    if (multipleToolchains)
    {
        folderPath = Path.Combine(folderPath, details.BenchmarkCase.Job.Infrastructure.Toolchain.Name);
    }

    // Mirror the namespace as a directory hierarchy, when there is one.
    string typeNamespace = details.BenchmarkCase.Descriptor.Type.Namespace;
    if (!string.IsNullOrWhiteSpace(typeNamespace))
    {
        folderPath = Path.Combine(folderPath, typeNamespace.Replace('.', Path.DirectorySeparatorChar));
    }

    folderPath = Path.Combine(folderPath, FolderNameHelper.ToFolderName(details.BenchmarkCase.Descriptor.Type, includeNamespace: false));

    var fileName = FolderNameHelper.ToFolderName(FullNameProvider.GetMethodName(details.BenchmarkCase));
    return Path.Combine(folderPath, $"{fileName}{FileExtension}");
}
// Converts T into its single benchmark case and asserts that FullNameProvider
// produces the expected full benchmark name for it.
private void AssertBenchmarkName <T>(string expectedBenchmarkName)
{
    // Arrange
    var benchmarkCase = BenchmarkConverter.TypeToBenchmarks(typeof(T)).BenchmarksCases.Single();

    // Act + Assert
    string actualBenchmarkName = FullNameProvider.GetBenchmarkName(benchmarkCase);
    Assert.Equal(expectedBenchmarkName, actualBenchmarkName);
}
// Serializes the summary (environment info + per-benchmark data) to JSON via SimpleJson
// and writes the result to the logger.
public override void ExportToLog(Summary summary, ILogger logger)
{
    // We construct HostEnvironmentInfo manually, so that we can have the HardwareTimerKind enum as text, rather than an integer
    // SimpleJson serializer doesn't seem to have an enum String/Value option (to-be-fair, it is meant to be "Simple")
    var environmentInfo = new
    {
        HostEnvironmentInfo.BenchmarkDotNetCaption,
        summary.HostEnvironmentInfo.BenchmarkDotNetVersion,
        OsVersion = summary.HostEnvironmentInfo.OsVersion.Value,
        ProcessorName = ProcessorBrandStringHelper.Prettify(summary.HostEnvironmentInfo.CpuInfo.Value?.ProcessorName ?? ""),
        summary.HostEnvironmentInfo.CpuInfo.Value?.PhysicalProcessorCount,
        summary.HostEnvironmentInfo.CpuInfo.Value?.PhysicalCoreCount,
        summary.HostEnvironmentInfo.CpuInfo.Value?.LogicalCoreCount,
        summary.HostEnvironmentInfo.RuntimeVersion,
        summary.HostEnvironmentInfo.Architecture,
        summary.HostEnvironmentInfo.HasAttachedDebugger,
        summary.HostEnvironmentInfo.HasRyuJit,
        summary.HostEnvironmentInfo.Configuration,
        summary.HostEnvironmentInfo.JitModules,
        DotNetCliVersion = summary.HostEnvironmentInfo.DotNetSdkVersion.Value,
        summary.HostEnvironmentInfo.ChronometerFrequency,
        HardwareTimerKind = summary.HostEnvironmentInfo.HardwareTimerKind.ToString()
    };

    // If we just ask SimpleJson to serialise the entire "summary" object it throws several errors.
    // So we are more specific in what we serialise (plus some fields/properties aren't relevant)
    var benchmarks = summary.Reports.Select(r =>
    {
        var data = new Dictionary <string, object>
        {
            // We don't need Benchmark.ShortInfo, that info is available via Benchmark.Parameters below
            { "DisplayInfo", r.BenchmarkCase.DisplayInfo },
            { "Namespace", r.BenchmarkCase.Descriptor.Type.Namespace },
            { "Type", FullNameProvider.GetTypeName(r.BenchmarkCase.Descriptor.Type) },
            { "Method", r.BenchmarkCase.Descriptor.WorkloadMethod.Name },
            { "MethodTitle", r.BenchmarkCase.Descriptor.WorkloadMethodDisplayInfo },
            { "Parameters", r.BenchmarkCase.Parameters.PrintInfo },
            { "FullName", FullNameProvider.GetBenchmarkName(r.BenchmarkCase) }, // do NOT remove this property, it is used for xunit-performance migration
            // { "Properties", r.Benchmark.Job.ToSet().ToDictionary(p => p.Name, p => p.Value) }, // TODO
            { "Statistics", r.ResultStatistics }
        };

        // We show MemoryDiagnoser's results only if it is being used
        // NOTE(review): this checks the summary-level config, while similar code elsewhere in
        // this codebase checks the per-case config (report.BenchmarkCase.Config) — confirm intentional.
        if (summary.Config.HasMemoryDiagnoser())
        {
            data.Add("Memory", r.GcStats);
        }

        if (ExcludeMeasurements == false)
        {
            // We construct Measurements manually, so that we can have the IterationMode enum as text, rather than an integer
            data.Add("Measurements", r.AllMeasurements.Select(m => new
            {
                IterationMode = m.IterationMode.ToString(),
                IterationStage = m.IterationStage.ToString(),
                m.LaunchIndex,
                m.IterationIndex,
                m.Operations,
                m.Nanoseconds
            }));
        }

        return(data);
    });

    // NOTE(review): mutates a global/static serializer strategy before serializing —
    // looks non-thread-safe if exporters can run concurrently; confirm.
    JsonSerializer.CurrentJsonSerializerStrategy.Indent = IndentJson;
    logger.WriteLine(JsonSerializer.SerializeObject(new Dictionary <string, object>
    {
        { "Title", summary.Title },
        { "HostEnvironmentInfo", environmentInfo },
        { "Benchmarks", benchmarks }
    }));
}
// Executes a single benchmark case in-process: creates the benchmark instance,
// builds all engine actions via BenchmarkActionFactory, runs the engine and
// reports the results back to the host.
public static void RunCore(IHost host, BenchmarkCase benchmarkCase, BenchmarkActionCodegen codegenMode, IConfig config)
{
    var target = benchmarkCase.Descriptor;
    var job = benchmarkCase.Job; // TODO: filter job (same as SourceCodePresenter does)?
    int unrollFactor = benchmarkCase.Job.ResolveValue(RunMode.UnrollFactorCharacteristic, EnvironmentResolver.Instance);

    // DONTTOUCH: these should be allocated together
    var instance = Activator.CreateInstance(benchmarkCase.Descriptor.Type);
    var workloadAction = BenchmarkActionFactory.CreateWorkload(target, instance, codegenMode, unrollFactor);
    var overheadAction = BenchmarkActionFactory.CreateOverhead(target, instance, codegenMode, unrollFactor);
    var globalSetupAction = BenchmarkActionFactory.CreateGlobalSetup(target, instance);
    var globalCleanupAction = BenchmarkActionFactory.CreateGlobalCleanup(target, instance);
    var iterationSetupAction = BenchmarkActionFactory.CreateIterationSetup(target, instance);
    var iterationCleanupAction = BenchmarkActionFactory.CreateIterationCleanup(target, instance);
    var dummy1 = BenchmarkActionFactory.CreateDummy();
    var dummy2 = BenchmarkActionFactory.CreateDummy();
    var dummy3 = BenchmarkActionFactory.CreateDummy();

    // NOTE(review): presumably fills [Params]-style members on the new instance — confirm in FillMembers.
    FillMembers(instance, benchmarkCase);

    // Print environment info and the job description before the run starts.
    host.WriteLine();
    foreach (string infoLine in BenchmarkEnvironmentInfo.GetCurrent().ToFormattedString())
    {
        host.WriteLine("// {0}", infoLine);
    }
    host.WriteLine("// Job: {0}", job.DisplayInfo);
    host.WriteLine();

    var engineParameters = new EngineParameters
    {
        Host = host,
        // NoUnroll variants invoke the single-call delegate in a plain loop;
        // Unroll variants use the pre-generated multi-invoke delegates.
        WorkloadActionNoUnroll = invocationCount =>
        {
            for (int i = 0; i < invocationCount; i++)
            {
                workloadAction.InvokeSingle();
            }
        },
        WorkloadActionUnroll = workloadAction.InvokeMultiple,
        Dummy1Action = dummy1.InvokeSingle,
        Dummy2Action = dummy2.InvokeSingle,
        Dummy3Action = dummy3.InvokeSingle,
        OverheadActionNoUnroll = invocationCount =>
        {
            for (int i = 0; i < invocationCount; i++)
            {
                overheadAction.InvokeSingle();
            }
        },
        OverheadActionUnroll = overheadAction.InvokeMultiple,
        GlobalSetupAction = globalSetupAction.InvokeSingle,
        GlobalCleanupAction = globalCleanupAction.InvokeSingle,
        IterationSetupAction = iterationSetupAction.InvokeSingle,
        IterationCleanupAction = iterationCleanupAction.InvokeSingle,
        TargetJob = job,
        OperationsPerInvoke = target.OperationsPerInvoke,
        MeasureGcStats = config.HasMemoryDiagnoser(),
        BenchmarkName = FullNameProvider.GetBenchmarkName(benchmarkCase)
    };

    // The engine factory comes from the job's infrastructure characteristic; the engine is disposable.
    using (var engine = job
        .ResolveValue(InfrastructureMode.EngineFactoryCharacteristic, InfrastructureResolver.Instance)
        .CreateReadyToRun(engineParameters))
    {
        var results = engine.Run();

        host.ReportResults(results); // printing costs memory, do this after runs
    }
}
// Builds a "<TypeFolderName>.<MethodName>" file name with no length cap applied
// and delegates the rest of the path construction to GetFilePath.
private static string GetFilePathNoLimits(DiagnoserActionParameters details, DateTime creationTime, string fileExtension)
{
    var benchmarkCase = details.BenchmarkCase;
    string fileName = $@"{FolderNameHelper.ToFolderName(benchmarkCase.Descriptor.Type)}.{FullNameProvider.GetMethodName(benchmarkCase)}";

    return GetFilePath(fileName, details, creationTime, fileExtension);
}
// Exports results in the perf-lab reporter format, attaching diffable disassembly
// when a DisassemblyDiagnoser produced output for a benchmark case.
public override void ExportToLog(Summary summary, ILogger logger)
{
    var reporter = Reporter.CreateReporter();
    if (!reporter.InLab) // not running in the perf lab
    {
        return;
    }

    DisassemblyDiagnoser disassemblyDiagnoser = summary.Reports
        .FirstOrDefault()? // dissasembler was either enabled for all or none of them (so we use the first one)
        .BenchmarkCase.Config.GetDiagnosers().OfType <DisassemblyDiagnoser>().FirstOrDefault();

    foreach (var report in summary.Reports)
    {
        var test = new Test();
        test.Name = FullNameProvider.GetBenchmarkName(report.BenchmarkCase);
        test.Categories = report.BenchmarkCase.Descriptor.Categories;

        // Actual workload measurements in stable (launch, iteration) order.
        var results = from result in report.AllMeasurements
                      where result.IterationMode == Engines.IterationMode.Workload && result.IterationStage == Engines.IterationStage.Result
                      orderby result.LaunchIndex, result.IterationIndex
                      select new { result.Nanoseconds, result.Operations };

        // Overhead measurements (everything but jitting), for the per-invocation overhead counter.
        var overheadResults = from result in report.AllMeasurements
                              where result.IsOverhead() && result.IterationStage != Engines.IterationStage.Jitting
                              orderby result.LaunchIndex, result.IterationIndex
                              select new { result.Nanoseconds, result.Operations };

        test.Counters.Add(new Counter { Name = "Duration of single invocation", TopCounter = true, DefaultCounter = true, HigherIsBetter = false, MetricName = "ns", Results = (from result in results select result.Nanoseconds / result.Operations).ToList() });
        test.Counters.Add(new Counter { Name = "Overhead invocation", TopCounter = false, DefaultCounter = false, HigherIsBetter = false, MetricName = "ns", Results = (from result in overheadResults select result.Nanoseconds / result.Operations).ToList() });
        // NOTE(review): "Duration" stores raw Nanoseconds but is labelled "ms" — looks inconsistent; confirm the consumer expects this.
        test.Counters.Add(new Counter { Name = "Duration", TopCounter = false, DefaultCounter = false, HigherIsBetter = false, MetricName = "ms", Results = (from result in results select result.Nanoseconds).ToList() });
        test.Counters.Add(new Counter { Name = "Operations", TopCounter = false, DefaultCounter = false, HigherIsBetter = true, MetricName = "Count", Results = (from result in results select(double) result.Operations).ToList() });

        // One counter per diagnoser-produced metric.
        foreach (var metric in report.Metrics.Keys)
        {
            var m = report.Metrics[metric];
            test.Counters.Add(new Counter { Name = m.Descriptor.DisplayName, TopCounter = false, DefaultCounter = false, HigherIsBetter = m.Descriptor.TheGreaterTheBetter, MetricName = m.Descriptor.Unit, Results = new[] { m.Value } });
        }

        // Attach diffable disassembly when the diagnoser has results for this benchmark case.
        if (disassemblyDiagnoser != null && disassemblyDiagnoser.Results.TryGetValue(report.BenchmarkCase, out var disassemblyResult))
        {
            string disassembly = DiffableDisassemblyExporter.BuildDisassemblyString(disassemblyResult, disassemblyDiagnoser.Config);
            test.AdditionalData["disasm"] = disassembly;
        }

        reporter.AddTest(test);
    }

    logger.WriteLine(reporter.GetJson());
}
// Creates a user ETW session, using the full benchmark name (from FullNameProvider)
// as the session name passed to the base Session constructor. No extra state of its own.
public UserSession(DiagnoserActionParameters details, EtwProfilerConfig config, DateTime creationTime)
    : base(FullNameProvider.GetBenchmarkName(details.BenchmarkCase), details, config, creationTime)
{
}
// Builds the artifact file path as:
// <artifacts>/<Type>.<Method>[-<runtime|toolchain|jobId>]-<timestamp><FileExtension>
private string GetFilePath(DiagnoserActionParameters details, DateTime creationTime)
{
    var benchmarkCase = details.BenchmarkCase;
    string fileName = $@"{FolderNameHelper.ToFolderName(benchmarkCase.Descriptor.Type)}.{FullNameProvider.GetMethodName(benchmarkCase)}";

    // if we run for more than one toolchain, the output file name should contain the name too so we can differ net461 vs netcoreapp2.1 etc
    bool multipleToolchains = details.Config.GetJobs().Select(job => job.GetToolchain()).Distinct().Count() > 1;
    if (multipleToolchains)
    {
        string runtimeOrToolchain = benchmarkCase.Job.Environment.Runtime?.Name
            ?? benchmarkCase.GetToolchain()?.Name
            ?? benchmarkCase.Job.Id;
        fileName += $"-{runtimeOrToolchain}";
    }

    // Timestamp keeps consecutive runs from overwriting each other's output.
    fileName += $"-{creationTime.ToString(BenchmarkRunnerClean.DateTimeFormat)}";
    fileName = FolderNameHelper.ToFolderName(fileName);

    return Path.Combine(details.Config.ArtifactsPath, $"{fileName}{FileExtension}");
}
// Exports results in the perf-lab reporter format (duration, raw duration, operation
// count and diagnoser metrics per benchmark) and writes the reporter's JSON to the logger.
public override void ExportToLog(Summary summary, ILogger logger)
{
    var reporter = Reporter.CreateReporter();
    if (reporter == null) // not running in the perf lab
    {
        return;
    }

    foreach (var report in summary.Reports)
    {
        var test = new Test();
        test.Name = FullNameProvider.GetBenchmarkName(report.BenchmarkCase);
        test.Categories = report.BenchmarkCase.Descriptor.Categories;

        // Actual workload measurements in stable (launch, iteration) order.
        var results = from result in report.AllMeasurements
                      where result.IterationMode == Engines.IterationMode.Workload && result.IterationStage == Engines.IterationStage.Result
                      orderby result.LaunchIndex, result.IterationIndex
                      select new { result.Nanoseconds, result.Operations };

        test.Counters.Add(new Counter { Name = "Duration of single invocation", TopCounter = true, DefaultCounter = true, HigherIsBetter = false, MetricName = "ns", Results = (from result in results select result.Nanoseconds / result.Operations).ToList() });
        // NOTE(review): "Duration" stores raw Nanoseconds but is labelled "ms" — looks inconsistent; confirm the consumer expects this.
        test.Counters.Add(new Counter { Name = "Duration", TopCounter = false, DefaultCounter = false, HigherIsBetter = false, MetricName = "ms", Results = (from result in results select result.Nanoseconds).ToList() });
        test.Counters.Add(new Counter { Name = "Operations", TopCounter = false, DefaultCounter = false, HigherIsBetter = true, MetricName = "Count", Results = (from result in results select(double) result.Operations).ToList() });

        // One counter per diagnoser-produced metric.
        foreach (var metric in report.Metrics.Keys)
        {
            var m = report.Metrics[metric];
            test.Counters.Add(new Counter { Name = m.Descriptor.DisplayName, TopCounter = false, DefaultCounter = false, HigherIsBetter = m.Descriptor.TheGreaterTheBetter, MetricName = m.Descriptor.Unit, Results = new[] { m.Value } });
        }

        reporter.AddTest(test);
    }

    logger.WriteLine(reporter.GetJson());
}
// Captures a stable numeric id plus enough metadata (job id, full benchmark name)
// to identify the benchmark case across process boundaries.
public BenchmarkId(int value, BenchmarkCase benchmarkCase)
{
    Value = value;
    JobId = benchmarkCase.Job.Id;
    FullBenchmarkName = FullNameProvider.GetBenchmarkName(benchmarkCase);
}