/// <summary>
/// Wraps a <see cref="BenchmarkResult"/> and caches its sat/unsat/unknown counters,
/// parsed once from the Z3 result properties, for cheap later comparison.
/// </summary>
/// <param name="result">The benchmark result to wrap; must not be null.</param>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="result"/> is null.</exception>
public ComparableResult(BenchmarkResult result)
{
    if (result == null)
    {
        throw new ArgumentNullException(nameof(result));
    }

    this.result = result;

    // The counters are machine-written strings, so parse with the invariant culture.
    var props = result.Properties;
    sat = int.Parse(props[Z3Domain.KeySat], CultureInfo.InvariantCulture);
    unsat = int.Parse(props[Z3Domain.KeyUnsat], CultureInfo.InvariantCulture);
    unknown = int.Parse(props[Z3Domain.KeyUnknown], CultureInfo.InvariantCulture);
}
/// <summary>
/// Runs a benchmark executable against one input file, repeating the run to obtain a
/// more confident timing, and aggregates the individual measurements into one result.
/// </summary>
/// <param name="experimentId">Identifier of the owning experiment.</param>
/// <param name="executable">Path of the benchmark executable to launch.</param>
/// <param name="args">Command line template; "{0}" is replaced with <paramref name="inputFullPath"/>. May be null (treated as empty).</param>
/// <param name="inputDisplayName">Benchmark name stored in the result.</param>
/// <param name="inputFullPath">Full path of the benchmark input file.</param>
/// <param name="repetitions">Exact number of runs, or 0 to repeat until the time budget or <paramref name="maxRepetitions"/> is reached.</param>
/// <param name="timeOut">Per-run wall-clock timeout.</param>
/// <param name="memLimitMB">Per-run memory limit in megabytes.</param>
/// <param name="ouputLimit">Optional limit on captured standard output. (NOTE(review): parameter name has a typo, kept for named-argument compatibility.)</param>
/// <param name="errorLimit">Optional limit on captured standard error.</param>
/// <param name="domain">Domain used to analyze the run output; must not be null.</param>
/// <param name="normal">Normalization coefficient applied to processor time to compute the performance index.</param>
/// <param name="maxRepetitions">Upper bound on runs when <paramref name="repetitions"/> is 0.</param>
/// <param name="maxTimeInSeconds">Total time budget for repeated runs when <paramref name="repetitions"/> is 0.</param>
/// <returns>The aggregated benchmark result.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="domain"/> is null.</exception>
public static BenchmarkResult RunBenchmark(int experimentId, string executable, string args, string inputDisplayName, string inputFullPath, int repetitions, TimeSpan timeOut, double memLimitMB, long? ouputLimit, long? errorLimit, Domain domain, double normal, int maxRepetitions = 10, double maxTimeInSeconds = 10)
{
    if (domain == null)
    {
        throw new ArgumentNullException(nameof(domain)); // was the magic string "domain"
    }

    // Substitute the input path into the command line template.
    args = args == null ? "" : args.Replace("{0}", inputFullPath);

    DateTime acq = DateTime.Now;
    int maxCount = repetitions == 0 ? maxRepetitions : repetitions;
    TimeSpan maxTime = TimeSpan.FromSeconds(maxTimeInSeconds);

    int count = 0;
    List<ProcessRunMeasure> measures = new List<ProcessRunMeasure>();
    TimeSpan total = TimeSpan.Zero;
    ProcessRunAnalysis analysis = null;
    ProcessRunMeasure m;
    do
    {
        m = ProcessMeasurer.Measure(executable, args, timeOut, memLimitMB, ouputLimit, errorLimit, domain);
        measures.Add(m);
        count++;
        total += m.WallClockTime;

        if (analysis == null) // analyzed only once, repetitions are for more confident run time
        {
            analysis = domain.Analyze(inputFullPath, m);
        }
    }
    // Stop early only in time-budget mode (repetitions == 0); always stop at the
    // run-count cap or as soon as a run breaches its resource limits.
    while ((repetitions != 0 || total < maxTime) && count < maxCount && m.Limits == Measure.LimitsStatus.WithinLimits);

    ProcessRunMeasure finalMeasure = Utils.AggregateMeasures(measures.ToArray());
    Trace.WriteLine($"Done in {finalMeasure.WallClockTime} (aggregated by {count} runs)");

    // The performance index normalizes processor time so results from different machines are comparable.
    var performanceIndex = normal * finalMeasure.TotalProcessorTime.TotalSeconds;
    var result = new BenchmarkResult(
        experimentId, inputDisplayName, acq,
        performanceIndex,
        finalMeasure.TotalProcessorTime, finalMeasure.WallClockTime, finalMeasure.PeakMemorySizeMB,
        analysis.Status,
        finalMeasure.ExitCode, finalMeasure.StdOut, finalMeasure.StdErr,
        analysis.OutputProperties);
    return result;
}
/// <summary>
/// Loads the benchmark results of an experiment from a CSV stream.
/// </summary>
/// <param name="expId">Experiment id stamped onto every loaded result.</param>
/// <param name="stream">CSV stream with one row per benchmark result.</param>
/// <returns>One <see cref="BenchmarkResult"/> per CSV row.</returns>
public static BenchmarkResult[] LoadBenchmarks(int expId, Stream stream)
{
    // Force every column to be read as a raw string; fields are parsed individually below.
    var readAllAsString = FSharpFunc<Tuple<int, string>, FSharpOption<Type>>.FromConverter(
        tuple => FSharpOption<Type>.Some(typeof(string)));
    var table = Table.Load(
        new StreamReader(stream),
        new ReadSettings(
            Delimiter.Comma, true, true,
            FSharpOption<int>.None,
            FSharpOption<FSharpFunc<Tuple<int, string>, FSharpOption<Type>>>.Some(readAllAsString)));

    // Well-known columns of the results table.
    var fileNameCol = table["BenchmarkFileName"].Rows.AsString;
    var acquireCol = table["AcquireTime"].Rows.AsString;
    var normCol = table["NormalizedRuntime"].Rows.AsString;
    var cpuTimeCol = table["TotalProcessorTime"].Rows.AsString;
    var wallTimeCol = table["WallClockTime"].Rows.AsString;
    var memCol = table["PeakMemorySizeMB"].Rows.AsString;
    var statusCol = table["Status"].Rows.AsString;
    var exitCodeCol = table["ExitCode"].Rows.AsString;
    var stdOutCol = table["StdOut"].Rows.AsString;
    var stdErrCol = table["StdErr"].Rows.AsString;

    // Every remaining column is a domain-specific benchmark property.
    var propColumns =
        (from c in table
         where c.Name != "BenchmarkFileName" && c.Name != "AcquireTime" && c.Name != "NormalizedRuntime" &&
               c.Name != "TotalProcessorTime" && c.Name != "WallClockTime" && c.Name != "PeakMemorySizeMB" &&
               c.Name != "Status" && c.Name != "ExitCode" && c.Name != "StdOut" && c.Name != "StdErr"
         select Tuple.Create(c.Name, c.Rows.AsString))
        .ToArray();

    var loaded = new BenchmarkResult[table.RowsCount];
    for (int row = 0; row < loaded.Length; row++)
    {
        var props = new Dictionary<string, string>(propColumns.Length);
        foreach (var column in propColumns)
        {
            if (column.Item2 != null)
            {
                props[column.Item1] = column.Item2[row];
            }
        }

        loaded[row] = new BenchmarkResult(
            expId,
            fileNameCol[row],
            DateTime.Parse(acquireCol[row], CultureInfo.InvariantCulture),
            double.Parse(normCol[row], CultureInfo.InvariantCulture),
            TimeSpan.FromSeconds(double.Parse(cpuTimeCol[row], CultureInfo.InvariantCulture)),
            TimeSpan.FromSeconds(double.Parse(wallTimeCol[row], CultureInfo.InvariantCulture)),
            double.Parse(memCol[row], CultureInfo.InvariantCulture),
            StatusFromString(statusCol[row]),
            string.IsNullOrEmpty(exitCodeCol[row]) ? null : (int?)int.Parse(exitCodeCol[row], CultureInfo.InvariantCulture),
            Utils.StringToStream(stdOutCol[row]),
            Utils.StringToStream(stdErrCol[row]),
            props);
    }
    return loaded;
}
/// <summary>
/// Starts a new experiment: assigns the next experiment id, enqueues all of its
/// benchmarks on the runner, and records the experiment in storage. Benchmark
/// results are persisted to storage only once the last benchmark of the
/// experiment has completed successfully.
/// </summary>
/// <param name="definition">Definition of the experiment; made relative to the storage location before use.</param>
/// <param name="creator">Optional name of the experiment creator, stored with the experiment.</param>
/// <param name="note">Optional free-form note, stored with the experiment.</param>
/// <param name="summaryName">Not used by this implementation; present to satisfy the base signature.</param>
/// <returns>The id assigned to the started experiment.</returns>
public override async Task <ExperimentID> StartExperiment(ExperimentDefinition definition, string creator = null, string note = null, string summaryName = null)
{
    definition = MakeRelativeDefinition(storage.Location, definition);

    // Ids come from an atomic counter so concurrent StartExperiment calls get distinct ids.
    ExperimentID id = Interlocked.Increment(ref lastId);
    DateTime submitted = DateTime.Now;
    double normal = await asyncNormal;
    var results = runner.Enqueue(id, definition, normal);

    // Shared completion bookkeeping for the per-benchmark continuations below.
    int benchmarksLeft = results.Length;
    BenchmarkResult[] benchmarks = new BenchmarkResult[results.Length];

    // Attach a continuation to every benchmark task: record its result and, in
    // whichever continuation observes the final completion (left == 0), persist
    // all results and drop the experiment from the running set.
    var resultsWithSave = results.Select((task, index) => task.ContinueWith(benchmark =>
    {
        int left = Interlocked.Decrement(ref benchmarksLeft);
        Trace.WriteLine(String.Format("Benchmark {0} completed, {1} left", index, left));
        if (benchmark.IsCompleted && !benchmark.IsFaulted)
        {
            benchmarks[index] = benchmark.Result;
            if (left == 0)
            {
                storage.AddResults(id, benchmarks);
                ExperimentInstance val;
                runningExperiments.TryRemove(id, out val);
            }
            return(benchmark.Result);
        }
        else
        {
            // NOTE(review): benchmark.Exception is an AggregateException; rethrowing it
            // faults this continuation so callers awaiting it observe the failure. A
            // faulted benchmark also means the counter reaches 0 without AddResults
            // ever being called — presumably intentional (don't save partial runs); confirm.
            throw benchmark.Exception;
        }
    }))
    .ToArray();

    ExperimentInstance experiment = new ExperimentInstance(id, definition, resultsWithSave);
    runningExperiments[id] = experiment;
    storage.AddExperiment(id, definition, submitted, creator, note);
    return(id);
}