Esempio n. 1
0
        /// <summary>
        /// Executes the benchmark suite across every configured CoreCLR version
        /// and gathers the per-version results.
        /// </summary>
        /// <returns>The aggregated results of the benchmark run</returns>
        public RunResult Run()
        {
            ThrowIfCancellationRequested();

            var result = new RunResult
            {
                Settings = m_run.Settings,
                Environment = new RunEnvironment
                {
                    RunStartTime         = DateTime.Now,
                    FrameworkDescription = RuntimeInformation.FrameworkDescription,
                    OperatingSystem      = RuntimeInformation.OSDescription,
                    OSArchitecture       = RuntimeInformation.OSArchitecture.ToString()
                }
            };

            Logger.LogAlways($"Running benchmarks with server GC: {m_run.Settings.ServerGC}");
            Logger.LogAlways($"Running benchmarks with concurrent GC: {m_run.Settings.ConcurrentGC}");

            foreach (var version in m_run.CoreClrVersions)
            {
                // these should have been validated already before running
                Debug.Assert(!string.IsNullOrEmpty(version.Path));
                Debug.Assert(!string.IsNullOrEmpty(version.Name));
                Debug.Assert(Directory.Exists(version.Path));
                Debug.Assert(m_versionMap.ContainsKey(version));

                PreparedCoreClrVersion prepared = m_versionMap[version];
                CoreclrVersionRunResult versionResult = RunVersion(prepared);
                result.PerVersionResults.Add(Tuple.Create(version, versionResult));
            }

            ThrowIfCancellationRequested();
            return result;
        }
Esempio n. 2
0
        /// <summary>
        /// Runs the requested number of iterations of a benchmark, one per
        /// subdirectory. If the benchmark does not specify an iteration count,
        /// it is run exactly once.
        /// </summary>
        /// <param name="version">The version of CoreCLR to run the benchmark on</param>
        /// <param name="bench">The benchmark to run</param>
        /// <param name="termCondition">The termination condition for this benchmark</param>
        /// <returns>The result of running the benchmark</returns>
        private BenchmarkResult RunBenchmarkImplWithIterations(PreparedCoreClrVersion version, Benchmark bench, TerminationCondition termCondition)
        {
            ThrowIfCancellationRequested();
            Logger.LogAlways($"Running iterations for benchmark {bench.Name}");

            var result = new BenchmarkResult { Benchmark = bench };
            int iterationCount = bench.Iterations ?? 1;

            for (int iteration = 0; iteration < iterationCount; iteration++)
            {
                Logger.LogAlways($"Beginning iteration {iteration} for benchmark {bench.Name}");

                // every iteration gets its own subdirectory of the current directory.
                string iterationDir = Path.Combine(Directory.GetCurrentDirectory(), iteration.ToString());
                Directory.CreateDirectory(iterationDir);
                Directory.SetCurrentDirectory(iterationDir);
                m_relativePath.Push(iteration.ToString());
                try
                {
                    string traceName = bench.Name + ".etl";
                    IterationResult iterResult;
                    m_traceCollector.StartTrace(traceName, m_run.CollectionLevel);
                    try
                    {
                        // everything is set up; launch the benchmark itself.
                        iterResult = RunBenchmarkImpl(version, bench, termCondition);
                    }
                    finally
                    {
                        m_traceCollector.StopTrace();
                    }

                    string currentRelativePath = Path.Combine(m_relativePath.Reverse().ToArray());

                    // TODO(segilles, xplat) adding .zip on the end is done by PerfView, perfcollect
                    // probably doesn't do this.
                    iterResult.TracePathLocation = Path.Combine(currentRelativePath, traceName + ".zip");

                    // persist the per-iteration result json that the analysis engine expects
                    File.WriteAllText(
                        Path.Combine(Directory.GetCurrentDirectory(), Constants.ResultJsonName),
                        JsonConvert.SerializeObject(iterResult, Formatting.Indented));
                    result.Iterations.Add(iterResult);
                }
                finally
                {
                    // restore the working directory and relative-path stack no matter what happened
                    Directory.SetCurrentDirectory(Path.Combine(Directory.GetCurrentDirectory(), ".."));
                    m_relativePath.Pop();
                }
            }

            // write out the benchmark json
            File.WriteAllText(
                Path.Combine(Directory.GetCurrentDirectory(), Constants.BenchmarkJsonName),
                JsonConvert.SerializeObject(bench, Formatting.Indented));

            ThrowIfCancellationRequested();
            return result;
        }
Esempio n. 3
0
        /// <summary>
        /// Prepares a single CoreCLR version for running. A fully-specified
        /// version is used as-is; a partial version is materialized by copying
        /// the shared binary folder into a fresh temporary directory and then
        /// overlaying the user-provided files on top of it.
        /// </summary>
        /// <param name="version">The version to prepare</param>
        /// <param name="settings">Run settings supplying the shared binary folder</param>
        /// <param name="preparedVersion">On success, the prepared version; otherwise null</param>
        /// <returns>True if preparation succeeded, false otherwise</returns>
        private static bool FixSingleCoreClrVersion(CoreClrVersion version, RunSettings settings, out PreparedCoreClrVersion preparedVersion)
        {
            // is this a fully-specified version (i.e. the user gave us a full directory)? if so, there's nothing
            // to do here.
            if (!version.IsPartial)
            {
                Logger.LogVerbose($"Version {version.Name} is complete and needs no additional processing");
                preparedVersion = new PreparedCoreClrVersion(version);
                return true;
            }

            // otherwise, we're going to need to create a new directory and populate it.
            // the idea here is to use the SharedBinaryFolder option given to us to populate
            // the new folder we create and then copy the user provided files on top of it.
            //
            // we need settings.SharedBinaryFolder to be present for this, so we error here
            // if it's not.
            if (string.IsNullOrEmpty(settings.SharedBinaryFolder))
            {
                Logger.LogError("The SharedBinaryFolder settings key was required and not provided. See the documentation for more information.");
                preparedVersion = null;
                return false;
            }

            if (!Directory.Exists(settings.SharedBinaryFolder))
            {
                Logger.LogError("The SharedBinaryFolder settings key referred to a folder that does not exist. See the documentation for more information.");
                preparedVersion = null;
                return false;
            }

            Debug.Assert(Path.IsPathRooted(settings.SharedBinaryFolder));
            var tempPath = Path.Combine(Path.GetTempPath(), "CoreGCBench.Runner", Guid.NewGuid().ToString());

            Logger.LogVerbose($"Version {version.Name} is partial, using temp directory {tempPath}");
            Directory.CreateDirectory(tempPath);

            Logger.LogVerbose($"Copying files from {settings.SharedBinaryFolder} to {tempPath}");
            // BUGFIX: the base population must come from the shared binary folder
            // (as the log line above and the validation above both assume), not
            // from the partial version folder itself.
            CopyDirectory(settings.SharedBinaryFolder, tempPath);

            foreach (var file in version.Files)
            {
                Logger.LogVerbose($"Copying file {file} from {version.Path} to {tempPath}");
                string filePath = Path.Combine(version.Path, file);
                if (!File.Exists(filePath))
                {
                    Logger.LogError($"File {file} for version {version.Name} does not exist at location {version.Path}");
                    preparedVersion = null;
                    return false;
                }

                string targetPath = filePath.Replace(version.Path, tempPath);
                // the target may live in a subdirectory that the shared folder
                // did not contain; make sure it exists before copying.
                Directory.CreateDirectory(Path.GetDirectoryName(targetPath));
                // BUGFIX: the shared-folder copy may already contain this file;
                // the user-provided file must win, so overwrite on collision
                // (File.Copy without the overwrite flag throws IOException here).
                File.Copy(filePath, targetPath, overwrite: true);
            }

            Logger.LogVerbose($"Successfully created temporary version folder at {tempPath}");
            preparedVersion = new PreparedCoreClrVersion(version, tempPath);
            return true;
        }
Esempio n. 4
0
        /// <summary>
        /// Runs the entire benchmark suite against one prepared CoreCLR version,
        /// writing the version description json alongside the results.
        /// </summary>
        /// <param name="version">The prepared version of CoreCLR being tested</param>
        /// <returns>The results of this run</returns>
        private CoreclrVersionRunResult RunVersion(PreparedCoreClrVersion version)
        {
            ThrowIfCancellationRequested();
            Logger.LogAlways($"Beginning run of version \"{version.Name}\"");

            var result = new CoreclrVersionRunResult();

            Debug.Assert(Directory.GetCurrentDirectory() == m_options.OutputDirectory);
            // TODO(segilles) error handling here. We should avoid propegating exceptions
            // as best we can.
            string versionDir = Path.Combine(Directory.GetCurrentDirectory(), version.Name);
            Directory.CreateDirectory(versionDir);
            Directory.SetCurrentDirectory(versionDir);
            m_relativePath.Push(version.Name);
            try
            {
                foreach (var benchmark in m_run.Suite)
                {
                    Debug.Assert(benchmark != null);
                    result.BenchmarkResults.Add(RunBenchmark(version, benchmark));
                }

                // write out the version description
                File.WriteAllText(
                    Path.Combine(versionDir, Constants.VersionJsonName),
                    JsonConvert.SerializeObject(version));

                ThrowIfCancellationRequested();
                return result;
            }
            finally
            {
                // restore the working directory and relative-path stack
                Directory.SetCurrentDirectory(Path.Combine(Directory.GetCurrentDirectory(), ".."));
                m_relativePath.Pop();
            }
        }
Esempio n. 5
0
        /// <summary>
        /// Runs a single benchmark by spawning a process and monitoring it until
        /// its exit.
        /// </summary>
        /// <param name="version">The coreclr version to test</param>
        /// <param name="bench">The benchmark to run</param>
        /// <param name="termCondition">The termination condition for this benchmark</param>
        /// <returns>The result from running the benchmark</returns>
        private IterationResult RunBenchmarkImpl(PreparedCoreClrVersion version, Benchmark bench, TerminationCondition termCondition)
        {
            ThrowIfCancellationRequested();
            string coreRun   = Path.Combine(version.Path, Utils.CoreRunName);
            string arguments = bench.Arguments ?? "";

            Debug.Assert(File.Exists(coreRun));
            Debug.Assert(m_executableProbeMap.ContainsKey(bench));
            string exePath = m_executableProbeMap[bench];

            // BUGFIX: Process is IDisposable; dispose it so the process handle
            // is released once we have read the exit information.
            using (Process proc = new Process())
            {
                proc.StartInfo.FileName        = coreRun;
                proc.StartInfo.Arguments       = exePath + " " + arguments;
                proc.StartInfo.UseShellExecute = false;
                proc.StartInfo.CreateNoWindow  = false;
                foreach (var pair in bench.EnvironmentVariables)
                {
                    proc.StartInfo.Environment[pair.Key] = pair.Value;
                }

                // GC mode comes from the run settings, not the benchmark
                proc.StartInfo.Environment[Constants.ServerGCVariable]     = m_run.Settings.ServerGC ? "1" : "0";
                proc.StartInfo.Environment[Constants.ConcurrentGCVariable] = m_run.Settings.ConcurrentGC ? "1" : "0";

                // run the process!
                RunProcess(termCondition, proc);
                Debug.Assert(proc.HasExited);

                IterationResult result = new IterationResult();

                result.DurationMsec = (long)(proc.ExitTime - proc.StartTime).TotalMilliseconds;
                result.ExitCode     = proc.ExitCode;
                result.Pid          = proc.Id;

                ThrowIfCancellationRequested();
                return result;
            }
        }
Esempio n. 6
0
        /// <summary>
        /// Runs a single benchmark on a given version of CoreCLR, inside its own
        /// subdirectory, and saves the results.
        /// </summary>
        /// <param name="version">The version of CoreCLR to run on</param>
        /// <param name="bench">The benchmark to run</param>
        /// <returns>The result from running the benchmark</returns>
        private BenchmarkResult RunBenchmark(PreparedCoreClrVersion version, Benchmark bench)
        {
            ThrowIfCancellationRequested();
            Logger.LogAlways($"Running benchmark {bench.Name}");

            string benchDir = Path.Combine(Directory.GetCurrentDirectory(), bench.Name);
            Directory.CreateDirectory(benchDir);
            Directory.SetCurrentDirectory(benchDir);
            m_relativePath.Push(bench.Name);
            try
            {
                using (TerminationCondition condition = ConstructTerminationCondition(bench))
                {
                    ThrowIfCancellationRequested();
                    return RunBenchmarkImplWithIterations(version, bench, condition);
                }
            }
            finally
            {
                // restore the working directory and relative-path stack
                Directory.SetCurrentDirectory(Path.Combine(Directory.GetCurrentDirectory(), ".."));
                m_relativePath.Pop();
            }
        }