/// <summary>
        /// Constructs a graph starting from the given graph entry points and executes all project
        /// and graph predictors against each project in the resulting graph.
        /// </summary>
        /// <param name="projectGraph">Project graph to run predictions on.</param>
        /// <param name="projectPredictionCollector">The prediction collector to use.</param>
        public void PredictInputsAndOutputs(
            ProjectGraph projectGraph,
            IProjectPredictionCollector projectPredictionCollector)
        {
            projectGraph.ThrowIfNull(nameof(projectGraph));
            projectPredictionCollector.ThrowIfNull(nameof(projectPredictionCollector));

            // When only one thread is allowed, a plain loop avoids the overhead of Parallel.ForEach.
            if (_options.MaxDegreeOfParallelism == 1)
            {
                foreach (var node in projectGraph.ProjectNodes)
                {
                    ExecuteAllPredictors(node, _projectPredictors, _projectGraphPredictors, projectPredictionCollector);
                }

                return;
            }

            var parallelOptions = new ParallelOptions { MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism };
            Parallel.ForEach(
                projectGraph.ProjectNodes.ToArray(),
                parallelOptions,
                node => ExecuteAllPredictors(node, _projectPredictors, _projectGraphPredictors, projectPredictionCollector));
        }
        /// <summary>
        /// Executes all project predictors against the provided project instance and reports
        /// every result to the provided prediction collector. Custom collectors can be used to
        /// avoid translating predictions from <see cref="ProjectPredictions"/> to the caller's
        /// own object model, or for custom path normalization logic.
        /// </summary>
        /// <param name="projectInstance">The project instance to execute predictors against.</param>
        /// <param name="projectPredictionCollector">The prediction collector to use.</param>
        public void PredictInputsAndOutputs(ProjectInstance projectInstance, IProjectPredictionCollector projectPredictionCollector)
        {
            projectInstance.ThrowIfNull(nameof(projectInstance));
            projectPredictionCollector.ThrowIfNull(nameof(projectPredictionCollector));

            ExecuteProjectPredictors(
                projectInstance,
                _projectPredictors,
                projectPredictionCollector,
                _options.MaxDegreeOfParallelism);
        }