        /// <summary>
        /// Executes all project and graph predictors against every project in the provided project graph
        /// and reports the results to the provided prediction collector.
        /// </summary>
        /// <param name="projectGraph">Project graph to run predictions on.</param>
        /// <param name="projectPredictionCollector">The prediction collector to use.</param>
        public void PredictInputsAndOutputs(
            ProjectGraph projectGraph,
            IProjectPredictionCollector projectPredictionCollector)
        {
            projectGraph.ThrowIfNull(nameof(projectGraph));
            projectPredictionCollector.ThrowIfNull(nameof(projectPredictionCollector));

            // Special-case single-threaded prediction to avoid the overhead of Parallel.ForEach in favor of a simple loop.
            if (_options.MaxDegreeOfParallelism == 1)
            {
                foreach (var projectNode in projectGraph.ProjectNodes)
                {
                    ExecuteAllPredictors(projectNode, _projectPredictors, _projectGraphPredictors, projectPredictionCollector);
                }
            }
            else
            {
                Parallel.ForEach(
                    projectGraph.ProjectNodes.ToArray(),
                    new ParallelOptions()
                    {
                        MaxDegreeOfParallelism = _options.MaxDegreeOfParallelism,
                    },
                    projectNode => ExecuteAllPredictors(projectNode, _projectPredictors, _projectGraphPredictors, projectPredictionCollector));
            }
        }
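
        // Usage sketch (hedged, not part of this type): a caller might build a graph from a single
        // entry point and run predictions with a custom collector. "MyCollector" is a hypothetical
        // IProjectPredictionCollector implementation and "CreateExecutor" stands in for however the
        // executor was constructed with the desired predictors.
        //
        //     ProjectPredictionExecutor executor = CreateExecutor();   // hypothetical factory
        //     var graph = new ProjectGraph(@"src\App\App.csproj");     // Microsoft.Build.Graph single-entry-point constructor
        //     executor.PredictInputsAndOutputs(graph, new MyCollector());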

        internal static void ExecuteProjectPredictors(
            ProjectInstance projectInstance,
            ValueAndTypeName<IProjectPredictor>[] projectPredictors,
            IProjectPredictionCollector projectPredictionCollector,
            int maxDegreeOfParallelism)
        {
            // Special-case single-threaded prediction to avoid the overhead of Parallel.For in favor of a simple loop.
            if (maxDegreeOfParallelism == 1)
            {
                for (var i = 0; i < projectPredictors.Length; i++)
                {
                    ExecuteSingleProjectPredictor(projectInstance, projectPredictors[i], projectPredictionCollector);
                }
            }
            else
            {
                Parallel.For(
                    0,
                    projectPredictors.Length,
                    new ParallelOptions()
                    {
                        MaxDegreeOfParallelism = maxDegreeOfParallelism,
                    },
                    i => ExecuteSingleProjectPredictor(projectInstance, projectPredictors[i], projectPredictionCollector));
            }
        }

        /// <summary>
        /// Executes all project predictors against the provided project instance and reports all results
        /// to the provided prediction collector. Custom collectors can be used to avoid translating
        /// predictions from <see cref="ProjectPredictions"/> to the caller's own object model,
        /// or for custom path normalization logic.
        /// </summary>
        /// <param name="projectInstance">The project instance to execute predictors against.</param>
        /// <param name="projectPredictionCollector">The prediction collector to use.</param>
        public void PredictInputsAndOutputs(ProjectInstance projectInstance, IProjectPredictionCollector projectPredictionCollector)
        {
            projectInstance.ThrowIfNull(nameof(projectInstance));
            projectPredictionCollector.ThrowIfNull(nameof(projectPredictionCollector));

            ExecuteProjectPredictors(projectInstance, _projectPredictors, projectPredictionCollector, _options.MaxDegreeOfParallelism);
        }
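
        // Custom-collector sketch (hedged): the IProjectPredictionCollector member names and signatures
        // below are assumptions inferred from how predictions are reported here, not copied from the
        // interface; check the interface definition before relying on them. Collectors may be invoked
        // from multiple threads, so any storage they use should be thread-safe.
        //
        //     internal sealed class InputFileCollector : IProjectPredictionCollector
        //     {
        //         public ConcurrentBag<string> InputFiles { get; } = new ConcurrentBag<string>();
        //
        //         public void AddInputFile(string path, ProjectInstance projectInstance, string predictorName) => InputFiles.Add(path);
        //         public void AddInputDirectory(string path, ProjectInstance projectInstance, string predictorName) { }
        //         public void AddOutputFile(string path, ProjectInstance projectInstance, string predictorName) { }
        //         public void AddOutputDirectory(string path, ProjectInstance projectInstance, string predictorName) { }
        //     }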

        /// <summary>
        /// Initializes a new instance of the <see cref="ProjectPredictionReporter"/> struct.
        /// </summary>
        /// <remarks>
        /// Internal to avoid public creation.
        /// </remarks>
        internal ProjectPredictionReporter(
            IProjectPredictionCollector predictionCollector,
            ProjectInstance projectInstance,
            string predictorName)
        {
            _predictionCollector = predictionCollector;
            _projectInstance = projectInstance;
            _predictorName = predictorName;
        }

        private static void ExecuteSingleProjectPredictor(
            ProjectInstance projectInstance,
            ValueAndTypeName<IProjectPredictor> projectPredictorAndName,
            IProjectPredictionCollector projectPredictionCollector)
        {
            var predictionReporter = new ProjectPredictionReporter(
                projectPredictionCollector,
                projectInstance,
                projectPredictorAndName.TypeName);

            projectPredictorAndName.Value.PredictInputsAndOutputs(
                projectInstance,
                predictionReporter);
        }

        private static void ExecuteAllPredictors(
            ProjectGraphNode projectGraphNode,
            ValueAndTypeName<IProjectPredictor>[] projectPredictors,
            ValueAndTypeName<IProjectGraphPredictor>[] projectGraphPredictors,
            IProjectPredictionCollector projectPredictionCollector)
        {
            ProjectInstance projectInstance = projectGraphNode.ProjectInstance;

            // Run the project predictors. Use single-threaded prediction since we're already parallelizing on projects.
            ProjectPredictionExecutor.ExecuteProjectPredictors(projectInstance, projectPredictors, projectPredictionCollector, maxDegreeOfParallelism: 1);

            // Run the graph predictors
            for (var i = 0; i < projectGraphPredictors.Length; i++)
            {
                var predictionReporter = new ProjectPredictionReporter(
                    projectPredictionCollector,
                    projectInstance,
                    projectGraphPredictors[i].TypeName);

                projectGraphPredictors[i].Value.PredictInputsAndOutputs(
                    projectGraphNode,
                    predictionReporter);
            }
        }