Example #1
public InferredPipeline(IEnumerable<SuggestedTransform> transforms,
                        SuggestedTrainer trainer,
                        MLContext context = null)
{
    // Clone the suggested transforms and trainer so this pipeline owns
    // independent copies that can be mutated later (e.g., hyperparameter updates).
    Transforms = transforms.Select(t => t.Clone()).ToList();
    Trainer    = trainer.Clone();

    // Fall back to a fresh MLContext when the caller does not supply one.
    _context   = context ?? new MLContext();

    // Append normalization transforms where the pipeline needs them.
    AddNormalizationTransforms();
}
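
For context, a minimal sketch of how this constructor might be called. The helper name and its parameters are placeholders, and it assumes the internal AutoML types (InferredPipeline, SuggestedTransform, SuggestedTrainer) are accessible from the calling code:

// Hypothetical helper; BuildPipeline and its parameter names are placeholders.
private static InferredPipeline BuildPipeline(
    IEnumerable<SuggestedTransform> inferredTransforms,
    SuggestedTrainer suggestedTrainer)
{
    // Passing null for the context makes the constructor create a fresh MLContext internally.
    return new InferredPipeline(inferredTransforms, suggestedTrainer, context: null);
}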
Example #2
/// <summary>
/// Given a prediction task and a target maximum number of iterations, returns the set of
/// all permissible trainers (with their sweeper params, if defined).
/// </summary>
/// <returns>Array of viable learners.</returns>
public static IEnumerable<SuggestedTrainer> AllowedTrainers(MLContext mlContext, TaskKind task,
                                                            int maxIterations)
{
    // Look up the trainer extensions that are valid for this task and iteration budget.
    var trainerExtensions = TrainerExtensionCatalog.GetTrainers(task, maxIterations);

    // Wrap each extension in a SuggestedTrainer so it carries its sweepable parameters.
    var trainers = new List<SuggestedTrainer>();
    foreach (var trainerExtension in trainerExtensions)
    {
        var learner = new SuggestedTrainer(mlContext, trainerExtension);
        trainers.Add(learner);
    }

    return trainers.ToArray();
}
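
A rough usage sketch of the method above. The TaskKind.BinaryClassification value, the iteration budget, and the surrounding code are assumptions for illustration; it presumes the call site can see AllowedTrainers and the internal AutoML types:

// Hypothetical call site; assumes this runs where AllowedTrainers is visible.
var mlContext  = new MLContext();
var candidates = AllowedTrainers(mlContext, TaskKind.BinaryClassification, maxIterations: 10);

// TrainerName is the same property used in Example #3's history filter.
foreach (var candidate in candidates)
{
    Console.WriteLine(candidate.TrainerName);
}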
Example #3
private static void SampleHyperparameters(SuggestedTrainer trainer, IEnumerable<PipelineRunResult> history, bool isMaximizingMetric)
{
    // Build a SMAC sweeper over the trainer's sweepable parameters.
    var sps     = ConvertToValueGenerators(trainer.SweepParams);
    var sweeper = new SmacSweeper(
        new SmacSweeper.Arguments
        {
            SweptParameters = sps
        });

    // Only consider successful runs of this same trainer that recorded a hyperparameter set.
    IEnumerable<PipelineRunResult> historyToUse = history
        .Where(r => r.RunSucceded &&
                    r.Pipeline.Trainer.TrainerName == trainer.TrainerName &&
                    r.Pipeline.Trainer.HyperParamSet != null);

    // Get a new set of hyperparameter values.
    var proposedParamSet = sweeper.ProposeSweeps(1, historyToUse.Select(h => h.ToRunResult(isMaximizingMetric))).First();

    // Associate the proposed param set with the trainer, so that smart hyperparameter
    // sweepers (like KDO) can map them back.
    trainer.SetHyperparamValues(proposedParamSet);
}
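
Since SampleHyperparameters is private, any caller would live in the same class. Below is a hypothetical driver loop sketching how it could be invoked before each pipeline run; RunSweepIterations and its body are assumptions, not part of the code shown above:

// Hypothetical sketch: propose fresh hyperparameters before each run,
// feeding every completed run back into the history the sweeper learns from.
private static void RunSweepIterations(SuggestedTrainer trainer,
                                       List<PipelineRunResult> history,
                                       bool isMaximizingMetric,
                                       int iterations)
{
    for (var i = 0; i < iterations; i++)
    {
        SampleHyperparameters(trainer, history, isMaximizingMetric);

        // ... run the pipeline with the trainer's updated hyperparameters and
        // append the resulting PipelineRunResult to history ...
    }
}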