/// <summary>
/// Samples new hyperparameters for the trainer, and sets them.
/// Returns true if success (new hyperparameters were suggested and set). Else, returns false.
/// </summary>
private static bool SampleHyperparameters(MLContext context, SuggestedTrainer trainer,
    IEnumerable<SuggestedPipelineRunDetail> history, bool isMaximizingMetric)
{
    var sps = ConvertToValueGenerators(trainer.SweepParams);
    var sweeper = new SmacSweeper(context,
        new SmacSweeper.Arguments
        {
            SweptParameters = sps
        });

    // Only learn from successful runs of this same trainer that actually carried a
    // non-empty hyperparameter set; other runs provide no usable sweep information.
    IEnumerable<SuggestedPipelineRunDetail> historyToUse = history
        .Where(r => r.RunSucceeded &&
            r.Pipeline.Trainer.TrainerName == trainer.TrainerName &&
            r.Pipeline.Trainer.HyperParamSet != null &&
            r.Pipeline.Trainer.HyperParamSet.Any());

    // get new set of hyperparameter values
    // FirstOrDefault (not First): the sweeper may propose no parameter sets, and the
    // contract of this method is to report failure via 'false', not to throw.
    var proposedParamSet = sweeper.ProposeSweeps(1,
        historyToUse.Select(h => h.ToRunResult(isMaximizingMetric))).FirstOrDefault();
    if (proposedParamSet == null || !proposedParamSet.Any())
    {
        return false;
    }

    // associate proposed parameter set with trainer, so that smart hyperparameter
    // sweepers (like KDO) can map them back.
    trainer.SetHyperparamValues(proposedParamSet);
    return true;
}
/// <summary>
/// Samples new hyperparameters for the trainer, and sets them.
/// Returns true if success (new hyperparameters were suggested and set). Else, returns false.
/// </summary>
private static bool SampleHyperparameters(MLContext context, SuggestedTrainer trainer,
    IEnumerable<SuggestedPipelineRunDetail> history, bool isMaximizingMetric, IChannel logger)
{
    try
    {
        var sps = ConvertToValueGenerators(trainer.SweepParams);
        var sweeper = new SmacSweeper(context,
            new SmacSweeper.Arguments
            {
                SweptParameters = sps
            });

        // Only learn from successful runs of this same trainer that carried a non-empty
        // hyperparameter set and a finite score; NaN/Infinity scores would corrupt the
        // sweeper's model of the search space.
        IEnumerable<SuggestedPipelineRunDetail> historyToUse = history
            .Where(r => r.RunSucceeded &&
                r.Pipeline.Trainer.TrainerName == trainer.TrainerName &&
                r.Pipeline.Trainer.HyperParamSet != null &&
                r.Pipeline.Trainer.HyperParamSet.Any() &&
                FloatUtils.IsFinite(r.Score));

        // get new set of hyperparameter values
        // FirstOrDefault may yield null when the sweeper proposes nothing; guard before
        // calling Any(), otherwise this NullReferenceExceptions into the catch below
        // instead of returning false as the contract requires.
        var proposedParamSet = sweeper.ProposeSweeps(1,
            historyToUse.Select(h => h.ToRunResult(isMaximizingMetric))).FirstOrDefault();
        if (proposedParamSet == null || !proposedParamSet.Any())
        {
            return false;
        }

        // associate proposed parameter set with trainer, so that smart hyperparameter
        // sweepers (like KDO) can map them back.
        trainer.SetHyperparamValues(proposedParamSet);
        return true;
    }
    catch (Exception ex)
    {
        // Log and rethrow (bare 'throw;' preserves the stack trace).
        logger.Error($"SampleHyperparameters failed with exception: {ex}");
        throw;
    }
}