        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="groupIdColumnName">Name for the GroupId column.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, string groupIdColumnName = "GroupId")
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, groupIdColumnName);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
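A rough usage sketch of this overload (the experimentResult variable and the "QueryId" column name below are assumptions for illustration, not part of the API shown above):

// Hypothetical usage: pick the completed run with the best NDCG,
// grouping rows by an assumed "QueryId" column.
IEnumerable<RunDetail<RankingMetrics>> runs = experimentResult.RunDetails;
RunDetail<RankingMetrics> bestRun = runs.Best(RankingMetric.Ndcg, groupIdColumnName: "QueryId");
Console.WriteLine($"Best trainer: {bestRun.TrainerName}");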
        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="optimizationMetricTruncationLevel">Maximum truncation level for computing (N)DCG. Defaults to 10.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, uint optimizationMetricTruncationLevel = 10)
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, optimizationMetricTruncationLevel);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
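A similar hedged sketch for the truncation-level overload, again assuming experimentResult already holds the completed ranking runs:

// Hypothetical usage: optimize NDCG@3 instead of the default NDCG@10.
// The named argument selects this overload rather than the GroupId one.
RunDetail<RankingMetrics> bestAtThree =
    experimentResult.RunDetails.Best(RankingMetric.Ndcg, optimizationMetricTruncationLevel: 3);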
        /// <summary>
        /// Select the best run from an enumeration of experiment cross validation runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment cross validation run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <returns>The best experiment run.</returns>
        public static CrossValidationRunDetail<RegressionMetrics> Best(this IEnumerable<CrossValidationRunDetail<RegressionMetrics>> results, RegressionMetric metric = RegressionMetric.RSquared)
        {
            var metricsAgent = new RegressionMetricsAgent(null, metric);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
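And a sketch of the cross-validation variant, assuming cvResult is the result object returned by a cross-validated regression experiment (System.Linq is needed for the averaging step):

// Hypothetical usage: pick the CV run with the best R-squared,
// then average the metric across folds for reporting.
CrossValidationRunDetail<RegressionMetrics> bestCvRun = cvResult.RunDetails.Best(RegressionMetric.RSquared);
double avgRSquared = bestCvRun.Results.Average(r => r.ValidationMetrics.RSquared);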
Example #4
 // Stores the experiment's dependencies and initializes the run history and model cache directory.
 public Experiment(MLContext context,
                   TaskKind task,
                   OptimizingMetricInfo metricInfo,
                   IProgress<TRunDetail> progressCallback,
                   ExperimentSettings experimentSettings,
                   IMetricsAgent<TMetrics> metricsAgent,
                   IEnumerable<TrainerName> trainerAllowList,
                   DatasetColumnInfo[] datasetColumnInfo,
                   IRunner<TRunDetail> runner,
                   IChannel logger)
 {
     _context = context;
     _history = new List<SuggestedPipelineRunDetail>();
     _optimizingMetricInfo = metricInfo;
     _task = task;
     _progressCallback = progressCallback;
     _experimentSettings = experimentSettings;
     _metricsAgent = metricsAgent;
     _trainerAllowList = trainerAllowList;
     _modelDirectory = GetModelDirectory(_context.TempFilePath, _experimentSettings.CacheDirectoryName);
     _datasetColumnInfo = datasetColumnInfo;
     _runner = runner;
     _logger = logger;
     _experimentTimerExpired = false;
 }
Example #5
        public static RunDetail<MulticlassClassificationMetrics> GetBestRun(IEnumerable<RunDetail<MulticlassClassificationMetrics>> results,
                                                                             MulticlassClassificationMetric metric)
        {
            var metricsAgent = new MultiMetricsAgent(null, metric);
            var metricInfo = new OptimizingMetricInfo(metric);

            return GetBestRun(results, metricsAgent, metricInfo.IsMaximizing);
        }
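A brief usage sketch of this multiclass overload (runDetails is assumed to hold the completed runs of a multiclass experiment):

// Hypothetical usage: rank completed multiclass runs by micro-accuracy.
RunDetail<MulticlassClassificationMetrics> best =
    GetBestRun(runDetails, MulticlassClassificationMetric.MicroAccuracy);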
        public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                            RankingMetric metric, string groupIdColumnName)
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, groupIdColumnName);
            var metricInfo = new OptimizingMetricInfo(metric);

            return GetBestRun(results, metricsAgent, metricInfo.IsMaximizing);
        }
Example #7
        public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                            RankingMetric metric, uint dcgTruncationLevel)
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, dcgTruncationLevel);
            var metricInfo = new OptimizingMetricInfo(metric);

            return GetBestRun(results, metricsAgent, metricInfo.IsMaximizing);
        }
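All of these overloads funnel into a shared generic GetBestRun whose body is not shown here. The following is only a minimal sketch of such a core, assuming IMetricsAgent<TMetrics> exposes a GetScore(TMetrics) method (an assumption based on how the agents are constructed above):

// Hedged sketch, not the library's implementation: score each run that produced
// validation metrics and keep the highest or lowest score as appropriate.
public static RunDetail<TMetrics> GetBestRun<TMetrics>(IEnumerable<RunDetail<TMetrics>> results,
                                                       IMetricsAgent<TMetrics> metricsAgent,
                                                       bool isMetricMaximizing)
{
    var scored = results
        .Where(r => r.ValidationMetrics != null)
        .Select(r => (Run: r, Score: metricsAgent.GetScore(r.ValidationMetrics)))
        .ToList();

    if (scored.Count == 0)
    {
        return null;
    }

    var best = isMetricMaximizing
        ? scored.OrderByDescending(s => s.Score).First()
        : scored.OrderBy(s => s.Score).First();
    return best.Run;
}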
Example #8
 // Base-class constructor: stores shared experiment state and opens the "AutoML" logging channel.
 internal ExperimentBase(MLContext context,
                         IMetricsAgent<TMetrics> metricsAgent,
                         OptimizingMetricInfo optimizingMetricInfo,
                         TExperimentSettings settings,
                         TaskKind task,
                         IEnumerable<TrainerName> trainerAllowList)
 {
     Context = context;
     MetricsAgent = metricsAgent;
     OptimizingMetricInfo = optimizingMetricInfo;
     Settings = settings;
     _logger = ((IChannelProvider)context).Start("AutoML");
     _task = task;
     _trainerAllowList = trainerAllowList;
 }
 public CrossValSummaryRunner(MLContext context,
                              IDataView[] trainDatasets,
                              IDataView[] validDatasets,
                              IMetricsAgent<TMetrics> metricsAgent,
                              IEstimator<ITransformer> preFeaturizer,
                              ITransformer[] preprocessorTransforms,
                              string labelColumn,
                              OptimizingMetricInfo optimizingMetricInfo,
                              IChannel logger)
 {
     _context = context;
     _trainDatasets = trainDatasets;
     _validDatasets = validDatasets;
     _metricsAgent = metricsAgent;
     _preFeaturizer = preFeaturizer;
     _preprocessorTransforms = preprocessorTransforms;
     _labelColumn = labelColumn;
     _optimizingMetricInfo = optimizingMetricInfo;
     _logger = logger;
     _modelInputSchema = trainDatasets[0].Schema; // every fold shares the same input schema, so the first fold's schema is representative
 }