        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="groupIdColumnName">Name for the GroupId column.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, string groupIdColumnName = "GroupId")
        {
            var metricsAgent       = new RankingMetricsAgent(null, metric, groupIdColumnName);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="optimizationMetricTruncationLevel">Maximum truncation level for computing (N)DCG. Defaults to 10.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, uint optimizationMetricTruncationLevel = 10)
        {
            var metricsAgent       = new RankingMetricsAgent(null, metric, optimizationMetricTruncationLevel);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
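
A brief usage sketch for the two overloads above, assuming `experimentResult` came from an AutoML ranking experiment (the variable names are illustrative, not part of this snippet); named arguments are used so the intended overload is unambiguous:

        // Illustrative usage only; `experimentResult` is a hypothetical
        // ExperimentResult<RankingMetrics> produced elsewhere.
        IEnumerable<RunDetail<RankingMetrics>> runs = experimentResult.RunDetails;

        // Overload keyed on the group id column name.
        RunDetail<RankingMetrics> bestByGroup =
            runs.Best(RankingMetric.Ndcg, groupIdColumnName: "GroupId");

        // Overload keyed on the (N)DCG truncation level.
        RunDetail<RankingMetrics> bestTruncated =
            runs.Best(RankingMetric.Dcg, optimizationMetricTruncationLevel: 10);

        Console.WriteLine($"Best trainer: {bestByGroup.TrainerName}");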
Example #3
        /// <summary>
        /// Select the best run from an enumeration of experiment cross validation runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment cross validation run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <returns>The best experiment run.</returns>
        public static CrossValidationRunDetail<RankingMetrics> Best(this IEnumerable<CrossValidationRunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg)
        {
            var metricsAgent       = new RankingMetricsAgent(null, metric);
            var isMetricMaximizing = new OptimizingMetricInfo(metric).IsMaximizing;

            return BestResultUtil.GetBestRun(results, metricsAgent, isMetricMaximizing);
        }
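
A similar sketch for the cross-validation overload, assuming `cvExperimentResult` holds the cross-validation results of an AutoML ranking experiment and `System.Linq` is in scope (names are illustrative):

        // Illustrative usage only; `cvExperimentResult` is a hypothetical
        // CrossValidationExperimentResult<RankingMetrics> produced elsewhere.
        CrossValidationRunDetail<RankingMetrics> bestCvRun =
            cvExperimentResult.RunDetails.Best(RankingMetric.Ndcg);

        // Average the deepest NDCG value across the per-fold results of the best run.
        double meanNdcg = bestCvRun.Results
            .Average(r => r.ValidationMetrics.NormalizedDiscountedCumulativeGains.Last());

        Console.WriteLine($"Best trainer: {bestCvRun.TrainerName}, mean NDCG: {meanNdcg:F4}");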
Example #4
        public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                           RankingMetric metric, string groupIdColumnName)
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, groupIdColumnName);
            var metricInfo   = new OptimizingMetricInfo(metric);

            return GetBestRun(results, metricsAgent, metricInfo.IsMaximizing);
        }
Example #5
        public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                           RankingMetric metric, uint dcgTruncationLevel)
        {
            var metricsAgent = new RankingMetricsAgent(null, metric, dcgTruncationLevel);
            var metricInfo   = new OptimizingMetricInfo(metric);

            return GetBestRun(results, metricsAgent, metricInfo.IsMaximizing);
        }
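
The generic `GetBestRun(results, metricsAgent, isMetricMaximizing)` that these two helpers delegate to is not shown above. A minimal sketch of what such a selector could look like, assuming the metrics agent exposes a `GetScore(TMetrics)` method (this is an illustration, not the library's actual implementation):

        // Sketch only, not the library's code: pick the run whose validation
        // metrics score best according to the metrics agent.
        public static RunDetail<TMetrics> GetBestRun<TMetrics>(
            IEnumerable<RunDetail<TMetrics>> results,
            IMetricsAgent<TMetrics> metricsAgent,
            bool isMetricMaximizing)
        {
            // Ignore runs that failed and therefore have no validation metrics.
            var scored = results
                .Where(r => r.ValidationMetrics != null)
                .Select(r => (run: r, score: metricsAgent.GetScore(r.ValidationMetrics)))
                .ToList();

            return isMetricMaximizing
                ? scored.OrderByDescending(x => x.score).First().run
                : scored.OrderBy(x => x.score).First().run;
        }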