/// <summary>
/// Picks the best run among <paramref name="results"/> for the given ranking
/// <paramref name="metric"/>, grouping rows by <paramref name="groupIdColumnName"/>.
/// </summary>
public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                   RankingMetric metric, string groupIdColumnName)
{
    // Delegate to the agent-based overload; the direction of optimization
    // (maximize vs. minimize) comes from OptimizingMetricInfo.
    return GetBestRun(results,
                      new RankingMetricsAgent(null, metric, groupIdColumnName),
                      new OptimizingMetricInfo(metric).IsMaximizing);
}
// Example #2
        /// <summary>
        /// Picks the best run among <paramref name="results"/> for the given ranking
        /// <paramref name="metric"/>, computing (N)DCG up to <paramref name="dcgTruncationLevel"/>.
        /// </summary>
        public static RunDetail<RankingMetrics> GetBestRun(IEnumerable<RunDetail<RankingMetrics>> results,
                                                           RankingMetric metric, uint dcgTruncationLevel)
        {
            // Delegate to the agent-based overload; the direction of optimization
            // (maximize vs. minimize) comes from OptimizingMetricInfo.
            return GetBestRun(results,
                              new RankingMetricsAgent(null, metric, dcgTruncationLevel),
                              new OptimizingMetricInfo(metric).IsMaximizing);
        }
        /// <summary>
        /// Creates an agent that scores ranking runs by <paramref name="metric"/>.
        /// </summary>
        /// <param name="mlContext">Context used to surface user-argument errors.</param>
        /// <param name="metric">Ranking metric to optimize.</param>
        /// <param name="optimizationMetricTruncationLevel">Truncation level for computing (N)DCG; must be greater than 0.</param>
        public RankingMetricsAgent(MLContext mlContext, RankingMetric metric, uint optimizationMetricTruncationLevel)
        {
            _mlContext        = mlContext;
            _optimizingMetric = metric;

            // The parameter is unsigned, so the only out-of-range value is 0
            // (the original `<= 0` check reduced to this anyway).
            if (optimizationMetricTruncationLevel == 0)
            {
                // NOTE(review): callers in this file pass a null mlContext, so this
                // throw path would NRE instead of raising the user-arg error — confirm.
                throw _mlContext.ExceptUserArg(nameof(optimizationMetricTruncationLevel), "DCG Truncation Level must be greater than 0");
            }

            // We want to make sure we always report metrics for at least 10 results (e.g. NDCG@10) to the user.
            // Producing extra results adds no measurable performance impact, so we report at least 2x of the
            // user's requested optimization truncation level.
            // Fix: the original assigned the raw level, contradicting the comment above;
            // apply the documented "at least 10, at least 2x requested" widening.
            _dcgTruncationLevel = Math.Max(10, 2 * (int)optimizationMetricTruncationLevel);
        }
        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="groupIdColumnName">Name for the GroupId column.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, string groupIdColumnName = "GroupId")
        {
            // Score each run with a ranking agent for the chosen metric, then pick
            // the winner in the metric's optimization direction.
            var info = new OptimizingMetricInfo(metric);

            return BestResultUtil.GetBestRun(results,
                                             new RankingMetricsAgent(null, metric, groupIdColumnName),
                                             info.IsMaximizing);
        }
 /// <summary>
 /// Creates an agent that scores ranking runs by <paramref name="optimizingMetric"/>,
 /// grouping rows by <paramref name="groupIdColumnName"/>.
 /// </summary>
 public RankingMetricsAgent(MLContext mlContext, RankingMetric optimizingMetric, string groupIdColumnName)
 {
     // Plain field capture; assignments are independent of one another.
     _groupIdColumnName = groupIdColumnName;
     _optimizingMetric = optimizingMetric;
     _mlContext = mlContext;
 }
// Example #6
        /// <summary>
        /// Returns whether <paramref name="metrics"/> represents a perfect score for
        /// <paramref name="metric"/> at the given DCG truncation level.
        /// </summary>
        private static bool IsPerfectModel(RankingMetrics metrics, RankingMetric metric, uint dcgTruncationLevel)
        {
            // Delegate to the agent-based overload with a throwaway agent.
            return IsPerfectModel(new RankingMetricsAgent(null, metric, dcgTruncationLevel), metrics);
        }
// Example #7
 /// <summary>
 /// Scores <paramref name="metrics"/> under <paramref name="metric"/>, honoring
 /// the given DCG truncation level.
 /// </summary>
 private static double GetScore(RankingMetrics metrics, RankingMetric metric, uint dcgTruncationLevel)
 {
     var agent = new RankingMetricsAgent(null, metric, dcgTruncationLevel);
     return agent.GetScore(metrics);
 }
        /// <summary>
        /// Select the best run from an enumeration of experiment runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <param name="optimizationMetricTruncationLevel">Maximum truncation level for computing (N)DCG. Defaults to 10.</param>
        /// <returns>The best experiment run.</returns>
        public static RunDetail<RankingMetrics> Best(this IEnumerable<RunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg, uint optimizationMetricTruncationLevel = 10)
        {
            // Score each run with a ranking agent for the chosen metric, then pick
            // the winner in the metric's optimization direction.
            var info = new OptimizingMetricInfo(metric);

            return BestResultUtil.GetBestRun(results,
                                             new RankingMetricsAgent(null, metric, optimizationMetricTruncationLevel),
                                             info.IsMaximizing);
        }
// Example #9
        /// <summary>
        /// Returns whether <paramref name="metrics"/> represents a perfect score for
        /// <paramref name="metric"/>.
        /// </summary>
        private static bool IsPerfectModel(RankingMetrics metrics, RankingMetric metric)
        {
            // Delegate to the agent-based overload with a throwaway agent.
            return IsPerfectModel(new RankingMetricsAgent(null, metric), metrics);
        }
// Example #10
 /// <summary>
 /// Scores <paramref name="metrics"/> under <paramref name="metric"/>.
 /// </summary>
 private static double GetScore(RankingMetrics metrics, RankingMetric metric)
 {
     var agent = new RankingMetricsAgent(null, metric);
     return agent.GetScore(metrics);
 }
        /// <summary>
        /// Returns whether <paramref name="metrics"/> represents a perfect score for
        /// <paramref name="metric"/>, grouping rows by <paramref name="groupIdColumnName"/>.
        /// </summary>
        private static bool IsPerfectModel(RankingMetrics metrics, RankingMetric metric, string groupIdColumnName)
        {
            // Delegate to the agent-based overload with a throwaway agent.
            return IsPerfectModel(new RankingMetricsAgent(null, metric, groupIdColumnName), metrics);
        }
 /// <summary>
 /// Scores <paramref name="metrics"/> under <paramref name="metric"/>, grouping
 /// rows by <paramref name="groupIdColumnName"/>.
 /// </summary>
 private static double GetScore(RankingMetrics metrics, RankingMetric metric, string groupIdColumnName)
 {
     var agent = new RankingMetricsAgent(null, metric, groupIdColumnName);
     return agent.GetScore(metrics);
 }
// Example #13
 /// <summary>
 /// Creates an agent that scores ranking runs by <paramref name="optimizingMetric"/>.
 /// </summary>
 public RankingMetricsAgent(MLContext mlContext, RankingMetric optimizingMetric)
 {
     // Plain field capture; assignments are independent of one another.
     _optimizingMetric = optimizingMetric;
     _mlContext = mlContext;
 }
 /// <summary>
 /// Describes the optimization direction for a ranking metric.
 /// </summary>
 public OptimizingMetricInfo(RankingMetric rankingMetric)
 {
     // The parameter's value is never inspected: every ranking metric handled
     // here is treated as higher-is-better — presumably NDCG/DCG variants;
     // TODO(review) confirm against the RankingMetric enum.
     IsMaximizing = true;
 }
// Example #15
        /// <summary>
        /// Select the best run from an enumeration of experiment cross validation runs.
        /// </summary>
        /// <param name="results">Enumeration of AutoML experiment cross validation run results.</param>
        /// <param name="metric">Metric to consider when selecting the best run.</param>
        /// <returns>The best experiment run.</returns>
        public static CrossValidationRunDetail<RankingMetrics> Best(this IEnumerable<CrossValidationRunDetail<RankingMetrics>> results, RankingMetric metric = RankingMetric.Ndcg)
        {
            // Score each cross-validation run with a ranking agent for the chosen
            // metric, then pick the winner in the metric's optimization direction.
            var info = new OptimizingMetricInfo(metric);

            return BestResultUtil.GetBestRun(results,
                                             new RankingMetricsAgent(null, metric),
                                             info.IsMaximizing);
        }