Example #1
 // Caches the experiment's collaborators and prepares the on-disk model cache directory.
 public Experiment(MLContext context,
                   TaskKind task,
                   OptimizingMetricInfo metricInfo,
                   IProgress<TRunDetail> progressCallback,
                   ExperimentSettings experimentSettings,
                   IMetricsAgent<TMetrics> metricsAgent,
                   IEnumerable<TrainerName> trainerAllowList,
                   DatasetColumnInfo[] datasetColumnInfo,
                   IRunner<TRunDetail> runner,
                   IChannel logger)
 {
     _context                = context;
     _history                = new List<SuggestedPipelineRunDetail>();
     _optimizingMetricInfo   = metricInfo;
     _task                   = task;
     _progressCallback       = progressCallback;
     _experimentSettings     = experimentSettings;
     _metricsAgent           = metricsAgent;
     _trainerAllowList       = trainerAllowList;
     _modelDirectory         = GetModelDirectory(_context.TempFilePath, _experimentSettings.CacheDirectoryName);
     _datasetColumnInfo      = datasetColumnInfo;
     _runner                 = runner;
     _logger                 = logger;
     _experimentTimerExpired = false;
 }
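The GetModelDirectory helper called above is not shown on this page. As a rough sketch (an assumption, not the AutoML source), it plausibly combines the context's temp path with the configured cache directory name and ensures that directory exists:

 using System.IO;

 // Sketch only: the real helper's return type and behavior are assumptions.
 private static string GetModelDirectory(string tempFilePath, string cacheDirectoryName)
 {
     var path = Path.Combine(tempFilePath, cacheDirectoryName);
     Directory.CreateDirectory(path); // no-op when the directory already exists
     return path;
 }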
Example #2
 protected BasicMetrics(IMetricsAgent metricsAgent, ITaskSchedulerFactory taskSchedulerFactory)
 {
     _metricsAgent = metricsAgent;

     // Collect memory/thread usage on a recurring schedule (here: immediately, then every 1000 ms).
     _taskScheduler = taskSchedulerFactory.GetTaskScheduler();
     _taskScheduler.ScheduleOnInterval(CollectMemoryAndThreadsUsage, 0, 1000);
 }
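The CollectMemoryAndThreadsUsage callback is not shown. Below is a minimal sketch of what such a probe could report, assuming a hypothetical Gauge(name, value) method on the non-generic IMetricsAgent; that interface shape is an illustration only and does not appear in the examples above.

 using System.Diagnostics;

 // Hypothetical agent shape, for illustration only.
 public interface IMetricsAgent
 {
     void Gauge(string name, double value);
 }

 public static class MemoryAndThreadProbe
 {
     // Samples the current process and forwards two gauges to the agent.
     public static void Collect(IMetricsAgent agent)
     {
         using var process = Process.GetCurrentProcess();
         agent.Gauge("process.working_set_bytes", process.WorkingSet64);
         agent.Gauge("process.thread_count", process.Threads.Count);
     }
 }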
Example #3
 public static CrossValidationRunDetail<TMetrics> GetBestRun<TMetrics>(IEnumerable<CrossValidationRunDetail<TMetrics>> results,
                                                                       IMetricsAgent<TMetrics> metricsAgent, bool isMetricMaximizing)
 {
     results = results.Where(r => r.Results != null && r.Results.Any(x => x.ValidationMetrics != null));
     if (!results.Any())
     {
         return null;
     }
     var scores           = results.Select(r => r.Results.Average(x => metricsAgent.GetScore(x.ValidationMetrics)));
     var indexOfBestScore = GetIndexOfBestScore(scores, isMetricMaximizing);

     // GetIndexOfBestScore returns -1 when every run's metric is NaN; fall back to the
     // first run in that case rather than letting ElementAt throw (see also Example #5).
     indexOfBestScore = indexOfBestScore != -1 ? indexOfBestScore : 0;
     return results.ElementAt(indexOfBestScore);
 }
Example #4
 internal ExperimentBase(MLContext context,
                         IMetricsAgent<TMetrics> metricsAgent,
                         OptimizingMetricInfo optimizingMetricInfo,
                         TExperimentSettings settings,
                         TaskKind task,
                         IEnumerable<TrainerName> trainerAllowList)
 {
     Context              = context;
     MetricsAgent         = metricsAgent;
     OptimizingMetricInfo = optimizingMetricInfo;
     Settings             = settings;
     // Open a named logging channel for this experiment on the shared MLContext.
     _logger              = ((IChannelProvider)context).Start("AutoML");
     _task                = task;
     _trainerAllowList    = trainerAllowList;
 }
Example #5
 public static CrossValidationRunDetail<TMetrics> GetBestRun<TMetrics>(IEnumerable<CrossValidationRunDetail<TMetrics>> results,
                                                                       IMetricsAgent<TMetrics> metricsAgent, bool isMetricMaximizing)
 {
     results = results.Where(r => r.Results != null && r.Results.Any(x => x.ValidationMetrics != null));
     if (!results.Any())
     {
         return null;
     }
     var scores           = results.Select(r => r.Results.Average(x => metricsAgent.GetScore(x.ValidationMetrics)));
     var indexOfBestScore = GetIndexOfBestScore(scores, isMetricMaximizing);

     // indexOfBestScore will be -1 if the optimization metric for all models is NaN.
     // In this case, return the first model.
     indexOfBestScore = indexOfBestScore != -1 ? indexOfBestScore : 0;
     return results.ElementAt(indexOfBestScore);
 }
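GetIndexOfBestScore itself is not among these examples. The following is a plausible reconstruction consistent with the -1 convention documented above, hedged as a sketch rather than the AutoML source:

 using System.Collections.Generic;

 // Returns the index of the best (max or min) non-NaN score, or -1 if every score is NaN.
 private static int GetIndexOfBestScore(IEnumerable<double> scores, bool isMetricMaximizing)
 {
     var bestIndex = -1;
     var bestScore = isMetricMaximizing ? double.MinValue : double.MaxValue;
     var i = 0;
     foreach (var score in scores)
     {
         if (!double.IsNaN(score) &&
             (isMetricMaximizing ? score > bestScore : score < bestScore))
         {
             bestScore = score;
             bestIndex = i;
         }
         i++;
     }
     return bestIndex;
 }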
Example #6
 public CrossValRunner(MLContext context,
                       IDataView[] trainDatasets,
                       IDataView[] validDatasets,
                       IMetricsAgent<TMetrics> metricsAgent,
                       IEstimator<ITransformer> preFeaturizer,
                       ITransformer[] preprocessorTransforms,
                       string labelColumn,
                       IChannel logger)
 {
     _context                = context;
     _trainDatasets          = trainDatasets;           // one train/validation pair per fold
     _validDatasets          = validDatasets;
     _metricsAgent           = metricsAgent;
     _preFeaturizer          = preFeaturizer;
     _preprocessorTransforms = preprocessorTransforms;
     _labelColumn            = labelColumn;
     _logger                 = logger;
     _modelInputSchema       = trainDatasets[0].Schema; // all folds share the same schema
 }
Example #7
 public TrainValidateRunner(MLContext context,
                            IDataView trainData,
                            IDataView validData,
                            string labelColumn,
                            IMetricsAgent<TMetrics> metricsAgent,
                            IEstimator<ITransformer> preFeaturizer,
                            ITransformer preprocessorTransform,
                            IChannel logger)
 {
     _context               = context;
     _trainData             = trainData;
     _validData             = validData;
     _labelColumn           = labelColumn;
     _metricsAgent          = metricsAgent;
     _preFeaturizer         = preFeaturizer;
     _preprocessorTransform = preprocessorTransform;
     _logger                = logger;
     _modelInputSchema      = trainData.Schema;
 }
Example #8
 public GameMetrics(IMetricsAgent metricsAgent, ITaskSchedulerFactory taskSchedulerFactory)
     : base(metricsAgent, taskSchedulerFactory) // inherits the periodic sampling set up in BasicMetrics (Example #2)
 {
 }
Example #9
 private static bool IsPerfectModel<TMetrics>(IMetricsAgent<TMetrics> metricsAgent, TMetrics metrics)
 {
     var score = metricsAgent.GetScore(metrics);

     return metricsAgent.IsModelPerfect(score);
 }
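Taken together, the examples use only two members of the generic agent: GetScore and IsModelPerfect. Below is a minimal illustrative implementation for a binary-classification-style metrics type; the names and shapes here are assumptions for the sketch, not the library's types.

 // Hypothetical stand-ins, for illustration only.
 public interface IMetricsAgent<TMetrics>
 {
     double GetScore(TMetrics metrics);
     bool IsModelPerfect(double score);
 }

 public class AccuracyMetrics
 {
     public double Accuracy { get; set; }
 }

 public class AccuracyMetricsAgent : IMetricsAgent<AccuracyMetrics>
 {
     // Score the run by accuracy; a null metrics object scores NaN,
     // matching the NaN handling seen in Examples #3 and #5.
     public double GetScore(AccuracyMetrics metrics) =>
         metrics == null ? double.NaN : metrics.Accuracy;

     // A "perfect" model maxes out the metric, so a search can stop early.
     public bool IsModelPerfect(double score) => score == 1d;
 }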