Example #1
        public void FastForestRegressorEstimator()
        {
            // Pipeline.
            var pipeline = new FastForestRegression(Env, "Label", "Features", advancedSettings: s => {
                s.BaggingSize = 2;
                s.NumTrees    = 10;
            });

            TestEstimatorCore(pipeline, GetRegressionPipeline());
            Done();
        }
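
The advancedSettings constructor used above belongs to an older, test-only ML.NET surface. For comparison, here is a minimal sketch of the same kind of trainer built through the public MLContext API; the DataPoint row type, the synthetic data, and the use of numberOfTrees as the counterpart of NumTrees are assumptions for illustration, not part of the original test.

using System;
using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;

// Hypothetical in-memory row type standing in for GetRegressionPipeline()'s data.
public class DataPoint
{
    public float Label { get; set; }

    [VectorType(3)]
    public float[] Features { get; set; }
}

public static class FastForestRegressionSketch
{
    public static void Main()
    {
        var mlContext = new MLContext(seed: 0);

        // Synthetic training rows; values are illustrative only.
        var rows = Enumerable.Range(0, 100).Select(i => new DataPoint
        {
            Label    = i * 0.5f,
            Features = new[] { i * 0.1f, i * 0.2f, i * 0.3f }
        }).ToArray();
        IDataView data = mlContext.Data.LoadFromEnumerable(rows);

        // Public counterpart of the trainer above; requires the Microsoft.ML.FastTree package.
        var trainer = mlContext.Regression.Trainers.FastForest(
            labelColumnName: "Label",
            featureColumnName: "Features",
            numberOfTrees: 10);

        var model = trainer.Fit(data);
        Console.WriteLine("Trained a fast forest regressor with 10 trees.");
    }
}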
Example #2
        public void FastForestRegressorEstimator()
        {
            var dataView = GetRegressionPipeline();
            var trainer  = new FastForestRegression(Env, "Label", "Features", advancedSettings: s =>
            {
                s.BaggingSize = 2;
                s.NumTrees    = 10;
            });

            TestEstimatorCore(trainer, dataView);
            var model = trainer.Train(dataView, dataView);

            Done();
        }
Example #3
        private FastForestRegressionPredictor FitModel(IEnumerable<IRunResult> previousRuns)
        {
            Single[]   targets  = new Single[previousRuns.Count()];
            Single[][] features = new Single[previousRuns.Count()][];

            int i = 0;

            foreach (RunResult r in previousRuns)
            {
                features[i] = SweeperProbabilityUtils.ParameterSetAsFloatArray(_host, _sweepParameters, r.ParameterSet, true);
                targets[i]  = (Float)r.MetricValue;
                i++;
            }

            ArrayDataViewBuilder dvBuilder = new ArrayDataViewBuilder(_host);

            dvBuilder.AddColumn("Label", NumberType.Float, targets);
            dvBuilder.AddColumn("Features", NumberType.Float, features);

            IDataView view = dvBuilder.GetDataView();

            _host.Assert(view.GetRowCount() == targets.Length, "This data view will have as many rows as there have been evaluations");
            RoleMappedData data = TrainUtils.CreateExamples(view, "Label", "Features");

            using (IChannel ch = _host.Start("Single training"))
            {
                // Set relevant random forest arguments.
                FastForestRegression.Arguments args = new FastForestRegression.Arguments();
                args.FeatureFraction     = _args.SplitRatio;
                args.NumTrees            = _args.NumOfTrees;
                args.MinDocumentsInLeafs = _args.NMinForSplit;

                // Train random forest.
                FastForestRegression trainer = new FastForestRegression(_host, args);
                trainer.Train(data);
                FastForestRegressionPredictor predictor = trainer.CreatePredictor();

                // Return random forest predictor.
                ch.Done();
                return predictor;
            }
        }
Example #4
        private FastForestRegressionModelParameters FitModel(IEnumerable<IRunResult> previousRuns)
        {
            Single[]   targets  = new Single[previousRuns.Count()];
            Single[][] features = new Single[previousRuns.Count()][];

            int i = 0;

            foreach (RunResult r in previousRuns)
            {
                features[i] = SweeperProbabilityUtils.ParameterSetAsFloatArray(_host, _sweepParameters, r.ParameterSet, true);
                targets[i]  = (float)r.MetricValue;
                i++;
            }

            ArrayDataViewBuilder dvBuilder = new ArrayDataViewBuilder(_host);

            dvBuilder.AddColumn(DefaultColumnNames.Label, NumberDataViewType.Single, targets);
            dvBuilder.AddColumn(DefaultColumnNames.Features, NumberDataViewType.Single, features);

            IDataView view = dvBuilder.GetDataView();

            _host.Assert(view.GetRowCount() == targets.Length, "This data view will have as many rows as there have been evaluations");

            using (IChannel ch = _host.Start("Single training"))
            {
                // Set relevant random forest arguments.
                // Train random forest.
                var trainer = new FastForestRegression(_host,
                                                       new FastForestRegression.Options
                {
                    FeatureFraction     = _args.SplitRatio,
                    NumTrees            = _args.NumOfTrees,
                    MinDocumentsInLeafs = _args.NMinForSplit,
                    LabelColumnName     = DefaultColumnNames.Label,
                    FeatureColumnName   = DefaultColumnNames.Features,
                });
                var predictor = trainer.Fit(view);

                // Return random forest predictor.
                return predictor.Model;
            }
        }
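
ArrayDataViewBuilder, IHost, and IChannel in the example above are internal ML.NET types, so this method only compiles inside the ML.NET repository itself. A rough public-API sketch of the same idea (pair each evaluated sweep configuration with its observed metric, then fit an options-based fast forest) might look like the following; the SweepRow type, the fixed feature length, and the literal option values stand in for _sweepParameters and _args and are assumptions.

using System.Linq;
using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers.FastTree;

// Hypothetical row type: one evaluated configuration and its observed metric.
public class SweepRow
{
    public float Label { get; set; }

    [VectorType(4)]   // feature length is illustrative; it depends on the swept parameters
    public float[] Features { get; set; }
}

public static class SweeperFitSketch
{
    public static FastForestRegressionModelParameters FitModel(
        MLContext mlContext, float[] targets, float[][] features)
    {
        // One row per previous run: metric value as label, parameter set as features.
        var rows = targets.Zip(features, (t, f) => new SweepRow { Label = t, Features = f }).ToArray();
        IDataView view = mlContext.Data.LoadFromEnumerable(rows);

        // Mirrors the Options block above; the numeric values stand in for
        // _args.SplitRatio, _args.NumOfTrees and _args.NMinForSplit.
        var trainer = mlContext.Regression.Trainers.FastForest(new FastForestRegressionTrainer.Options
        {
            FeatureFraction            = 0.9,
            NumberOfTrees              = 100,
            MinimumExampleCountPerLeaf = 2,
            LabelColumnName            = "Label",
            FeatureColumnName          = "Features",
        });

        return trainer.Fit(view).Model;
    }
}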
Example #5
        private FastForestRegressionModelParameters FitModel(IEnumerable<IRunResult> previousRuns)
        {
            Single[]   targets  = new Single[previousRuns.Count()];
            Single[][] features = new Single[previousRuns.Count()][];

            int i = 0;

            foreach (RunResult r in previousRuns)
            {
                features[i] = SweeperProbabilityUtils.ParameterSetAsFloatArray(_sweepParameters, r.ParameterSet, true);
                targets[i]  = (Float)r.MetricValue;
                i++;
            }

            ArrayDataViewBuilder dvBuilder = new ArrayDataViewBuilder(_context);

            dvBuilder.AddColumn(DefaultColumnNames.Label, NumberType.Float, targets);
            dvBuilder.AddColumn(DefaultColumnNames.Features, NumberType.Float, features);

            IDataView data = dvBuilder.GetDataView();

            AutoMlUtils.Assert(data.GetRowCount() == targets.Length, "This data view will have as many rows as there have been evaluations");

            // Set relevant random forest arguments.
            // Train random forest.
            var trainer = new FastForestRegression(_context, DefaultColumnNames.Label, DefaultColumnNames.Features, advancedSettings: s =>
            {
                s.FeatureFraction     = _args.SplitRatio;
                s.NumTrees            = _args.NumOfTrees;
                s.MinDocumentsInLeafs = _args.NMinForSplit;
            });
            var predictor = trainer.Train(data).Model;

            // Return random forest predictor.
            return predictor;
        }
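
In each FitModel variant, only the trained model parameters are returned; scoring candidate parameter sets happens elsewhere in the sweeper. With the public API, one way to score a new candidate against the transformer fitted in the previous sketch is shown below; SweepRow is the illustrative type defined above, and the SweepPrediction output class is likewise an assumption, not part of the original code.

using Microsoft.ML;
using Microsoft.ML.Data;
using Microsoft.ML.Trainers.FastTree;

// Output class for the regression score produced by the fast forest.
public class SweepPrediction
{
    [ColumnName("Score")]
    public float Score { get; set; }
}

public static class SweeperScoringSketch
{
    // Predicts the metric value for one candidate configuration.
    // In practice the prediction engine would be created once and reused.
    public static float PredictMetric(
        MLContext mlContext,
        RegressionPredictionTransformer<FastForestRegressionModelParameters> model,
        float[] candidateFeatures)
    {
        var engine = mlContext.Model.CreatePredictionEngine<SweepRow, SweepPrediction>(model);
        return engine.Predict(new SweepRow { Features = candidateFeatures }).Score;
    }
}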