public override RegressionTree TrainingIteration(IChannel ch, bool[] activeFeatures)
        {
            Contracts.CheckValue(ch, nameof(ch));
            AgdScoreTracker trainingScores = TrainingScores as AgdScoreTracker;
            // First, let's make XK = YK, because we want to fit and line-search YK,
            // and the base class fits XK (which, thanks to the swap, will be YK).
            var xk = trainingScores.XK;

            trainingScores.XK = trainingScores.YK;
            trainingScores.YK = null;

            // Invoke standard gradient descent on YK rather than XK (the scores).
            RegressionTree tree = base.TrainingIteration(ch, activeFeatures);

            // Reverse the XK/YK swap.
            trainingScores.YK = trainingScores.XK;
            trainingScores.XK = xk;

            if (tree == null)
            {
                return null; // No tree was actually learnt; give up.
            }
            // ... and update the training scores that we omitted from the update
            // in AcceleratedGradientDescent.UpdateScores.
            // We could compute the training scores faster here by reusing the scores
            // precomputed by LineSearch, but that would make this code even more complex.
            trainingScores.AddScores(tree, TreeLearner.Partitioning, 1.0);

            // Now rescale all previous trees by the ratio new_desired_tree_scale / previous_tree_scale.
            for (int t = 0; t < Ensemble.NumTrees - 1; t++)
            {
                Ensemble.GetTreeAt(t).ScaleOutputsBy(AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees) / AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees - 1));
            }
            return tree;
        }
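
A note on the swap above: it implements a Nesterov-style accelerated step. The tree is fit and line-searched at the lookahead point YK, and previous trees are then rescaled so that the ensemble matches the new XK/YK combination. A minimal standalone sketch of this update on plain arrays (Momentum is a hypothetical schedule, standing in for the multiplier logic inside AgdScoreTracker, which is not shown here):

        // Sketch only: illustrates the accelerated-gradient score update, not the library's code.
        internal static class AgdSketch
        {
            // Hypothetical momentum schedule; the real multipliers come from AgdScoreTracker.TreeMultiplier.
            private static double Momentum(int k) => k / (k + 3.0);

            public static void Step(double[] xk, double[] yk, double[] treeOutputs, double step, int k)
            {
                for (int i = 0; i < xk.Length; i++)
                {
                    // Gradient step taken at the lookahead point YK (this is what the swapped base call fits).
                    double xNext = yk[i] + step * treeOutputs[i];
                    // The new lookahead combines the new and previous iterates.
                    yk[i] = xNext + Momentum(k) * (xNext - xk[i]);
                    xk[i] = xNext;
                }
            }
        }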
Example #2
 public OptimizationAlgorithm(Ensemble ensemble, Dataset trainData, double[] initTrainScores)
 {
     Ensemble       = ensemble;
     TrainingScores = ConstructScoreTracker("train", trainData, initTrainScores);
     TrackedScores  = new List<ScoreTracker>();
     TrackedScores.Add(TrainingScores);
     DropoutRng = new Random();
     UseFastTrainingScoresUpdate = true;
 }
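
TrackedScores starts out containing only the training tracker. Anything appended to it is assumed to be refreshed alongside the training scores after each new tree (an assumption about UpdateAllScores, which is not shown above). A hypothetical helper a subclass might add:

     // Hypothetical, not part of the library: keep a validation set's scores in sync
     // by registering its tracker, using the same ConstructScoreTracker call as above.
     public void TrackValidationScores(Dataset validData, double[] initValidScores)
     {
         TrackedScores.Add(ConstructScoreTracker("valid", validData, initValidScores));
     }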
Example #3
        internal override InternalRegressionTree TrainingIteration(IChannel ch, bool[] activeFeatures)
        {
            Contracts.CheckValue(ch, nameof(ch));
            // Fit a regression tree to the gradient using least squares.
            InternalRegressionTree tree = TreeLearner.FitTargets(ch, activeFeatures, AdjustTargetsAndSetWeights(ch));

            if (tree == null)
            {
                return null; // Could not learn a tree. Exit.
            }
            // Adjust output values of tree by performing a Newton step.

            // REVIEW: This should be part of OptimizationAlgorithm.
            using (Timer.Time(TimerEvent.TreeLearnerAdjustTreeOutputs))
            {
                double[] backupScores = null;
                // When doing dropout, we need to replace TrainingScores with the scores computed without the dropped trees.
                if (DropoutRate > 0)
                {
                    backupScores          = TrainingScores.Scores;
                    TrainingScores.Scores = _scores;
                }

                if (AdjustTreeOutputsOverride != null)
                {
                    AdjustTreeOutputsOverride.AdjustTreeOutputs(ch, tree, TreeLearner.Partitioning, TrainingScores);
                }
                else if (ObjectiveFunction is IStepSearch)
                {
                    (ObjectiveFunction as IStepSearch).AdjustTreeOutputs(ch, tree, TreeLearner.Partitioning, TrainingScores);
                }
                else
                {
                    throw ch.Except("No AdjustTreeOutputs defined. The objective function should implement IStepSearch, or AdjustTreeOutputsOverride should be set.");
                }
                if (DropoutRate > 0)
                {
                    // Restore the original scores.
                    TrainingScores.Scores = backupScores;
                }
            }
            if (Smoothing != 0.0)
            {
                SmoothTree(tree, Smoothing);
                UseFastTrainingScoresUpdate = false;
            }
            if (DropoutRate > 0)
            {
                // Don't do shrinkage when doing dropout.
                double scaling = 1.0 / (1.0 + _numberOfDroppedTrees);
                tree.ScaleOutputsBy(scaling);
                _treeScores.Add(tree.GetOutputs(TrainingScores.Dataset));
            }
            UpdateAllScores(ch, tree);
            Ensemble.AddTree(tree);
            return tree;
        }
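
On the dropout scaling above: with k dropped trees, the new tree is scaled by 1 / (1 + k), while GetGradient (Example #6 below) rescales each dropped tree by k / (1 + k). A standalone check, under the idealized assumption that the new tree exactly reproduces the dropped signal:

        // Sketch only: the dropped mass removed from the model equals the mass handed to the new tree.
        internal static class DartScalingSketch
        {
            public static void Main()
            {
                int k = 3;                   // number of dropped trees
                double droppedSum = 2.0;     // combined dropped-tree score at one document
                double newTree = droppedSum; // idealized: the new tree recovers the dropped signal

                double keptAfterRescale = droppedSum * k / (1.0 + k); // dropped trees, rescaled
                double newContribution = newTree / (1.0 + k);         // new tree, scaled

                System.Console.WriteLine(keptAfterRescale + newContribution); // 2, the original dropped mass
            }
        }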
Example #4
        // Divides the output values of the leaves by the bag count.
        // This brings the final scores generated by the model back to the same
        // range as when bagging is not used.
        public void ScaleEnsembleLeaves(int numTrees, int bagSize, Ensemble ensemble)
        {
            int bagCount = GetBagCount(numTrees, bagSize);

            for (int t = 0; t < ensemble.NumTrees; t++)
            {
                RegressionTree tree = ensemble.GetTreeAt(t);
                tree.ScaleOutputsBy(1.0 / bagCount);
            }
        }
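
GetBagCount is not shown; a hypothetical reading is numTrees / bagSize, i.e. the number of complete bags, each of which contributes a full model's worth of output. A small sketch under that assumption:

        // Sketch: with 100 trees trained in bags of 20, five bags each add a full-range
        // score, so every leaf is divided by 5 to restore the un-bagged output range.
        internal static class BaggingScaleSketch
        {
            public static void Main()
            {
                int numTrees = 100, bagSize = 20;
                int bagCount = numTrees / bagSize;            // 5 (hypothetical GetBagCount)
                double rawLeaf = 0.35;
                System.Console.WriteLine(rawLeaf / bagCount); // 0.07
            }
        }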
 public override void FinalizeLearning(int bestIteration)
 {
     if (bestIteration != Ensemble.NumTrees)
     {
         // Restore the multiplier for each tree to what it was at bestIteration.
         for (int t = 0; t < bestIteration; t++)
         {
             Ensemble.GetTreeAt(t).ScaleOutputsBy(AgdScoreTracker.TreeMultiplier(t, bestIteration) / AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees));
         }
     }
     base.FinalizeLearning(bestIteration);
 }
Example #6
 private protected virtual double[] GetGradient(IChannel ch)
 {
     Contracts.AssertValue(ch);
     if (DropoutRate > 0)
     {
         if (_droppedScores == null)
         {
             _droppedScores = new double[TrainingScores.Scores.Length];
         }
         else
         {
             Array.Clear(_droppedScores, 0, _droppedScores.Length);
         }
         if (_scores == null)
         {
             _scores = new double[TrainingScores.Scores.Length];
         }
         int   numberOfTrees = Ensemble.NumTrees;
         int[] droppedTrees  =
             Enumerable.Range(0, numberOfTrees).Where(t => (DropoutRng.NextDouble() < DropoutRate)).ToArray();
         _numberOfDroppedTrees = droppedTrees.Length;
          if ((_numberOfDroppedTrees == 0) && (numberOfTrees > 0))
          {
              // Force at least a single tree to be dropped.
              droppedTrees          = new int[] { DropoutRng.Next(numberOfTrees) };
              _numberOfDroppedTrees = droppedTrees.Length;
          }
         ch.Trace("dropout: Dropping {0} trees of {1} for rate {2}",
                  _numberOfDroppedTrees, numberOfTrees, DropoutRate);
         foreach (int i in droppedTrees)
         {
             double[] s = _treeScores[i];
             for (int j = 0; j < _droppedScores.Length; j++)
             {
                  _droppedScores[j] += s[j];                                     // Accumulate the scores of the dropped trees.
                  s[j] *= _numberOfDroppedTrees / (1.0 + _numberOfDroppedTrees); // Rescale the dropped tree's cached scores.
             }
             Ensemble.GetTreeAt(i).ScaleOutputsBy(_numberOfDroppedTrees / (1.0 + _numberOfDroppedTrees));
         }
         for (int j = 0; j < _scores.Length; j++)
         {
             _scores[j] = TrainingScores.Scores[j] - _droppedScores[j];
             TrainingScores.Scores[j] -= _droppedScores[j] / (1.0 + _numberOfDroppedTrees);
         }
          return ObjectiveFunction.GetGradient(ch, _scores);
     }
     else
     {
          return ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);
     }
 }
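
The bookkeeping in the dropout branch keeps TrainingScores consistent: _scores holds the model without the dropped trees (and is what the gradient is computed against), while subtracting dropped / (1 + k) from TrainingScores makes it equal the sum of all trees after each dropped tree is rescaled by k / (1 + k). A standalone check of that identity (values chosen to be exact in binary floating point):

     // Sketch only: verifies TrainingScores - dropped / (1 + k) == kept + dropped * k / (1 + k).
     internal static class DropoutInvariantSketch
     {
         public static void Main()
         {
             double kept = 5.0, dropped = 2.0; // score mass at one document
             int k = 3;                        // number of dropped trees

             double total = kept + dropped;                 // TrainingScores before the update
             double gradientScores = total - dropped;       // _scores: model minus dropped trees
             double adjusted = total - dropped / (1.0 + k); // TrainingScores after the update

             double rescaledModel = kept + dropped * k / (1.0 + k); // dropped trees rescaled
             System.Console.WriteLine($"{gradientScores} {adjusted == rescaledModel}"); // 5 True
         }
     }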
Example #7
        public override RegressionTree TrainingIteration(IChannel ch, bool[] activeFeatures)
        {
            Contracts.CheckValue(ch, nameof(ch));

            double[]       sampleWeights   = null;
            double[]       targets         = GetGradient(ch);
            double[]       weightedTargets = _gradientWrapper.AdjustTargetAndSetWeights(targets, ObjectiveFunction, out sampleWeights);
            RegressionTree tree            = ((RandomForestLeastSquaresTreeLearner)TreeLearner).FitTargets(ch, activeFeatures, weightedTargets,
                                                                                                           targets, sampleWeights);

            if (tree != null)
            {
                Ensemble.AddTree(tree);
            }
            return tree;
        }
Example #8
        public bool Compress(IChannel ch, Ensemble ensemble, double[] trainScores, int bestIteration, int maxTreesAfterCompression)
        {
            LoadTargets(trainScores, bestIteration);

            LassoFit fit = GetLassoFit(ch, maxTreesAfterCompression);
            int      numberOfSolutions = fit.NumberOfLambdas;
            int      bestSolutionIdx   = 0;

            ch.Info("Compression R2 values:");
            for (int i = 0; i < numberOfSolutions; i++)
            {
                ch.Info("Solution {0}:\t{1}\t{2}", i + 1, fit.NonZeroWeights[i], fit.Rsquared[i]);
            }
            bestSolutionIdx     = numberOfSolutions - 1;
            _compressedEnsemble = GetEnsembleFromSolution(fit, bestSolutionIdx, ensemble);
            return true;
        }
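
Note that after printing the R2 table the method always takes the last (densest) lasso solution. A hypothetical variant, not the code above, would pick the solution with the highest R2 instead:

        // Hypothetical helper: choose the lasso solution index with the best R2.
        private static int BestByRSquared(double[] rsquared)
        {
            int best = 0;
            for (int i = 1; i < rsquared.Length; i++)
            {
                if (rsquared[i] > rsquared[best])
                    best = i;
            }
            return best;
        }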
Example #9
        private Ensemble GetEnsembleFromSolution(LassoFit fit, int solutionIdx, Ensemble originalEnsemble)
        {
            Ensemble ensemble = new Ensemble();

            int weightsCount = fit.NumberOfWeights[solutionIdx];

            for (int i = 0; i < weightsCount; i++)
            {
                double weight = fit.CompressedWeights[solutionIdx][i];
                if (weight != 0)
                {
                    RegressionTree tree = originalEnsemble.GetTreeAt(fit.Indices[i]);
                    tree.Weight = weight;
                    ensemble.AddTree(tree);
                }
            }

            ensemble.Bias = fit.Intercepts[solutionIdx];
            return ensemble;
        }
 // REVIEW: When the FastTree application is decoupled from the tree learner and boosting logic, this class should be removed.
 public RandomForestOptimizer(Ensemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper)
     : base(ensemble, trainData, initTrainScores, gradientWrapper)
 {
     _gradientWrapper = gradientWrapper;
 }
 public AcceleratedGradientDescent(Ensemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper)
     : base(ensemble, trainData, initTrainScores, gradientWrapper)
 {
     UseFastTrainingScoresUpdate = false;
 }
Example #12
 public GradientDescent(Ensemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper)
     : base(ensemble, trainData, initTrainScores)
 {
     _gradientWrapper = gradientWrapper;
     _treeScores      = new List <double[]>();
 }
Example #13
 public ConjugateGradientDescent(Ensemble ensemble, Dataset trainData, double[] initTrainScores, IGradientAdjuster gradientWrapper)
     : base(ensemble, trainData, initTrainScores, gradientWrapper)
 {
     _currentDk = new double[trainData.NumDocs];
 }
Example #14
        public IPredictor CombineModels(IEnumerable<IPredictor> models)
        {
            _host.CheckValue(models, nameof(models));

            var ensemble = new Ensemble();
            int modelCount = 0;
            int featureCount = -1;
            bool binaryClassifier = false;
            foreach (var model in models)
            {
                modelCount++;

                var predictor = model;
                _host.CheckValue(predictor, nameof(models), "One of the models is null");

                var calibrated = predictor as CalibratedPredictorBase;
                double paramA = 1;
                if (calibrated != null)
                {
                    _host.Check(calibrated.Calibrator is PlattCalibrator,
                        "Combining FastTree models can only be done when the models are calibrated with Platt calibrator");
                    predictor = calibrated.SubPredictor;
                    paramA = -(calibrated.Calibrator as PlattCalibrator).ParamA;
                }
                var tree = predictor as FastTreePredictionWrapper;
                if (tree == null)
                    throw _host.Except("Model is not a tree ensemble");
                foreach (var t in tree.TrainedEnsemble.Trees)
                {
                    var bytes = new byte[t.SizeInBytes()];
                    int position = -1;
                    t.ToByteArray(bytes, ref position);
                    position = -1;
                    var tNew = new RegressionTree(bytes, ref position);
                    if (paramA != 1)
                    {
                        for (int i = 0; i < tNew.NumLeaves; i++)
                            tNew.SetOutput(i, tNew.LeafValues[i] * paramA);
                    }
                    ensemble.AddTree(tNew);
                }

                if (modelCount == 1)
                {
                    binaryClassifier = calibrated != null;
                    featureCount = tree.InputType.ValueCount;
                }
                else
                {
                    _host.Check((calibrated != null) == binaryClassifier, "Ensemble contains both calibrated and uncalibrated models");
                    _host.Check(featureCount == tree.InputType.ValueCount, "Found models with different number of features");
                }
            }

            _host.Check(modelCount > 0, "CombineModels requires at least one model");
            var scale = 1 / (double)modelCount;

            foreach (var t in ensemble.Trees)
            {
                for (int i = 0; i < t.NumLeaves; i++)
                    t.SetOutput(i, t.LeafValues[i] * scale);
            }

            switch (_kind)
            {
                case PredictionKind.BinaryClassification:
                    if (!binaryClassifier)
                        return new FastTreeBinaryPredictor(_host, ensemble, featureCount, null);

                    var cali = new PlattCalibrator(_host, -1, 0);
                    return new FeatureWeightsCalibratedPredictor(_host, new FastTreeBinaryPredictor(_host, ensemble, featureCount, null), cali);
                case PredictionKind.Regression:
                    return new FastTreeRegressionPredictor(_host, ensemble, featureCount, null);
                case PredictionKind.Ranking:
                    return new FastTreeRankingPredictor(_host, ensemble, featureCount, null);
                default:
                    _host.Assert(false);
                    throw _host.ExceptNotSupp();
            }
        }
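
The paramA trick above folds each model's Platt slope into its tree outputs, so that one shared calibrator (constructed with slope -1 and intercept 0) reproduces the per-model probabilities. This assumes PlattCalibrator computes p = 1 / (1 + exp(a * score + b)) and, like the code above, ignores the per-model intercept b. A small sketch of the algebra:

        // Sketch: scaling the raw score by -a and applying a slope of -1 gives the same
        // probability as applying the original slope a directly (intercept assumed 0).
        internal static class PlattFoldSketch
        {
            public static void Main()
            {
                double a = -1.7, score = 0.4; // per-model Platt slope and raw ensemble score
                double direct = 1.0 / (1.0 + System.Math.Exp(a * score));
                double folded = 1.0 / (1.0 + System.Math.Exp(-1.0 * (-a * score)));
                System.Console.WriteLine($"{direct} {folded}"); // identical values
            }
        }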