public override RegressionTree TrainingIteration(IChannel ch, bool[] activeFeatures)
        {
            Contracts.CheckValue(ch, nameof(ch));
            AgdScoreTracker trainingScores = TrainingScores as AgdScoreTracker;
            // First, set XK = YK, since we want to fit YK and line-search YK,
            // and call the base class that fits XK (which will fit YK thanks to the swap).
            var xk = trainingScores.XK;

            trainingScores.XK = trainingScores.YK;
            trainingScores.YK = null;

            // Invoke standard gradient descent on YK rather than XK (the scores).
            RegressionTree tree = base.TrainingIteration(ch, activeFeatures);

            // Reverse the XK/YK swap.
            trainingScores.YK = trainingScores.XK;
            trainingScores.XK = xk;

            if (tree == null)
            {
                return null; // No tree was actually learnt. Give up.
            }
            // ... and update the training scores that we omitted from the update
            // in AcceleratedGradientDescent.UpdateScores.
            // Here we could compute the training scores faster by taking advantage of the
            // scores precomputed by LineSearch, but that would make the code here even more
            // difficult/complex.
            trainingScores.AddScores(tree, TreeLearner.Partitioning, 1.0);

            // Now rescale all previous trees by the ratio new_desired_tree_scale / previous_tree_scale.
            for (int t = 0; t < Ensemble.NumTrees - 1; t++)
            {
                Ensemble.GetTreeAt(t).ScaleOutputsBy(AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees) / AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees - 1));
            }
            return tree;
        }
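The rescaling loop maintains the invariant that tree t contributes rawOutput(t) * AgdScoreTracker.TreeMultiplier(t, NumTrees): when the ensemble grows by one tree, every earlier tree is multiplied by the ratio of its new multiplier to its old one. Below is a minimal sketch of that bookkeeping; `multiplier` is a stand-in for `AgdScoreTracker.TreeMultiplier`, whose formula is not shown here.

using System;

static class AgdRescaleSketch
{
    // Re-establish output == raw * multiplier(t, newCount) for each earlier tree
    // after the ensemble grows from newCount - 1 to newCount trees.
    // `multiplier` is a placeholder for AgdScoreTracker.TreeMultiplier.
    public static void RescaleForNewTreeCount(
        double[] treeOutputs, int newCount, Func<int, int, double> multiplier)
    {
        for (int t = 0; t < newCount - 1; t++)
            treeOutputs[t] *= multiplier(t, newCount) / multiplier(t, newCount - 1);
    }
}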
Example #2
        // Divides the output values of the leaves by the bag count.
        // This brings the final scores generated by the model back to the same
        // range as when bagging was not used.
        public void ScaleEnsembleLeaves(int numTrees, int bagSize, Ensemble ensemble)
        {
            int bagCount = GetBagCount(numTrees, bagSize);

            for (int t = 0; t < ensemble.NumTrees; t++)
            {
                RegressionTree tree = ensemble.GetTreeAt(t);
                tree.ScaleOutputsBy(1.0 / bagCount);
            }
        }
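`GetBagCount` is not shown in this example. A plausible implementation, assuming training produces complete bags of `bagSize` trees each, would be the following; this helper is an assumption, not the example's actual code.

// Hypothetical helper: number of complete bags, assuming numTrees is a whole
// multiple of bagSize. The real GetBagCount is not shown above.
private static int GetBagCount(int numTrees, int bagSize)
{
    return numTrees / bagSize;
}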
 public override void FinalizeLearning(int bestIteration)
 {
     if (bestIteration != Ensemble.NumTrees)
     {
         // Restore each tree's multiplier to the value it had at bestIteration.
         for (int t = 0; t < bestIteration; t++)
         {
             Ensemble.GetTreeAt(t).ScaleOutputsBy(AgdScoreTracker.TreeMultiplier(t, bestIteration) / AgdScoreTracker.TreeMultiplier(t, Ensemble.NumTrees));
         }
     }
     base.FinalizeLearning(bestIteration);
 }
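The ratio here undoes the running rescaling from `TrainingIteration`: a tree currently contributes raw * TreeMultiplier(t, NumTrees), so multiplying by TreeMultiplier(t, bestIteration) / TreeMultiplier(t, NumTrees) leaves raw * TreeMultiplier(t, bestIteration), exactly its contribution when only bestIteration trees existed. A one-line check of that algebra, again with `multiplier` standing in for `AgdScoreTracker.TreeMultiplier`:

using System;

static class FinalizeSketch
{
    // Scaling the current contribution by multiplier(t, best) / multiplier(t, total)
    // restores the contribution the tree had after `best` iterations.
    public static double RestoredOutput(
        double raw, int t, int best, int total, Func<int, int, double> multiplier)
    {
        double current = raw * multiplier(t, total);
        return current * multiplier(t, best) / multiplier(t, total); // == raw * multiplier(t, best)
    }
}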
Example #4
 private protected virtual double[] GetGradient(IChannel ch)
 {
     Contracts.AssertValue(ch);
     if (DropoutRate > 0)
     {
         if (_droppedScores == null)
         {
             _droppedScores = new double[TrainingScores.Scores.Length];
         }
         else
         {
             Array.Clear(_droppedScores, 0, _droppedScores.Length);
         }
         if (_scores == null)
         {
             _scores = new double[TrainingScores.Scores.Length];
         }
         int numberOfTrees = Ensemble.NumTrees;
         int[] droppedTrees =
             Enumerable.Range(0, numberOfTrees).Where(t => DropoutRng.NextDouble() < DropoutRate).ToArray();
         _numberOfDroppedTrees = droppedTrees.Length;
         if ((_numberOfDroppedTrees == 0) && (numberOfTrees > 0))
         {
             // Force at least a single tree to be dropped.
             droppedTrees = new int[] { DropoutRng.Next(numberOfTrees) };
             _numberOfDroppedTrees = droppedTrees.Length;
         }
         ch.Trace("dropout: Dropping {0} trees of {1} for rate {2}",
                  _numberOfDroppedTrees, numberOfTrees, DropoutRate);
         foreach (int i in droppedTrees)
         {
             double[] s = _treeScores[i];
             for (int j = 0; j < _droppedScores.Length; j++)
             {
                 _droppedScores[j] += s[j];                                     // sum the scores of the dropped trees
                 s[j] *= _numberOfDroppedTrees / (1.0 + _numberOfDroppedTrees); // rescale the dropped tree's cached scores
             }
             Ensemble.GetTreeAt(i).ScaleOutputsBy(_numberOfDroppedTrees / (1.0 + _numberOfDroppedTrees));
         }
         for (int j = 0; j < _scores.Length; j++)
         {
             _scores[j] = TrainingScores.Scores[j] - _droppedScores[j];
             TrainingScores.Scores[j] -= _droppedScores[j] / (1.0 + _numberOfDroppedTrees);
         }
         return ObjectiveFunction.GetGradient(ch, _scores);
     }
     else
     {
         return ObjectiveFunction.GetGradient(ch, TrainingScores.Scores);
     }
 }
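This is DART-style dropout bookkeeping: with k dropped trees, each dropped tree is scaled by k / (k + 1), the running scores lose droppedSum / (k + 1), and the gradient is computed on scores with the dropped trees removed entirely. A toy numeric sketch of just the score arithmetic (the values below are made up, not taken from the example):

using System;

static class DartSketch
{
    // Toy sketch of the dropout score update above; all values are assumed.
    public static void DartRescaleDemo()
    {
        double[] scores  = { 2.0, 3.0 }; // running model scores
        double[] dropped = { 0.5, 1.0 }; // per-document sums of the dropped trees' scores
        int k = 1;                       // number of dropped trees

        for (int j = 0; j < scores.Length; j++)
        {
            double gradientTarget = scores[j] - dropped[j]; // scores without the dropped trees
            scores[j] -= dropped[j] / (1.0 + k);
            // Now scores[j] == gradientTarget + dropped[j] * k / (1.0 + k),
            // consistent with the dropped trees having been rescaled by k / (k + 1).
            Console.WriteLine($"target={gradientTarget}, rescaled score={scores[j]}");
        }
    }
}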
Example #5
        private Ensemble GetEnsembleFromSolution(LassoFit fit, int solutionIdx, Ensemble originalEnsemble)
        {
            Ensemble ensemble = new Ensemble();

            int weightsCount = fit.NumberOfWeights[solutionIdx];

            for (int i = 0; i < weightsCount; i++)
            {
                double weight = fit.CompressedWeights[solutionIdx][i];
                if (weight != 0)
                {
                    RegressionTree tree = originalEnsemble.GetTreeAt(fit.Indices[i]);
                    tree.Weight = weight;
                    ensemble.AddTree(tree);
                }
            }

            ensemble.Bias = fit.Intercepts[solutionIdx];
            return ensemble;
        }
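A hypothetical caller would pick one point on the Lasso regularization path, typically the last (least regularized) solution. Only `GetEnsembleFromSolution` comes from the example; `fit.NumberOfLambdas` and the method name below are assumptions. Note also that the method sets `Weight` on trees it shares with `originalEnsemble`, so the original ensemble should not be reused afterward.

// Hypothetical usage sketch; NumberOfLambdas is an assumed property name.
private Ensemble PruneWithLastSolution(LassoFit fit, Ensemble originalEnsemble)
{
    int lastSolutionIdx = fit.NumberOfLambdas - 1;
    return GetEnsembleFromSolution(fit, lastSolutionIdx, originalEnsemble);
}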