public IRegressionModel GetModel() {
#pragma warning disable 618
  var model = new GradientBoostedTreesModel(models, weights);
#pragma warning restore 618
  // we don't know the number of iterations here but the number of weights is equal
  // to the number of iterations + 1 (for the constant model)
  // wrap the actual model in a surrogate that enables persistence and lazy recalculation
  // of the model if necessary (an illustrative sketch of such a surrogate follows below)
  return new GradientBoostedTreesModelSurrogate(problemData, randSeed, lossFunction, weights.Count - 1, maxSize, r, m, nu, model);
}
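// Illustrative sketch only, not the actual GradientBoostedTreesModelSurrogate: the idea behind
// the surrogate returned above is to persist the training parameters (problem data, seed, loss
// function, number of iterations, ...) plus an optional cached model, and to rebuild the model
// lazily from those parameters when it is requested after deserialization without the cached
// instance. All names below (LazySurrogateSketch, RebuildModel, trainingParameters) are
// hypothetical; only IRegressionModel is taken from the code above.
public class LazySurrogateSketch {
  private readonly object trainingParameters;   // placeholder for problem data, seed, loss, etc.
  private IRegressionModel actualModel;         // may be null after loading from storage

  public LazySurrogateSketch(object trainingParameters, IRegressionModel actualModel = null) {
    this.trainingParameters = trainingParameters;
    this.actualModel = actualModel;
  }

  public IRegressionModel Model {
    get {
      // recalculate only if the full model was not persisted alongside the parameters
      if (actualModel == null) actualModel = RebuildModel(trainingParameters);
      return actualModel;
    }
  }

  private static IRegressionModel RebuildModel(object trainingParameters) {
    // hypothetical: re-run gradient boosting with the stored parameters and seed,
    // which reproduces the same ensemble deterministically
    throw new System.NotImplementedException();
  }
}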
// cloning constructor: copies the weight list and deep-clones each tree model via the Cloner
private GradientBoostedTreesModel(GradientBoostedTreesModel original, Cloner cloner)
  : base(original, cloner) {
  this.weights = new List<double>(original.weights);
  this.models = new List<IRegressionModel>(original.models.Select(m => cloner.Clone(m)));
  this.isCompatibilityLoaded = original.isCompatibilityLoaded;
}