        public void GradientBoostingTestTowerRelativeError()
        {
            var gbt        = new GradientBoostedTreesAlgorithm();
            var provider   = new HeuristicLab.Problems.Instances.DataAnalysis.RegressionRealWorldInstanceProvider();
            var instance   = provider.GetDataDescriptors().Single(x => x.Name.Contains("Tower"));
            var regProblem = new RegressionProblem();

            regProblem.Load(provider.LoadData(instance));

            #region Algorithm Configuration
            gbt.Problem         = regProblem;
            gbt.Seed            = 0;
            gbt.SetSeedRandomly = false;
            gbt.Iterations      = 3000;
            gbt.MaxSize         = 20;
            gbt.Nu = 0.005;
            gbt.LossFunctionParameter.Value = gbt.LossFunctionParameter.ValidValues.First(l => l.ToString().Contains("Relative"));
            gbt.CreateSolution = false;
            #endregion

            gbt.Start();

            Console.WriteLine(gbt.ExecutionTime);
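            // regression check: the expected loss values were recorded for the fixed seed configured above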
            Assert.AreEqual(0.061954221604374943, ((DoubleValue)gbt.Results["Loss (train)"].Value).Value, 1E-6);
            Assert.AreEqual(0.06316303473499961, ((DoubleValue)gbt.Results["Loss (test)"].Value).Value, 1E-6);
        }
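The assertions above unwrap a DoubleValue from the algorithm's result collection by hand. A small helper along the following lines (hypothetical; not part of the original test class) keeps that cast in one place and relies only on the Results indexer and DoubleValue members already used above.

        // Hypothetical helper: fetch a double-valued result by name.
        // Assumes the entry stored under `name` is a DoubleValue, as in the assertions above.
        private static double GetDoubleResult(GradientBoostedTreesAlgorithm gbt, string name)
        {
            return ((DoubleValue)gbt.Results[name].Value).Value;
        }

With it, a check reads Assert.AreEqual(0.061954221604374943, GetDoubleResult(gbt, "Loss (train)"), 1E-6);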
Example #2
        public void GradientBoostingTestTowerAbsoluteError()
        {
            var gbt        = new GradientBoostedTreesAlgorithm();
            var provider   = new HeuristicLab.Problems.Instances.DataAnalysis.RegressionRealWorldInstanceProvider();
            var instance   = provider.GetDataDescriptors().Single(x => x.Name.Contains("Tower"));
            var regProblem = new RegressionProblem();

            regProblem.Load(provider.LoadData(instance));

            #region Algorithm Configuration
            gbt.Problem         = regProblem;
            gbt.Seed            = 0;
            gbt.SetSeedRandomly = false;
            gbt.Iterations      = 1000;
            gbt.MaxSize         = 20;
            gbt.Nu = 0.02;
            gbt.LossFunctionParameter.Value = gbt.LossFunctionParameter.ValidValues.First(l => l.ToString().Contains("Absolute"));
            gbt.ModelCreation = GradientBoostedTrees.ModelCreation.QualityOnly;
            #endregion

            gbt.Start();

            Console.WriteLine(gbt.ExecutionTime);
            Assert.AreEqual(10.551385044666661, ((DoubleValue)gbt.Results["Loss (train)"].Value).Value, 1E-6);
            Assert.AreEqual(12.918001745581172, ((DoubleValue)gbt.Results["Loss (test)"].Value).Value, 1E-6);
        }
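These methods use MSTest's Assert, so in a runnable project they are assumed to sit inside a test class roughly like the sketch below; the class name, attributes, and using directives are assumptions, since the listing does not show the surrounding file.

        // Sketch of an assumed surrounding test file (namespaces and attributes are guesses, not from the listing).
        using System;
        using System.Linq;
        using HeuristicLab.Algorithms.DataAnalysis;
        using HeuristicLab.Common;
        using HeuristicLab.Data;
        using HeuristicLab.Problems.DataAnalysis;
        using Microsoft.VisualStudio.TestTools.UnitTesting;

        [TestClass]
        public class GradientBoostedTreesAlgorithmTest
        {
            [TestMethod]
            public void GradientBoostingTestTowerAbsoluteError()
            {
                // method body as shown in Example #2 above
            }
        }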
        public void GradientBoostingTestTowerSquaredError()
        {
            var gbt        = new GradientBoostedTreesAlgorithm();
            var provider   = new HeuristicLab.Problems.Instances.DataAnalysis.RegressionRealWorldInstanceProvider();
            var instance   = provider.GetDataDescriptors().Single(x => x.Name.Contains("Tower"));
            var regProblem = new RegressionProblem();

            regProblem.Load(provider.LoadData(instance));

            #region Algorithm Configuration
            gbt.Problem         = regProblem;
            gbt.Seed            = 0;
            gbt.SetSeedRandomly = false;
            gbt.Iterations      = 5000;
            gbt.MaxSize         = 20;
            gbt.CreateSolution  = false;
            #endregion

            gbt.Start();

            Console.WriteLine(gbt.ExecutionTime);
            Assert.AreEqual(267.68704241153921, ((DoubleValue)gbt.Results["Loss (train)"].Value).Value, 1E-6);
            Assert.AreEqual(393.84704062205469, ((DoubleValue)gbt.Results["Loss (test)"].Value).Value, 1E-6);
        }
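The relative- and absolute-error tests select their loss through LossFunctionParameter.ValidValues and a ToString match, while the squared-error test above leaves the parameter at its default. Because ValidValues is enumerable (the tests call .First on it), the selectable losses can be listed with a snippet like this (hypothetical, built only from members already used above):

            // Hypothetical: print every selectable loss function for the configured algorithm.
            foreach (var loss in gbt.LossFunctionParameter.ValidValues)
                Console.WriteLine(loss);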
Example #4
 protected GradientBoostedTreesAlgorithm(GradientBoostedTreesAlgorithm original, Cloner cloner)
     : base(original, cloner)
 {
 }
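The protected copy constructor above is the cloning constructor of HeuristicLab's deep-clone pattern. It is conventionally paired with a Clone override like the following sketch (assumed from the usual Cloner/IDeepCloneable convention; the override itself is not part of this excerpt):

 // Conventional counterpart to the cloning constructor above (assumed; not shown in the excerpt).
 public override IDeepCloneable Clone(Cloner cloner)
 {
     return new GradientBoostedTreesAlgorithm(this, cloner);
 }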
Example #5
    public void GradientBoostingTestTowerRelativeError() {
      var gbt = new GradientBoostedTreesAlgorithm();
      var provider = new HeuristicLab.Problems.Instances.DataAnalysis.RegressionRealWorldInstanceProvider();
      var instance = provider.GetDataDescriptors().Single(x => x.Name.Contains("Tower"));
      var regProblem = new RegressionProblem();
      regProblem.Load(provider.LoadData(instance));

      #region Algorithm Configuration
      gbt.Problem = regProblem;
      gbt.Seed = 0;
      gbt.SetSeedRandomly = false;
      gbt.Iterations = 3000;
      gbt.MaxSize = 20;
      gbt.Nu = 0.005;
      gbt.LossFunctionParameter.Value = gbt.LossFunctionParameter.ValidValues.First(l => l.ToString().Contains("Relative"));
      gbt.CreateSolution = false;
      #endregion

      RunAlgorithm(gbt);

      Console.WriteLine(gbt.ExecutionTime);
      Assert.AreEqual(0.061954221604374943, ((DoubleValue)gbt.Results["Loss (train)"].Value).Value, 1E-6);
      Assert.AreEqual(0.06316303473499961, ((DoubleValue)gbt.Results["Loss (test)"].Value).Value, 1E-6);
    }
Example #6
    public void GradientBoostingTestTowerSquaredError() {
      var gbt = new GradientBoostedTreesAlgorithm();
      var provider = new HeuristicLab.Problems.Instances.DataAnalysis.RegressionRealWorldInstanceProvider();
      var instance = provider.GetDataDescriptors().Single(x => x.Name.Contains("Tower"));
      var regProblem = new RegressionProblem();
      regProblem.Load(provider.LoadData(instance));

      #region Algorithm Configuration
      gbt.Problem = regProblem;
      gbt.Seed = 0;
      gbt.SetSeedRandomly = false;
      gbt.Iterations = 5000;
      gbt.MaxSize = 20;
      gbt.CreateSolution = false;
      #endregion

      RunAlgorithm(gbt);

      Console.WriteLine(gbt.ExecutionTime);
      Assert.AreEqual(267.68704241153921, ((DoubleValue)gbt.Results["Loss (train)"].Value).Value, 1E-6);
      Assert.AreEqual(393.84704062205469, ((DoubleValue)gbt.Results["Loss (test)"].Value).Value, 1E-6);
    }
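Examples #5 and #6 call RunAlgorithm(gbt) instead of gbt.Start(), but the helper itself is not included in the listing. A plausible sketch, assuming the Start()/Stopped members that HeuristicLab executables expose, would block until the run finishes:

    // Hypothetical RunAlgorithm helper: start the algorithm and wait for it to stop.
    // Start()/Stopped are assumed from HeuristicLab's executable interface; the real helper is not shown.
    private static void RunAlgorithm(GradientBoostedTreesAlgorithm gbt) {
      var finished = new System.Threading.AutoResetEvent(false);
      gbt.Stopped += (sender, args) => finished.Set();
      gbt.Start();
      finished.WaitOne();
    }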