/// <summary>
/// This method implements the main functionality of stochastic gradient boosting.
/// </summary>
private void BuildBoostTree(Metrics metrics, BoostTreeLoss boostTreeLoss, DataFeatureSampleRate dataFeatureSampleRate,
                            int maxTreeSize, int minNumSamples, int numIter, int cThreads, Random r)
{
    float minValidationErr = 100;
    float[] funValueGain = new float[this.numSamples];

    //(1) compute the scores produced by the sub-model
    boostTreeLoss.ModelEval(this.subModel, this.labelFeatureDataCoded, this.subModelScore);

    //(2) compute the corresponding function values
    boostTreeLoss.ModelScoresToFuncValues();

    //(3) compute the metrics of the sub-model
    int m = optIter = 0;
    metrics.ComputeMetrics(boostTreeLoss.ModelScores, m, false);
#if VERBOSE
    Console.WriteLine(metrics.ResultsHeaderStr());
    Console.WriteLine(metrics.ResultsStr(m));
#endif

    //(4) create samplers to sub-sample the features and data during node splitting
    RandomSampler featureSampler = new RandomSampler(r);
    RandomSampler dataSampler = new RandomSampler(r);

    //(5) create the object that does node splitting
#if SINGLE_THREAD
    // single-threaded
    this.findSplit = new FindSplitSync();
#else
    // multi-threaded
    this.findSplit = new FindSplitAsync(cThreads);
#endif //SINGLE_THREAD

    //(6) iteratively build the boosted trees
    for (m = 0; m < numIter; m++)
    {
        // select a fraction of the data groups for this iteration
        float sampleRate = dataFeatureSampleRate.SampleDataGroupRate(m);
        DataSet workDataSet = this.labelFeatureDataCoded.DataGroups.GetDataPartition(DataPartitionType.Train, sampleRate, r);
        workDataSet.Sort(); // sorting gains some noticeable speedup

        // compute the pseudo-response of the current system
        boostTreeLoss.ComputePseudoResponse(workDataSet);

        // set the data and feature sampling rates for node splitting in this iteration
        featureSampler.SampleRate = dataFeatureSampleRate.SampleFeatureRate(m);
        dataSampler.SampleRate = dataFeatureSampleRate.SampleDataRate(m);

        // fit a residual model (regression trees) to the pseudo-response
        // to compensate for the error of the current system
        for (int k = 0; k < boostTreeLoss.NumTreesPerIteration; k++)
        {
            // only use the important data points if necessary
            int[] trimIndex = boostTreeLoss.TrimIndex(workDataSet, k, m);

            // build a regression tree fitted to the pseudo-response
            this.regressionTrees[m, k] = new RegressionTree(this.labelFeatureDataCoded, boostTreeLoss, k, trimIndex,
                                                            dataSampler, featureSampler, maxTreeSize, minNumSamples,
                                                            this.findSplit, this.tempSpace);

            // compute the function value of all data points produced by the newly generated regression tree
            this.regressionTrees[m, k].PredictFunValue(this.labelFeatureDataCoded, ref funValueGain);

            // try a more global optimization: refine the leaf-node responses of the decision tree
            // by looking at all the training data points, instead of only the ones falling into each region.
            // Here we estimate and apply a single global multiplication factor for all leaf nodes.
            float adjFactor = (m > 0) ? boostTreeLoss.ComputeResponseAdjust(funValueGain) : 1.0F;

            // apply the multiplication factor to the leaf nodes of the newly generated regression tree
            this.regressionTrees[m, k].AdjustResponse(adjFactor);

            // update the function value for all data points given the new regression tree
            boostTreeLoss.AccFuncValueGain(funValueGain, adjFactor, k);
        }

        // compute the metrics of the current system
        boostTreeLoss.FuncValuesToModelScores();
        metrics.ComputeMetrics(boostTreeLoss.ModelScores, m + 1, false);
#if VERBOSE
        Console.WriteLine(metrics.ResultsStr(m + 1));
#endif
        // keep track of the best (minimal error) iteration on the validation data set
        this.optIter = metrics.GetBest(DataPartitionType.Validation, ref minValidationErr);

        if ((m + 1) % 5 == 0) // save the trees every 5 iterations
            SaveBoostTree();
    }

    if (this.findSplit != null)
    {
        this.findSplit.Cleanup();
    }
}
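// The loop above follows the standard stochastic gradient boosting recipe:
// each iteration fits a regression tree h_m to the pseudo-response (the
// negative gradient of the loss at the current function values), then moves
// every sample's function value by the tree's prediction, scaled by the
// global leaf adjustment factor. A minimal stand-alone sketch of that
// accumulation step follows; it assumes AccFuncValueGain folds a per-sample
// gain into the running function values, and the names here (funcValues,
// shrinkage) are illustrative, not fields of this class. Any learning-rate
// shrinkage in the real code may be folded in elsewhere.
private static void AccumulateGainSketch(float[] funcValues, float[] funValueGain,
                                         float adjFactor, float shrinkage)
{
    for (int i = 0; i < funcValues.Length; i++)
    {
        // F_m(x_i) = F_{m-1}(x_i) + shrinkage * adjFactor * h_m(x_i)
        funcValues[i] += shrinkage * adjFactor * funValueGain[i];
    }
}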
/// <summary>
/// This method implements the main functionality of stochastic gradient boosting, for distributed computing.
/// </summary>
private void DistributedBuildBoostTree(Metrics metrics, BoostTreeLoss boostTreeLoss, DataFeatureSampleRate dataFeatureSampleRate,
                                       int maxTreeSize, int minNumSamples, int numIter, int cThreads, Random r)
{
    float minValidationErr = 100;
    float[] funValueGain = new float[this.numSamples];

    //(1) compute the scores produced by the sub-model
    boostTreeLoss.ModelEval(this.subModel, this.labelFeatureDataCoded, this.subModelScore);

    //(2) compute the corresponding function values
    boostTreeLoss.ModelScoresToFuncValues();

    //(3) compute the metrics of the sub-model
    int m = optIter = 0;
    metrics.ComputeMetrics(boostTreeLoss.ModelScores, m, false);
#if VERBOSE
    Console.WriteLine(metrics.ResultsHeaderStr());
    Console.WriteLine(metrics.ResultsStr(m));
#endif

    //(4) create samplers to sub-sample the features and data during node splitting
    RandomSampler featureSampler = new RandomSampler(r);
    RandomSampler dataSampler = new RandomSampler(r);

    //(5) create the object that does node splitting
#if SINGLE_THREAD
    // single-threaded
    this.findSplit = new FindSplitSync();
#else
    // multi-threaded
    this.findSplit = new FindSplitAsync(cThreads);
#endif //SINGLE_THREAD

    //(6) iteratively build the boosted trees
    for (m = 0; m < numIter; m++)
    {
        // returns an array of regression trees (one per class k) for this iteration
        RegressionTree[] candidateTree = GetNextWeakLearner(m, funValueGain, metrics, boostTreeLoss, dataFeatureSampleRate,
                                                            dataSampler, featureSampler, maxTreeSize, minNumSamples, cThreads, r);

        AddWeakLearner(candidateTree, funValueGain, m, metrics, boostTreeLoss, dataFeatureSampleRate,
                       maxTreeSize, minNumSamples, cThreads, r);

        // compute the metrics of the current system
        boostTreeLoss.FuncValuesToModelScores();
        metrics.ComputeMetrics(boostTreeLoss.ModelScores, m + 1, false);
#if VERBOSE
        Console.WriteLine(metrics.ResultsStr(m + 1));
#endif
        // keep track of the best (minimal error) iteration on the validation data set
        this.optIter = metrics.GetBest(DataPartitionType.Validation, ref minValidationErr);

        if ((m + 1) % 5 == 0) // save the trees every 5 iterations
            SaveBoostTree();
    }

    if (this.findSplit != null)
    {
        this.findSplit.Cleanup();
    }
}
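// Both training loops above track the best iteration by validation error via
// metrics.GetBest(DataPartitionType.Validation, ref minValidationErr), which
// is what optIter ends up holding. A simplified stand-alone sketch of that
// bookkeeping, assuming GetBest scans the per-iteration validation errors
// recorded so far; the array and method names here are hypothetical, for
// illustration only:
private static int BestIterationSketch(float[] validationErrPerIter, ref float minErr)
{
    int best = 0;
    for (int i = 0; i < validationErrPerIter.Length; i++)
    {
        if (validationErrPerIter[i] < minErr)
        {
            // new minimum: remember both the error and the iteration index
            minErr = validationErrPerIter[i];
            best = i;
        }
    }
    return best;
}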