Example #1
		/// <summary> Performs one boosting iteration: builds one regression model per class on the
		/// current working responses, adds the (shrunken) predictions to the cumulative scores in
		/// trainFs, and recomputes the class probability estimates in probs.</summary>
		private void  performIteration(double[][] trainYs, double[][] trainFs, double[][] probs, Instances data, double origSumOfWeights)
		{
			
			if (m_Debug)
			{
				System.Console.Error.WriteLine("Training classifier " + (m_NumGenerated + 1));
			}
			
			// Build the new models
			for (int j = 0; j < m_NumClasses; j++)
			{
				if (m_Debug)
				{
					System.Console.Error.WriteLine("\t...for class " + (j + 1) + " (" + m_ClassAttribute.name() + "=" + m_ClassAttribute.value_Renamed(j) + ")");
				}
				
				// Make copy because we want to save the weights
				Instances boostData = new Instances(data);
				
				// Set instance pseudoclass and weights
				for (int i = 0; i < probs.Length; i++)
				{
					
					// Compute the working response z = (y* - p) / (p * (1 - p)),
					// thresholded at +/- Z_MAX, and the weight w = (y* - p) / z
					double p = probs[i][j];
					double z, actual = trainYs[i][j];
					if (actual == 1 - m_Offset)
					{
						z = 1.0 / p;
						if (z > Z_MAX)
						{
							// threshold
							z = Z_MAX;
						}
					}
					else
					{
						z = -1.0 / (1.0 - p);
						if (z < -Z_MAX)
						{
							// threshold
							z = -Z_MAX;
						}
						}
					}
					double w = (actual - p) / z;
					
					// Set values for instance
					Instance current = boostData.instance(i);
					current.setValue(boostData.classIndex(), z);
					current.Weight = current.weight() * w;
				}
				
				// Scale the weights (helps with some base learners)
				double sumOfWeights = boostData.sumOfWeights();
				double scalingFactor = (double) origSumOfWeights / sumOfWeights;
				for (int i = 0; i < probs.Length; i++)
				{
					Instance current = boostData.instance(i);
					current.Weight = current.weight() * scalingFactor;
				}
				
				// Select instances to train the classifier on
				Instances trainData = boostData;
				if (m_WeightThreshold < 100)
				{
					trainData = selectWeightQuantile(boostData, (double) m_WeightThreshold / 100);
				}
				else
				{
					if (m_UseResampling)
					{
						double[] weights = new double[boostData.numInstances()];
						for (int kk = 0; kk < weights.Length; kk++)
						{
							weights[kk] = boostData.instance(kk).weight();
						}
						trainData = boostData.resampleWithWeights(m_RandomInstance, weights);
					}
				}
				
				// Build the classifier
				m_Classifiers[j][m_NumGenerated].buildClassifier(trainData);
			}
			
			// Evaluate / increment trainFs from the classifier
			for (int i = 0; i < trainFs.Length; i++)
			{
				double[] pred = new double[m_NumClasses];
				double predSum = 0;
				for (int j = 0; j < m_NumClasses; j++)
				{
					pred[j] = m_Shrinkage * m_Classifiers[j][m_NumGenerated].classifyInstance(data.instance(i));
					predSum += pred[j];
				}
				predSum /= m_NumClasses;
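				// Center each class prediction around the mean and scale by (K-1)/K
				// so the increments to the cumulative scores sum to zero across classes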
				for (int j = 0; j < m_NumClasses; j++)
				{
					trainFs[i][j] += (pred[j] - predSum) * (m_NumClasses - 1) / m_NumClasses;
				}
			}
			m_NumGenerated++;
			
			// Compute the current probability estimates
			for (int i = 0; i < trainYs.Length; i++)
			{
				probs[i] = Calculateprobs(trainFs[i]);
			}
		}
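
The last loop delegates to Calculateprobs, which is not shown in this excerpt. A minimal sketch of how such a helper is typically written in LogitBoost-style code (the exact body in this port may differ) converts the cumulative per-class scores F_j into probabilities via the multinomial logistic transform p_j = exp(F_j) / sum_k exp(F_k), shifting by the maximum score for numerical stability:

		/// <summary> Hypothetical helper (not part of the excerpt above): maps cumulative
		/// per-class scores to class probabilities with the multinomial logistic transform.</summary>
		private double[] Calculateprobs(double[] Fs)
		{
			// Find the largest score so the exponentials below cannot overflow
			double maxF = double.NegativeInfinity;
			for (int i = 0; i < Fs.Length; i++)
			{
				if (Fs[i] > maxF)
				{
					maxF = Fs[i];
				}
			}
			
			// Exponentiate the shifted scores and accumulate their sum
			double sum = 0;
			double[] probs = new double[Fs.Length];
			for (int i = 0; i < Fs.Length; i++)
			{
				probs[i] = System.Math.Exp(Fs[i] - maxF);
				sum += probs[i];
			}
			
			// Normalize so the probabilities sum to one
			for (int i = 0; i < Fs.Length; i++)
			{
				probs[i] /= sum;
			}
			return probs;
		}

Subtracting the maximum score before exponentiating does not change the normalized result, but it avoids overflow when the cumulative scores grow large over many boosting iterations.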