Example #1
		/// <summary> Builds the tree structure using a hold-out set.
		/// </summary>
		/// <param name="train">the data for which the tree structure is to be
		/// generated.
		/// </param>
		/// <param name="test">the test data for potential pruning
		/// </param>
		/// <param name="keepData">is training Data to be kept?
		/// </param>
		/// <exception cref="Exception">if something goes wrong
		/// </exception>
		public virtual void buildTree(Instances train, Instances test, bool keepData)
		{
			
			Instances[] localTrain, localTest;
			int i;
			
			if (keepData)
			{
				m_train = train;
			}
			m_isLeaf = false;
			m_isEmpty = false;
			m_sons = null;
			m_localModel = m_toSelectModel.selectModel(train, test);
			m_test = new Distribution(test, m_localModel);
			if (m_localModel.numSubsets() > 1)
			{
				localTrain = m_localModel.split(train);
				localTest = m_localModel.split(test);
				train = test = null;
				m_sons = new ClassifierTree[m_localModel.numSubsets()];
				for (i = 0; i < m_sons.Length; i++)
				{
					m_sons[i] = getNewTree(localTrain[i], localTest[i]);
					localTrain[i] = null;
					localTest[i] = null;
				}
			}
			else
			{
				m_isLeaf = true;
				if (Utils.eq(train.sumOfWeights(), 0))
					m_isEmpty = true;
				train = test = null;
			}
		}
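A hedged usage sketch (illustrative, not part of the port): the caller splits its data, grows the tree on one part, and holds the rest out for pruning. A concrete ClassifierTree subclass instance is assumed; trainCV and testCV are the same Instances helpers that appear in Example 5 below.

		// Illustrative only: drive buildTree(train, test, keepData) with a hold-out split.
		public static void GrowWithHoldOut(ClassifierTree tree, Instances data)
		{
			// Fold 0 of a 3-fold split grows the tree; its complement is held out.
			Instances grow = data.trainCV(3, 0, new System.Random(1));
			Instances holdOut = data.testCV(3, 0);

			// keepData = false: keep only the structure, not node-local training data.
			tree.buildTree(grow, holdOut, false);
		}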
Example #2
		/// <summary> Selects C4.5-type split for the given dataset.</summary>
		public override ClassifierSplitModel selectModel(Instances data)
		{
			
			double minResult;
			//double currentResult;
			BinC45Split[] currentModel;
			BinC45Split bestModel = null;
			NoSplit noSplitModel = null;
			double averageInfoGain = 0;
			int validModels = 0;
			bool multiVal = true;
			Distribution checkDistribution;
			double sumOfWeights;
			int i;
			
			try
			{
				
				// Check if all Instances belong to one class or if not
				// enough Instances to split.
				checkDistribution = new Distribution(data);
				noSplitModel = new NoSplit(checkDistribution);
				if (Utils.sm(checkDistribution.total(), 2 * m_minNoObj) || Utils.eq(checkDistribution.total(), checkDistribution.perClass(checkDistribution.maxClass())))
					return noSplitModel;
				
				// Check if all attributes are nominal and have a 
				// lot of values.
				System.Collections.IEnumerator enu = data.enumerateAttributes();
				while (enu.MoveNext())
				{
					weka.core.Attribute attribute = (weka.core.Attribute) enu.Current;
					if ((attribute.Numeric) || (Utils.sm((double) attribute.numValues(), (0.3 * (double) m_allData.numInstances()))))
					{
						multiVal = false;
						break;
					}
				}
				currentModel = new BinC45Split[data.numAttributes()];
				sumOfWeights = data.sumOfWeights();
				
				// For each attribute.
				for (i = 0; i < data.numAttributes(); i++)
				{
					
					// Apart from class attribute.
					if (i != data.classIndex())
					{
						
						// Get models for current attribute.
						currentModel[i] = new BinC45Split(i, m_minNoObj, sumOfWeights);
						currentModel[i].buildClassifier(data);
						
						// Check if useful split for current attribute
						// exists and check for enumerated attributes with 
						// a lot of values.
						if (currentModel[i].checkModel())
							if ((data.attribute(i).Numeric) || (multiVal || Utils.sm((double) data.attribute(i).numValues(), (0.3 * (double) m_allData.numInstances()))))
							{
								averageInfoGain = averageInfoGain + currentModel[i].infoGain();
								validModels++;
							}
					}
					else
						currentModel[i] = null;
				}
				
				// Check if any useful split was found.
				if (validModels == 0)
					return noSplitModel;
				averageInfoGain = averageInfoGain / (double) validModels;
				
				// Find "best" attribute to split on.
				minResult = 0;
				for (i = 0; i < data.numAttributes(); i++)
				{
					if ((i != data.classIndex()) && (currentModel[i].checkModel()))
					{
						// Use 1E-3 here to get a closer approximation to the original
						// implementation.
						if ((currentModel[i].infoGain() >= (averageInfoGain - 1e-3)) && Utils.gr(currentModel[i].gainRatio(), minResult))
						{
							bestModel = currentModel[i];
							minResult = currentModel[i].gainRatio();
						}
					}
				}
				
				// Check if useful split was found.
				if (Utils.eq(minResult, 0))
					return noSplitModel;
				
				// Add all Instances with unknown values for the corresponding
				// attribute to the distribution for the model, so that
				// the complete distribution is stored with the model. 
				bestModel.distribution().addInstWithUnknown(data, bestModel.attIndex());
				
				// Set the split point analogously to C4.5 if the attribute is numeric.
				bestModel.SplitPoint = m_allData;
				return bestModel;
			}
			catch (System.Exception e)
			{
                System.Console.WriteLine(e.StackTrace + " " + e.Message);
			}
			return null;
		}
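Stripped of the Weka plumbing, the selection rule above reads: average the information gain over all valid candidate splits, then take the split with the highest gain ratio among those whose information gain is at least the average minus 1e-3. A minimal self-contained sketch of that rule (the Candidate type is hypothetical, introduced only for illustration):

		// Hypothetical restatement of the C4.5 split-selection rule used above.
		public class Candidate
		{
			public double InfoGain;
			public double GainRatio;
		}

		public static Candidate SelectBest(System.Collections.Generic.List<Candidate> candidates)
		{
			if (candidates.Count == 0)
				return null; // no useful split: caller falls back to NoSplit

			double avgInfoGain = 0;
			foreach (Candidate c in candidates)
				avgInfoGain += c.InfoGain;
			avgInfoGain /= candidates.Count;

			Candidate best = null;
			double bestRatio = 0; // plays the role of minResult above
			foreach (Candidate c in candidates)
			{
				// The 1e-3 slack mirrors the original C4.5 implementation.
				if (c.InfoGain >= avgInfoGain - 1e-3 && c.GainRatio > bestRatio)
				{
					best = c;
					bestRatio = c.GainRatio;
				}
			}
			return best; // null when no candidate beats gain ratio 0
		}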
Example #3
		/// <summary> Builds the tree structure.
		/// </summary>
		/// <param name="data">the data for which the tree structure is to be
		/// generated.
		/// </param>
		/// <param name="keepData">is training data to be kept?
		/// </param>
		/// <exception cref="Exception">if something goes wrong
		/// </exception>
		public virtual void buildTree(Instances data, bool keepData)
		{
			
			Instances[] localInstances;
			
			if (keepData)
			{
				m_train = data;
			}
			m_test = null;
			m_isLeaf = false;
			m_isEmpty = false;
			m_sons = null;
			m_localModel = m_toSelectModel.selectModel(data);
			if (m_localModel.numSubsets() > 1)
			{
				localInstances = m_localModel.split(data);
				data = null;
				m_sons = new ClassifierTree[m_localModel.numSubsets()];
				for (int i = 0; i < m_sons.Length; i++)
				{
					m_sons[i] = getNewTree(localInstances[i]);
					localInstances[i] = null;
				}
			}
			else
			{
				m_isLeaf = true;
				if (Utils.eq(data.sumOfWeights(), 0))
					m_isEmpty = true;
				data = null;
			}
		}
		/// <summary> Computes new distributions of instances for the nodes
		/// in the tree.
		/// </summary>
		/// <exception cref="Exception">if something goes wrong
		/// </exception>
		private void newDistribution(Instances data)
		{
			
			Instances[] localInstances;
			
			localModel().resetDistribution(data);
			m_train = data;
			if (!m_isLeaf)
			{
				localInstances = (Instances[]) localModel().split(data);
				for (int i = 0; i < m_sons.Length; i++)
					son(i).newDistribution(localInstances[i]);
			}
			else
			{
				
				// Check whether there are some instances at the leaf now!
				if (!Utils.eq(data.sumOfWeights(), 0))
				{
					m_isEmpty = false;
				}
			}
		}
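A hedged usage sketch for this overload (illustrative, not part of the port): with no hold-out set, each node selects its split from the training data alone and recurses until selectModel returns a single subset.

		// Illustrative only: grow an unpruned tree from a loaded dataset.
		// Assumes a concrete ClassifierTree subclass instance ("tree").
		public static void GrowTree(ClassifierTree tree, Instances data)
		{
			data.ClassIndex = data.numAttributes() - 1; // make the last attribute the class
			tree.buildTree(data, true);                 // keepData = true: nodes retain m_train
		}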
Example #5
		/// <summary> Method for testing this class.
		/// </summary>
		/// <param name="argv">should contain one element: the name of an ARFF file
		/// </param>
		//@ requires argv != null;
		//@ requires argv.length == 1;
		//@ requires argv[0] != null;
		public static void test(System.String[] argv)
		{
			
			Instances instances, secondInstances, train, test, empty;
			//Instance instance;
			System.Random random = new System.Random(2);
			System.IO.StreamReader reader;
			int start, num;
			//double newWeight;
			FastVector testAtts, testVals;
			int i, j;
			
			try
			{
				if (argv.Length > 1)
				{
					throw (new System.Exception("Usage: Instances [<filename>]"));
				}
				
				// Creating set of instances from scratch
				testVals = new FastVector(2);
				testVals.addElement("first_value");
				testVals.addElement("second_value");
				testAtts = new FastVector(2);
				testAtts.addElement(new Attribute("nominal_attribute", testVals));
				testAtts.addElement(new Attribute("numeric_attribute"));
				instances = new Instances("test_set", testAtts, 10);
				instances.add(new Instance(instances.numAttributes()));
				instances.add(new Instance(instances.numAttributes()));
				instances.add(new Instance(instances.numAttributes()));
				instances.ClassIndex = 0;
				System.Console.Out.WriteLine("\nSet of instances created from scratch:\n");
				System.Console.Out.WriteLine(instances);
				
				if (argv.Length == 1)
				{
					System.String filename = argv[0];
					reader = new System.IO.StreamReader(filename, System.Text.Encoding.Default);
					
					// Read first five instances and print them
					System.Console.Out.WriteLine("\nFirst five instances from file:\n");
					instances = new Instances(reader, 1);
					instances.ClassIndex = instances.numAttributes() - 1;
					i = 0;
					while ((i < 5) && (instances.readInstance(reader)))
					{
						i++;
					}
					System.Console.Out.WriteLine(instances);
					
					// Read all the instances in the file
					reader = new System.IO.StreamReader(filename, System.Text.Encoding.Default);
					instances = new Instances(reader);
					
					// Make the last attribute be the class 
					instances.ClassIndex = instances.numAttributes() - 1;
					
					// Print header and instances.
					System.Console.Out.WriteLine("\nDataset:\n");
					System.Console.Out.WriteLine(instances);
					System.Console.Out.WriteLine("\nClass index: " + instances.classIndex());
				}
				
				// Test basic methods based on class index.
				System.Console.Out.WriteLine("\nClass name: " + instances.classAttribute().name());
				System.Console.Out.WriteLine("\nClass index: " + instances.classIndex());
				System.Console.Out.WriteLine("\nClass is nominal: " + instances.classAttribute().Nominal);
				System.Console.Out.WriteLine("\nClass is numeric: " + instances.classAttribute().Numeric);
				System.Console.Out.WriteLine("\nClasses:\n");
				for (i = 0; i < instances.numClasses(); i++)
				{
					System.Console.Out.WriteLine(instances.classAttribute().value_Renamed(i));
				}
				System.Console.Out.WriteLine("\nClass values and labels of instances:\n");
				for (i = 0; i < instances.numInstances(); i++)
				{
					Instance inst = instances.instance(i);
					System.Console.Out.Write(inst.classValue() + "\t");
					System.Console.Out.Write(inst.toString(inst.classIndex()));
					if (inst.classIsMissing())
					{
						System.Console.Out.WriteLine("\tis missing");
					}
					else
					{
						System.Console.Out.WriteLine();
					}
				}
				
				// Create random weights.
				System.Console.Out.WriteLine("\nCreating random weights for instances.");
				for (i = 0; i < instances.numInstances(); i++)
				{
					instances.instance(i).Weight = random.NextDouble();
				}
				
				// Print all instances and their weights (and the sum of weights).
				System.Console.Out.WriteLine("\nInstances and their weights:\n");
				System.Console.Out.WriteLine(instances.instancesAndWeights());
				System.Console.Out.Write("\nSum of weights: ");
				System.Console.Out.WriteLine(instances.sumOfWeights());
				
				// Insert an attribute
				secondInstances = new Instances(instances);
				Attribute testAtt = new Attribute("Inserted");
				secondInstances.insertAttributeAt(testAtt, 0);
				System.Console.Out.WriteLine("\nSet with inserted attribute:\n");
				System.Console.Out.WriteLine(secondInstances);
				System.Console.Out.WriteLine("\nClass name: " + secondInstances.classAttribute().name());
				
				// Delete the attribute
				secondInstances.deleteAttributeAt(0);
				System.Console.Out.WriteLine("\nSet with attribute deleted:\n");
				System.Console.Out.WriteLine(secondInstances);
				System.Console.Out.WriteLine("\nClass name: " + secondInstances.classAttribute().name());
				
				// Test if headers are equal
				System.Console.Out.WriteLine("\nHeaders equal: " + instances.equalHeaders(secondInstances) + "\n");
				
				// Print data in internal format.
				System.Console.Out.WriteLine("\nData (internal values):\n");
				for (i = 0; i < instances.numInstances(); i++)
				{
					for (j = 0; j < instances.numAttributes(); j++)
					{
						if (instances.instance(i).isMissing(j))
						{
							System.Console.Out.Write("? ");
						}
						else
						{
							System.Console.Out.Write(instances.instance(i).value_Renamed(j) + " ");
						}
					}
					System.Console.Out.WriteLine();
				}
				
				// Just print header
				System.Console.Out.WriteLine("\nEmpty dataset:\n");
				empty = new Instances(instances, 0);
				System.Console.Out.WriteLine(empty);
				System.Console.Out.WriteLine("\nClass name: " + empty.classAttribute().name());
				
				// Create copy and rename an attribute and a value (if possible)
				if (empty.classAttribute().Nominal)
				{
					Instances copy = new Instances(empty, 0);
					copy.renameAttribute(copy.classAttribute(), "new_name");
					copy.renameAttributeValue(copy.classAttribute(), copy.classAttribute().value_Renamed(0), "new_val_name");
					System.Console.Out.WriteLine("\nDataset with names changed:\n" + copy);
					System.Console.Out.WriteLine("\nOriginal dataset:\n" + empty);
				}
				
				// Create and prints subset of instances.
				start = instances.numInstances() / 4;
				num = instances.numInstances() / 2;
				System.Console.Out.Write("\nSubset of dataset: ");
				System.Console.Out.WriteLine(num + " instances starting at instance " + (start + 1));
				secondInstances = new Instances(instances, start, num);
				System.Console.Out.WriteLine("\nClass name: " + secondInstances.classAttribute().name());
				
				// Print all instances and their weights (and the sum of weights).
				System.Console.Out.WriteLine("\nInstances and their weights:\n");
				System.Console.Out.WriteLine(secondInstances.instancesAndWeights());
				System.Console.Out.Write("\nSum of weights: ");
				System.Console.Out.WriteLine(secondInstances.sumOfWeights());
				
				// Create and print training and test sets for 3-fold
				// cross-validation.
				System.Console.Out.WriteLine("\nTrain and test folds for 3-fold CV:");
				if (instances.classAttribute().Nominal)
				{
					instances.stratify(3);
				}
				for (j = 0; j < 3; j++)
				{
					train = instances.trainCV(3, j, new System.Random((System.Int32) 1));
					test = instances.testCV(3, j);
					
					// Print all instances and their weights (and the sum of weights).
					System.Console.Out.WriteLine("\nTrain: ");
					System.Console.Out.WriteLine("\nInstances and their weights:\n");
					System.Console.Out.WriteLine(train.instancesAndWeights());
					System.Console.Out.Write("\nSum of weights: ");
					System.Console.Out.WriteLine(train.sumOfWeights());
					System.Console.Out.WriteLine("\nClass name: " + train.classAttribute().name());
					System.Console.Out.WriteLine("\nTest: ");
					System.Console.Out.WriteLine("\nInstances and their weights:\n");
					System.Console.Out.WriteLine(test.instancesAndWeights());
					System.Console.Out.Write("\nSum of weights: ");
					System.Console.Out.WriteLine(test.sumOfWeights());
					System.Console.Out.WriteLine("\nClass name: " + test.classAttribute().name());
				}
				
				// Randomize instances and print them.
				System.Console.Out.WriteLine("\nRandomized dataset:");
				instances.randomize(random);
				
				// Print all instances and their weights (and the sum of weights).
				System.Console.Out.WriteLine("\nInstances and their weights:\n");
				System.Console.Out.WriteLine(instances.instancesAndWeights());
				System.Console.Out.Write("\nSum of weights: ");
				System.Console.Out.WriteLine(instances.sumOfWeights());
				
				// Sort instances according to first attribute and
				// print them.
				System.Console.Out.Write("\nInstances sorted according to first attribute:\n ");
				instances.sort(0);
				
				// Print all instances and their weights (and the sum of weights).
				System.Console.Out.WriteLine("\nInstances and their weights:\n");
				System.Console.Out.WriteLine(instances.instancesAndWeights());
				System.Console.Out.Write("\nSum of weights: ");
				System.Console.Out.WriteLine(instances.sumOfWeights());
			}
			catch (System.Exception e)
			{
				System.Console.Error.WriteLine(e.StackTrace + " " + e.Message);
			}
		}
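A minimal driver for this harness might look like the following; the Main method and the example filename are hypothetical:

		// Hypothetical entry point: forwards a single ARFF filename to test() above.
		public static void Main(System.String[] args)
		{
			// e.g. args = { "weather.arff" }; with no argument, only the dataset
			// built from scratch at the top of test() is exercised.
			test(args);
		}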
Example #6
		/// <summary> Performs one boosting iteration.</summary>
		private void performIteration(double[][] trainYs, double[][] trainFs, double[][] probs, Instances data, double origSumOfWeights)
		{
			
			if (m_Debug)
			{
				System.Console.Error.WriteLine("Training classifier " + (m_NumGenerated + 1));
			}
			
			// Build the new models
			for (int j = 0; j < m_NumClasses; j++)
			{
				if (m_Debug)
				{
					System.Console.Error.WriteLine("\t...for class " + (j + 1) + " (" + m_ClassAttribute.name() + "=" + m_ClassAttribute.value_Renamed(j) + ")");
				}
				
				// Make copy because we want to save the weights
				Instances boostData = new Instances(data);
				
				// Set instance pseudoclass and weights
				for (int i = 0; i < probs.Length; i++)
				{
					
					// Compute response and weight
					double p = probs[i][j];
					double z, actual = trainYs[i][j];
					if (actual == 1 - m_Offset)
					{
						z = 1.0 / p;
						if (z > Z_MAX)
						{
							// threshold
							z = Z_MAX;
						}
					}
					else
					{
						z = -1.0 / (1.0 - p);
						if (z < -Z_MAX)
						{
							// threshold
							z = -Z_MAX;
						}
					}
					double w = (actual - p) / z;
					
					// Set values for instance
					Instance current = boostData.instance(i);
					current.setValue(boostData.classIndex(), z);
					current.Weight = current.weight() * w;
				}
				
				// Scale the weights (helps with some base learners)
				double sumOfWeights = boostData.sumOfWeights();
				double scalingFactor = origSumOfWeights / sumOfWeights;
				for (int i = 0; i < probs.Length; i++)
				{
					Instance current = boostData.instance(i);
					current.Weight = current.weight() * scalingFactor;
				}
				
				// Select instances to train the classifier on
				Instances trainData = boostData;
				if (m_WeightThreshold < 100)
				{
					trainData = selectWeightQuantile(boostData, (double) m_WeightThreshold / 100);
				}
				else
				{
					if (m_UseResampling)
					{
						double[] weights = new double[boostData.numInstances()];
						for (int kk = 0; kk < weights.Length; kk++)
						{
							weights[kk] = boostData.instance(kk).weight();
						}
						trainData = boostData.resampleWithWeights(m_RandomInstance, weights);
					}
				}
				
				// Build the classifier
				m_Classifiers[j][m_NumGenerated].buildClassifier(trainData);
			}
			
			// Evaluate / increment trainFs from the classifier
			for (int i = 0; i < trainFs.Length; i++)
			{
				double[] pred = new double[m_NumClasses];
				double predSum = 0;
				for (int j = 0; j < m_NumClasses; j++)
				{
					pred[j] = m_Shrinkage * m_Classifiers[j][m_NumGenerated].classifyInstance(data.instance(i));
					predSum += pred[j];
				}
				predSum /= m_NumClasses;
				for (int j = 0; j < m_NumClasses; j++)
				{
					trainFs[i][j] += (pred[j] - predSum) * (m_NumClasses - 1) / m_NumClasses;
				}
			}
			m_NumGenerated++;
			
			// Compute the current probability estimates
			for (int i = 0; i < trainYs.Length; i++)
			{
                probs[i] = Calculateprobs(trainFs[i]);
			}
		}
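Away from the Z_MAX cap, the two branches above are exactly the LogitBoost Newton step: z = (y - p) / (p * (1 - p)) and w = p * (1 - p), which is what w = (actual - p) / z works out to in both branches. A standalone sketch of that computation (hypothetical helper; the class offset is omitted and Z_MAX = 3 is an assumption, since the constant is defined elsewhere in the class):

		// Standalone sketch of the working response z and weight w computed above.
		// Hypothetical helper: y is the 0/1 pseudo-target (offset omitted);
		// Z_MAX = 3 is assumed here, the real constant lives elsewhere in the class.
		public static void ResponseAndWeight(double y, double p, out double z, out double w)
		{
			const double Z_MAX = 3.0;
			z = (y == 1.0) ? 1.0 / p : -1.0 / (1.0 - p); // z = (y - p) / (p * (1 - p))
			if (z > Z_MAX) z = Z_MAX;                    // threshold, as in the loop above
			if (z < -Z_MAX) z = -Z_MAX;
			w = (y - p) / z;                             // equals p * (1 - p) away from the cap
		}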
Example #7
		/// <summary> Builds the boosted classifier</summary>
		public virtual void buildClassifier(Instances data)
		{
			m_RandomInstance = new Random(m_Seed);
			Instances boostData;
			int classIndex = data.classIndex();
			
			if (data.classAttribute().Numeric)
			{
				throw new Exception("LogitBoost can't handle a numeric class!");
			}
			if (m_Classifier == null)
			{
				throw new System.Exception("A base classifier has not been specified!");
			}
			
			if (!(m_Classifier is WeightedInstancesHandler) && !m_UseResampling)
			{
				m_UseResampling = true;
			}
			if (data.checkForStringAttributes())
			{
				throw new Exception("Cannot handle string attributes!");
			}
			if (m_Debug)
			{
				System.Console.Error.WriteLine("Creating copy of the training data");
			}
			
			m_NumClasses = data.numClasses();
			m_ClassAttribute = data.classAttribute();
			
			// Create a copy of the data 
			data = new Instances(data);
			data.deleteWithMissingClass();
			
			// Create the base classifiers
			if (m_Debug)
			{
				System.Console.Error.WriteLine("Creating base classifiers");
			}
			m_Classifiers = new Classifier[m_NumClasses][];
			for (int j = 0; j < m_NumClasses; j++)
			{
				m_Classifiers[j] = Classifier.makeCopies(m_Classifier, this.NumIterations);
			}
			
			// Do we want to select the appropriate number of iterations
			// using cross-validation?
			int bestNumIterations = this.NumIterations;
			if (m_NumFolds > 1)
			{
				if (m_Debug)
				{
					System.Console.Error.WriteLine("Processing first fold.");
				}
				
				// Array for storing the results
				double[] results = new double[this.NumIterations];
				
				// Iterate through the CV runs
				for (int r = 0; r < m_NumRuns; r++)
				{
					
					// Stratify the data
					data.randomize(m_RandomInstance);
					data.stratify(m_NumFolds);
					
					// Perform the cross-validation
					for (int i = 0; i < m_NumFolds; i++)
					{
						
						// Get train and test folds
						Instances train = data.trainCV(m_NumFolds, i, m_RandomInstance);
						Instances test = data.testCV(m_NumFolds, i);
						
						// Make class numeric
						Instances trainN = new Instances(train);
						trainN.ClassIndex = -1;
						trainN.deleteAttributeAt(classIndex);
						trainN.insertAttributeAt(new weka.core.Attribute("'pseudo class'"), classIndex);
						trainN.ClassIndex = classIndex;
						m_NumericClassData = new Instances(trainN, 0);
						
						// Get class values
						int numInstances = train.numInstances();
						double[][] tmpArray = new double[numInstances][];
						for (int i2 = 0; i2 < numInstances; i2++)
						{
							tmpArray[i2] = new double[m_NumClasses];
						}
						double[][] trainFs = tmpArray;
						double[][] tmpArray2 = new double[numInstances][];
						for (int i3 = 0; i3 < numInstances; i3++)
						{
							tmpArray2[i3] = new double[m_NumClasses];
						}
						double[][] trainYs = tmpArray2;
						for (int j = 0; j < m_NumClasses; j++)
						{
							for (int k = 0; k < numInstances; k++)
							{
								trainYs[k][j] = (train.instance(k).classValue() == j) ? 1.0 - m_Offset : m_Offset / (double) m_NumClasses;
							}
						}
						
						// Perform iterations
						double[][] probs = initialProbs(numInstances);
						m_NumGenerated = 0;
						double sumOfWeights = train.sumOfWeights();
						for (int j = 0; j < this.NumIterations; j++)
						{
							performIteration(trainYs, trainFs, probs, trainN, sumOfWeights);
							Evaluation eval = new Evaluation(train);
							eval.evaluateModel(this, test);
							results[j] += eval.correct();
						}
					}
				}
				
				// Find the number of iterations with the lowest error
				double bestResult = -System.Double.MaxValue;
				for (int j = 0; j < this.NumIterations; j++)
				{
					if (results[j] > bestResult)
					{
						bestResult = results[j];
						bestNumIterations = j;
					}
				}
				if (m_Debug)
				{
					System.Console.Error.WriteLine("Best result for " + bestNumIterations + " iterations: " + bestResult);
				}
			}
			
			// Build classifier on all the data
			int numInstances2 = data.numInstances();
			double[][] trainFs2 = new double[numInstances2][];
			for (int i4 = 0; i4 < numInstances2; i4++)
			{
				trainFs2[i4] = new double[m_NumClasses];
			}
			double[][] trainYs2 = new double[numInstances2][];
			for (int i5 = 0; i5 < numInstances2; i5++)
			{
				trainYs2[i5] = new double[m_NumClasses];
			}
			for (int j = 0; j < m_NumClasses; j++)
			{
				for (int i = 0; i < numInstances2; i++)
				{
					trainYs2[i][j] = (data.instance(i).classValue() == j) ? 1.0 - m_Offset : m_Offset / (double) m_NumClasses;
				}
			}
			
			// Make class numeric
			data.ClassIndex = -1;
			data.deleteAttributeAt(classIndex);
			data.insertAttributeAt(new weka.core.Attribute("'pseudo class'"), classIndex);
			data.ClassIndex = classIndex;
			m_NumericClassData = new Instances(data, 0);
			
			// Perform iterations
			double[][] probs2 = initialProbs(numInstances2);
            double logLikelihood = CalculateLogLikelihood(trainYs2, probs2);
			m_NumGenerated = 0;
			if (m_Debug)
			{
				System.Console.Error.WriteLine("Avg. log-likelihood: " + logLikelihood);
			}
			double sumOfWeights2 = data.sumOfWeights();
			for (int j = 0; j < bestNumIterations; j++)
			{
				double previousLoglikelihood = logLikelihood;
				performIteration(trainYs2, trainFs2, probs2, data, sumOfWeights2);
                logLikelihood = CalculateLogLikelihood(trainYs2, probs2);
				if (m_Debug)
				{
					System.Console.Error.WriteLine("Avg. log-likelihood: " + logLikelihood);
				}
				if (System.Math.Abs(previousLoglikelihood - logLikelihood) < m_Precision)
				{
					return;
				}
			}
		}
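Finally, a hedged end-to-end sketch (illustrative): the host class name LogitBoost is inferred from the exception message above, and the base classifier (m_Classifier) is assumed to be configured before the call.

		// Illustrative only: train the boosted classifier on a nominal-class dataset.
		public static void TrainBoostedModel(LogitBoost booster, Instances data)
		{
			data.ClassIndex = data.numAttributes() - 1; // class must be nominal
			booster.buildClassifier(data);              // CV iteration selection + final build
		}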