public void BootstrapConstructorTest3()
{
    Accord.Math.Tools.SetupGenerator(0);

    // This is sample code showing how to use the 0.632 bootstrap
    // to assess the performance of Support Vector Machines.

    // Consider the example binary data. We will be trying
    // to learn an XOR problem and see how well SVMs
    // perform on this data.

    double[][] data =
    {
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
    };

    int[] xor = // result of xor for the sample input data
    {
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
    };

    // Create a new Bootstrap algorithm, passing the set size and the number of resamplings
    var bootstrap = new Bootstrap(size: data.Length, subsamples: 50);

    // Define a fitting function using Support Vector Machines. The objective of this
    // function is to learn an SVM on the subset of the data indicated by the bootstrap.

    bootstrap.Fitting = delegate(int[] indicesTrain, int[] indicesValidation)
    {
        // The fitting function is given the indices of the original set which
        // should be considered training data and the indices of the original
        // set which should be considered validation data.

        // Let's now grab the training data:
        var trainingInputs = data.Submatrix(indicesTrain);
        var trainingOutputs = xor.Submatrix(indicesTrain);

        // And now the validation data:
        var validationInputs = data.Submatrix(indicesValidation);
        var validationOutputs = xor.Submatrix(indicesValidation);

        // Create a Kernel Support Vector Machine to operate on the set
        var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

        // Create a training algorithm and learn the training data
        var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

        double trainingError = smo.Run();

        // Now we can compute the validation error on the validation data:
        double validationError = smo.ComputeError(validationInputs, validationOutputs);

        // Return a new information structure containing the model and the errors achieved.
        return new BootstrapValues(trainingError, validationError);
    };

    // Compute the bootstrap estimate
    var result = bootstrap.Compute();

    // Finally, access the measured performance.
    double trainingErrors = result.Training.Mean;
    double validationErrors = result.Validation.Mean;

    // And compute the 0.632 estimate
    double estimate = result.Estimate;

    Assert.AreEqual(50, bootstrap.B);
    Assert.AreEqual(0, trainingErrors);
    Assert.AreEqual(0.021428571428571429, validationErrors);

    Assert.AreEqual(50, bootstrap.Subsamples.Length);
    Assert.AreEqual(0.013542857142857143, estimate);
}
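For reference, the estimate asserted above is the classic 0.632 rule, which weights the resubstitution (training) error by 0.368 and the out-of-bag (validation) error by 0.632. The snippet below is a minimal sketch of that arithmetic using the variables from the test; the variable name manualEstimate is illustrative, not part of the library.

    // 0.632 bootstrap rule: err = 0.368 * training error + 0.632 * validation error.
    // With the values asserted above this gives 0.368 * 0 + 0.632 * 0.0214285... ~= 0.0135428...,
    // which is the same number checked against result.Estimate.
    double manualEstimate = 0.368 * trainingErrors + 0.632 * validationErrors;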
private void Test_Load(object sender, EventArgs e)
{
    // TODO: This line of code loads data into the 'diabetesDataSetB.ContinuousData' table. You can move, or remove it, as needed.
    this.continuousDataTableAdapter.Fill(this.diabetesDataSetB.ContinuousData);

    // This is sample code showing how to use Cross-Validation
    // to assess the performance of Support Vector Machines.

    // Consider the example binary data. We will be trying
    // to learn an XOR problem and see how well SVMs
    // perform on this data.

    double[][] data =
    {
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
    };

    int[] xor = // result of xor for the sample input data
    {
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
    };

    // Create a new Cross-validation algorithm, passing the data set size and the number of folds
    var crossvalidation = new CrossValidation<KernelSupportVectorMachine>(size: data.Length, folds: 3);

    // Define a fitting function using Support Vector Machines. The objective of this
    // function is to learn an SVM on the subset of the data indicated by cross-validation.

    crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
    {
        // The fitting function is given the indices of the original set which
        // should be considered training data and the indices of the original
        // set which should be considered validation data.

        // Let's now grab the training data:
        var trainingInputs = data.Submatrix(indicesTrain);
        var trainingOutputs = xor.Submatrix(indicesTrain);

        // And now the validation data:
        var validationInputs = data.Submatrix(indicesValidation);
        var validationOutputs = xor.Submatrix(indicesValidation);

        // Create a Kernel Support Vector Machine to operate on the set
        var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

        // Create a training algorithm and learn the training data
        var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

        double trainingError = smo.Run();

        // Now we can compute the validation error on the validation data:
        double validationError = smo.ComputeError(validationInputs, validationOutputs);

        // Return a new information structure containing the model and the errors achieved.
        return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);
    };

    // crossvalidation.CreatePartitions(2, data, out xor);

    // Compute the cross-validation
    var result = crossvalidation.Compute();

    // Finally, access the measured performance.
    double trainingErrors = result.Training.Mean;
    double validationErrors = result.Validation.Mean;
}
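Since this handler runs inside a Windows Forms application, the two mean errors computed at the end would normally be surfaced to the user rather than discarded. The snippet below is a minimal sketch of one way to do that with the standard MessageBox API; the message text and format strings are illustrative only.

    // Illustrative follow-up for the handler above: report the measured performance.
    MessageBox.Show(String.Format(
        "Cross-validation finished.\nMean training error: {0:0.0000}\nMean validation error: {1:0.0000}",
        trainingErrors, validationErrors));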
public void CrossvalidationConstructorTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // This is sample code showing how to use Cross-Validation
    // to assess the performance of Support Vector Machines.

    // Consider the example binary data. We will be trying
    // to learn an XOR problem and see how well SVMs
    // perform on this data.

    double[][] data =
    {
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
        new double[] { -1, -1 }, new double[] {  1, -1 },
        new double[] { -1,  1 }, new double[] {  1,  1 },
    };

    int[] xor = // result of xor for the sample input data
    {
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
        -1,  1,  1, -1,
    };

    // Create a new Cross-validation algorithm, passing the data set size and the number of folds
    var crossvalidation = new CrossValidation<KernelSupportVectorMachine>(size: data.Length, folds: 3);

    // Define a fitting function using Support Vector Machines. The objective of this
    // function is to learn an SVM on the subset of the data indicated by cross-validation.

    crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
    {
        // The fitting function is given the indices of the original set which
        // should be considered training data and the indices of the original
        // set which should be considered validation data.

        // Let's now grab the training data:
        var trainingInputs = data.Submatrix(indicesTrain);
        var trainingOutputs = xor.Submatrix(indicesTrain);

        // And now the validation data:
        var validationInputs = data.Submatrix(indicesValidation);
        var validationOutputs = xor.Submatrix(indicesValidation);

        // Create a Kernel Support Vector Machine to operate on the set
        var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

        // Create a training algorithm and learn the training data
        var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

        double trainingError = smo.Run();

        // Now we can compute the validation error on the validation data:
        double validationError = smo.ComputeError(validationInputs, validationOutputs);

        // Return a new information structure containing the model and the errors achieved.
        return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);
    };

    // Compute the cross-validation
    var result = crossvalidation.Compute();

    // Finally, access the measured performance.
    double trainingErrors = result.Training.Mean;
    double validationErrors = result.Validation.Mean;

    Assert.AreEqual(3, crossvalidation.K);
    Assert.AreEqual(0, result.Training.Mean);
    Assert.AreEqual(0, result.Validation.Mean);

    Assert.AreEqual(3, crossvalidation.Folds.Length);
    Assert.AreEqual(3, result.Models.Length);
}
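The last assertion shows that the result keeps one entry per fold in result.Models. Those entries can be used to classify new points after the run. The sketch below assumes that CrossValidationValues&lt;T&gt;.Model exposes the machine learned in each fold and that KernelSupportVectorMachine.Compute returns a decision value whose sign gives the predicted class, as in Accord.NET 2.x; verify both against the version you are using.

    // A sketch: take the machine learned in the first fold and classify a new input.
    var firstFoldSvm = result.Models[0].Model;
    double output = firstFoldSvm.Compute(new double[] { -1, 1 });
    int predicted = Math.Sign(output); // expected +1, since XOR(-1, 1) is the positive class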
private void button1_Click(object sender, EventArgs e)
{
    // Creates a matrix from the source data table
    double[,] sourceMatrix = (dgvLearningSource.DataSource as DataTable).ToMatrix(out sourceColumns);

    // Get only the input vector values
    var inputs = sourceMatrix.Submatrix(0, sourceMatrix.GetLength(0) - 1, 0, 1).ToArray();

    // Get only the label outputs
    var outputs = new int[sourceMatrix.GetLength(0)];
    for (int i = 0; i < outputs.Length; i++)
        outputs[i] = (int)sourceMatrix[i, 2];

    var cv = new CrossValidation<KernelSupportVectorMachine>(inputs.Length, 10);

    cv.Fitting = (int k, int[] training, int[] testing) =>
    {
        var trainingInputs = inputs.Submatrix(training);
        var trainingOutputs = outputs.Submatrix(training);

        var testingInputs = inputs.Submatrix(testing);
        var testingOutputs = outputs.Submatrix(testing);

        // Create the specified kernel
        IKernel kernel = getKernel();

        // Create the Support Vector Machine using the selected kernel
        var svm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new instance of the SMO learning algorithm
        var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

        // Set learning parameters
        smo.Complexity = (double)numC.Value;
        smo.Tolerance = (double)numT.Value;

        // Run the learning algorithm and measure the errors
        double trainingError = smo.Run();
        double validationError = smo.ComputeError(testingInputs, testingOutputs);

        return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);
    };

    var result = cv.Compute();
}
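As written, the click handler computes the 10-fold result but never displays it. A minimal sketch of summarizing the outcome for the UI is shown below. It assumes the result's statistics expose Mean and StandardDeviation (true for CrossValidationStatistics in Accord.NET 2.x, but verify for your version), and the label name lbStatus is hypothetical.

    // Illustrative summary of the cross-validation outcome for the form.
    lbStatus.Text = String.Format("Validation error: {0:0.000} \u00B1 {1:0.000}",
        result.Validation.Mean, result.Validation.StandardDeviation);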