Example #1
        /// <summary>
        ///   Initializes a new instance of the <see cref="BootstrapResult"/> class.
        /// </summary>
        /// 
        /// <param name="owner">The <see cref="Bootstrap"/> that is creating this result.</param>
        /// <param name="models">The models created during the cross-validation runs.</param>
        /// 
        public BootstrapResult(Bootstrap owner, BootstrapValues[] models)
        {
            double[] trainingValues = new double[models.Length];
            double[] trainingVariances = new double[models.Length];
            int[] trainingCount = new int[models.Length];

            double[] validationValues = new double[models.Length];
            double[] validationVariances = new double[models.Length];
            int[] validationCount = new int[models.Length];

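            // Collect the performance reported by each bootstrap run, together
            // with the size of the training and validation partitions it used.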
            for (int i = 0; i < models.Length; i++)
            {
                trainingValues[i] = models[i].TrainingValue;
                trainingVariances[i] = models[i].TrainingVariance;

                validationValues[i] = models[i].ValidationValue;
                validationVariances[i] = models[i].ValidationVariance;

                owner.GetPartitionSize(i, out trainingCount[i], out validationCount[i]);
            }

            this.Settings = owner;
            this.Training = new CrossValidationStatistics(trainingCount, trainingValues, trainingVariances);
            this.Validation = new CrossValidationStatistics(validationCount, validationValues, validationVariances);

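            // Combine the two means using Efron's 0.632 estimator: the out-of-sample
            // (validation) mean weighted by 0.632 plus the in-sample (training) mean
            // weighted by 0.368.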
            this.Estimate = 0.632 * Validation.Mean + 0.368 * Training.Mean;
        }
Example #2
        public void BootstrapConstructorTest2()
        {
            // Example from Masters, 1995

            // Assume a small dataset
            double[] data = { 3, 5, 2, 1, 7 };
            // indices      { 0, 1, 2, 3, 4 }

            int[][] resamplings = 
            {
                new [] { 4, 0, 2, 0, 3 }, // indices of { 7, 3, 2, 3, 1 }
                new [] { 1, 3, 3, 0, 4 }, // indices of { 5, 1, 1, 3, 7 }
                new [] { 2, 2, 4, 3, 0 }, // indices of { 2, 2, 7, 1, 3 }
            };

            Bootstrap target = new Bootstrap(data.Length, resamplings);

            target.Fitting = (int[] trainingSamples, int[] validationSamples) =>
                {
                    double[] subsample = data.Submatrix(trainingSamples);
                    double mean = subsample.Mean();

                    return new BootstrapValues(mean, 0);
                };

            var result = target.Compute();

            double actualMean = result.Training.Mean;
            double actualVar = result.Training.Variance;

            Assert.AreEqual(3.2, actualMean, 1e-10);
            Assert.AreEqual(0.04, actualVar, 1e-10);
        }
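The expected values in the assertions above follow directly from the three fixed resamplings. Below is a minimal sketch of the hand calculation (plain C#; it assumes CrossValidationStatistics reports the unbiased sample variance of the per-run means, which is consistent with the asserted 0.04):

            // The three resampled sets are { 7, 3, 2, 3, 1 }, { 5, 1, 1, 3, 7 }
            // and { 2, 2, 7, 1, 3 }, with means 3.2, 3.4 and 3.0 respectively.
            double[] means = { 16 / 5.0, 17 / 5.0, 15 / 5.0 };

            // Mean of the per-resampling means: (3.2 + 3.4 + 3.0) / 3 = 3.2
            double grandMean = (means[0] + means[1] + means[2]) / 3;

            // Unbiased sample variance of the three means: (0 + 0.04 + 0.04) / 2 = 0.04
            double sumOfSquares = 0;
            foreach (double m in means)
                sumOfSquares += (m - grandMean) * (m - grandMean);
            double variance = sumOfSquares / (means.Length - 1);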
Example #3
        public void BootstrapConstructorTest()
        {
            // Example from Masters, 1995

            // Assume a small dataset
            double[] data = { 3, 5, 2, 1, 7 };

            int size = data.Length;
            int resamplings = 3;
            int subsampleSize = data.Length;


            Bootstrap target = new Bootstrap(size, resamplings, subsampleSize);

            Assert.AreEqual(3, target.B);
            Assert.AreEqual(3, target.Subsamples.Length);

            for (int i = 0; i < target.Subsamples.Length; i++)
                Assert.AreEqual(5, target.Subsamples[i].Length);
        }
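Since the resamplings in this test are generated at random rather than given explicitly, a further sanity check could verify that every generated index points into the original data. A small sketch (assuming, as in the test above, that Subsamples exposes the raw index arrays):

            foreach (int[] subsample in target.Subsamples)
                foreach (int index in subsample)
                {
                    // Bootstrap indices are drawn with replacement from 0 .. size - 1
                    Assert.IsTrue(index >= 0 && index < size);
                }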
Example #4
        public void BootstrapConstructorTest3()
        {

            Accord.Math.Tools.SetupGenerator(0);

            // This is sample code showing how to use the 0.632 bootstrap
            // to assess the performance of Support Vector Machines.

            // Consider the example binary data below. We will try to learn
            // the XOR problem and see how well SVMs perform on this data.

            double[][] data =
            {
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
            };

            int[] xor = // result of xor for the sample input data
            {
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
            };


            // Create a new Bootstrap algorithm passing the set size and the number of resamplings
            var bootstrap = new Bootstrap(size: data.Length, subsamples: 50);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn an SVM on the subset of the data indicated by the bootstrap.

            bootstrap.Fitting = delegate(int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function receives the indices of the original set which
                // should be considered training data and the indices which should be
                // considered validation data.

                // Let's now grab the training data:
                var trainingInputs = data.Submatrix(indicesTrain);
                var trainingOutputs = xor.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs = data.Submatrix(indicesValidation);
                var validationOutputs = xor.Submatrix(indicesValidation);


                // Create a Kernel Support Vector Machine to operate on the set
                var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

                // Create a training algorithm and learn the training data
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data:
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the errors achieved.
                return new BootstrapValues(trainingError, validationError);
            };


            // Compute the bootstrap estimate
            var result = bootstrap.Compute();

            // Finally, access the measured performance.
            double trainingErrors = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            // And retrieve the 0.632 estimate
            double estimate = result.Estimate;

            Assert.AreEqual(50, bootstrap.B);
            Assert.AreEqual(0, trainingErrors);
            Assert.AreEqual(0.021428571428571429, validationErrors);

            Assert.AreEqual(50, bootstrap.Subsamples.Length);
            Assert.AreEqual(0.013542857142857143, estimate);
        }
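The asserted estimate is simply the 0.632 combination of the two error means reported above, mirroring the formula in the BootstrapResult constructor from Example #1. A minimal sketch of the arithmetic, reusing the asserted values:

            double trainingMean = 0;                       // result.Training.Mean
            double validationMean = 0.021428571428571429;  // result.Validation.Mean

            // 0.632 * validation mean + 0.368 * training mean
            double estimate632 = 0.632 * validationMean + 0.368 * trainingMean;
            // = 0.013542857142857143 (up to floating-point rounding), matching result.Estimate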