Example 1
        public static void TestSVM(double[][] inputs, int[] outputs)
        {
            var crossValidation = new CrossValidation(inputs.Length, 10);

            crossValidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                var trainingInputs  = inputs.Submatrix(indicesTrain);
                var trainingOutputs = outputs.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = inputs.Submatrix(indicesValidation);
                var validationOutputs = outputs.Submatrix(indicesValidation);

                var sw1           = Stopwatch.StartNew();
                var svm           = new SVM();
                var trainingError = svm.TrainSVM(new RationalQuadratic(1), 3, trainingInputs, trainingOutputs);
                sw1.Stop();
                Console.WriteLine("Training for: " + sw1.ElapsedMilliseconds + "ms with errors: " + trainingError);

                var validationError = svm.GetSMO().ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return(new CrossValidationValues(svm, trainingError, validationError));
            };

            // Compute the cross-validation
            var result = crossValidation.Compute();

            // Finally, access the measured performance.
            var trainingErrors   = result.Training.Mean;
            var validationErrors = result.Validation.Mean;

            Console.WriteLine("Finished with " + trainingErrors + " training errors and " + validationErrors + " validation errors");
        }
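A short hedged driver for the snippet above. It only calls TestSVM; the XOR-style rows and the -1/+1 labels are illustrative assumptions, replicated so that 10-fold cross-validation has enough samples (the usual System and System.Collections.Generic usings are assumed).
        // Hypothetical driver for TestSVM. The rows below are assumptions, repeated
        // five times so that each of the 10 folds receives at least two samples.
        public static void RunSvmDemo()
        {
            var inputs  = new List<double[]>();
            var outputs = new List<int>();

            for (int i = 0; i < 5; i++)
            {
                inputs.AddRange(new[]
                {
                    new double[] { -1, -1 }, new double[] { 1, -1 },
                    new double[] { -1,  1 }, new double[] { 1,  1 },
                });

                // -1/+1 labels follow the SMO-based examples later on this page;
                // adjust if the SVM wrapper expects a different encoding.
                outputs.AddRange(new[] { -1, 1, 1, -1 });
            }

            TestSVM(inputs.ToArray(), outputs.ToArray());
        }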
Example 2
        public CrossValidationResult <object> Validate(IClassifier classifier, TrainingData trainingData, int folds = 10)
        {
            var crossValidation = new CrossValidation(size: trainingData.Inputs.Length, folds: folds);

            crossValidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                var trainingInputs  = trainingData.Inputs.Get(indicesTrain);
                var trainingOutputs = trainingData.Outputs.Get(indicesTrain);

                var validationInputs  = trainingData.Inputs.Get(indicesValidation);
                var validationOutputs = trainingData.Outputs.Get(indicesValidation);

                var foldClassifier = classifier.CreateInstance(trainingData.FeatureDefaultsValueTypes, trainingData.FeatureGranularities);
                foldClassifier.Train(trainingInputs, trainingOutputs);

                var trainingPredicted   = foldClassifier.Decide(trainingInputs);
                var validationPredicted = foldClassifier.Decide(validationInputs);

                double trainingError   = new ZeroOneLoss(trainingOutputs).Loss(trainingPredicted);
                double validationError = new ZeroOneLoss(validationOutputs).Loss(validationPredicted);

                var confusionMatrix = new ConfusionMatrix(validationPredicted, validationOutputs, positiveValue: 1, negativeValue: 0);

                Console.WriteLine($"{k}\t{trainingError}\t{validationError}\t{confusionMatrix.Accuracy}\t{confusionMatrix.TruePositives}\t{confusionMatrix.TrueNegatives}\t{confusionMatrix.FalsePositives}\t{confusionMatrix.FalseNegatives}\t{confusionMatrix.FalsePositiveRate}");

                return(new CrossValidationValues(foldClassifier, trainingError, validationError));
            };

            var result = crossValidation.Compute();

            return(result);
        }
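A hedged caller for Validate: IClassifier, TrainingData, and the tab-separated per-fold columns come from the snippet above, while the header line and the use of Training.Mean/Validation.Mean mirror the other examples on this page.
        // Hypothetical caller: prints a header matching the per-fold line written inside
        // Fitting, runs the cross-validation, and reports the averaged errors.
        public void ReportCrossValidation(IClassifier classifier, TrainingData trainingData)
        {
            Console.WriteLine("fold\ttrainErr\tvalErr\taccuracy\tTP\tTN\tFP\tFN\tFPR");

            var result = Validate(classifier, trainingData, folds: 10);

            Console.WriteLine("Mean training error:   " + result.Training.Mean);
            Console.WriteLine("Mean validation error: " + result.Validation.Mean);
        }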
Example 3
        public static void TestKNN(double[][] inputs, int[] outputs, int kValue)
        {
            // The cross-validation needs the number of samples (rows), not the feature dimension.
            var crossValidation = new CrossValidation(inputs.Length, 10);

            crossValidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                var trainingInputs  = inputs.Submatrix(indicesTrain);
                var trainingOutputs = outputs.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = inputs.Submatrix(indicesValidation);
                var validationOutputs = outputs.Submatrix(indicesValidation);

                var sw  = Stopwatch.StartNew();
                var knn = new KNN();
                knn.TrainKNN(trainingInputs, trainingOutputs, kValue);
                sw.Stop();

                //Console.WriteLine("Training for: " + sw.ElapsedMilliseconds + "ms");

                var error = knn.ComputeError(validationInputs, validationOutputs);

                // The training error is not tracked here, so report 0 alongside the validation error.
                return(new CrossValidationValues(knn, 0, error));
            };

            // Compute the cross-validation
            var result = crossValidation.Compute();

            // Finally, access the measured performance.
            var trainingErrors   = result.Training.Mean;
            var validationErrors = result.Validation.Mean;

            Console.WriteLine("Finished with " + trainingErrors + " training errors and " + validationErrors + " validation errors");
        }
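A small sketch that sweeps the neighbor count by calling TestKNN repeatedly; the particular k values are arbitrary and only illustrate the call.
        // Hypothetical sweep over k; each call runs its own 10-fold cross-validation
        // and prints the averaged errors to the console.
        public static void SweepKnn(double[][] inputs, int[] outputs)
        {
            foreach (int kValue in new[] { 1, 3, 5, 7, 9 })
            {
                Console.WriteLine("k = " + kValue);
                TestKNN(inputs, outputs, kValue);
            }
        }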
Example 4
        public void NotEnoughSamplesTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            int[] labels = Matrix.Vector(10, 1).Concatenate(Matrix.Vector(30, 0));

            Vector.Shuffle(labels);

            var crossvalidation = new CrossValidation <MulticlassSupportVectorMachine>(labels, 2, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation  = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(1, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining  = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9, countTraining);

                    return(new CrossValidationValues <MulticlassSupportVectorMachine>(null, 0, 0));
                }
            };

            crossvalidation.Compute();
        }
Example 5
        public void FittingTest()
        {
            int[] folds = CrossValidation.Splittings(100, 10);

            int[] samples = Matrix.Indices(0, 100);

            CrossValidation val = new CrossValidation(folds, 10);

            val.RunInParallel = false;

            int current = 0;

            val.Fitting = (k, trainingSamples, validationSamples) =>
            {
                Assert.AreEqual(current, k);
                Assert.AreEqual(90, trainingSamples.Length);
                Assert.AreEqual(10, validationSamples.Length);

                int[] trainingSet   = samples.Submatrix(trainingSamples);
                int[] validationSet = samples.Submatrix(validationSamples);

                for (int i = 0; i < trainingSet.Length; i++)
                {
                    Assert.AreEqual(samples[trainingSamples[i]], trainingSet[i]);
                }

                for (int i = 0; i < validationSet.Length; i++)
                {
                    Assert.AreEqual(samples[validationSamples[i]], validationSet[i]);
                }

                current++;

                return(new CrossValidationValues(k, 2 * k));
            };

            var result = val.Compute();

            Assert.AreEqual(10, current);
            Assert.AreEqual(4.5, result.Training.Mean);
            Assert.AreEqual(9.0, result.Validation.Mean);
            Assert.AreEqual(
                2 * result.Training.StandardDeviation,
                result.Validation.StandardDeviation);

            Assert.AreEqual(val.Folds.Length, result.Training.Sizes.Length);
            Assert.AreEqual(val.Folds.Length, result.Validation.Sizes.Length);

            for (int i = 0; i < result.Training.Sizes.Length; i++)
            {
                Assert.AreEqual(90, result.Training.Sizes[i]);
            }

            for (int i = 0; i < result.Validation.Sizes.Length; i++)
            {
                Assert.AreEqual(10, result.Validation.Sizes[i]);
            }
        }
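The test above exercises the summary statistics on the cross-validation result; the sketch below reuses only the members it asserts on (Mean, StandardDeviation, Sizes), assuming the non-generic CrossValidation yields a CrossValidationResult<object> as in the classifier-validation example earlier on this page.
        // Hypothetical helper: prints the same statistics that FittingTest asserts on.
        private static void PrintSummary(CrossValidationResult<object> result)
        {
            Console.WriteLine("Training:   mean=" + result.Training.Mean +
                              " sd=" + result.Training.StandardDeviation);
            Console.WriteLine("Validation: mean=" + result.Validation.Mean +
                              " sd=" + result.Validation.StandardDeviation);

            for (int i = 0; i < result.Training.Sizes.Length; i++)
            {
                Console.WriteLine("Fold " + i + ": train=" + result.Training.Sizes[i] +
                                  ", validation=" + result.Validation.Sizes[i]);
            }
        }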
Example 6
        public override Task <List <GeneralConfusionMatrix> > ComputeFoldedConfusionMatrixAsync(ClassificationModel classificationModel, int folds)
        {
            return(Task.Factory.StartNew(() =>
            {
                int numFeatures = classificationModel.FeatureVectors.Count;
                DecisionVariable[] decisionVariables = Enumerable.ToArray(classificationModel.Bands.Select(b => DecisionVariable.Continuous(b.ToString())));

                double[][] input = new double[numFeatures][];
                int[] responses = new int[numFeatures];

                for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
                {
                    var featureVector = classificationModel.FeatureVectors[featureIndex];

                    input[featureIndex] = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
                    responses[featureIndex] = featureVector.FeatureClass;
                }

                List <GeneralConfusionMatrix> confusionMatrices = new List <GeneralConfusionMatrix>();

                // Create a new Cross-validation algorithm passing the data set size and the number of folds
                var crossvalidation = new CrossValidation(input.Length, folds);

                crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
                {
                    // Lets now grab the training data:
                    var trainingInputs = input.Get(indicesTrain);
                    var trainingOutputs = responses.Get(indicesTrain);

                    // And now the validation data:
                    var validationInputs = input.Get(indicesValidation);
                    var validationOutputs = responses.Get(indicesValidation);

                    var tree = new DecisionTree(decisionVariables, Enum.GetValues(typeof(LandcoverTypeViewModel)).Length);
                    C45Learning c45Learning = new C45Learning(tree);
                    c45Learning.Learn(trainingInputs, trainingOutputs);

                    var predictedTraining = tree.Decide(trainingInputs);
                    var predictedValidation = tree.Decide(validationInputs);

                    double trainingError = new ZeroOneLoss(trainingOutputs).Loss(predictedTraining);
                    double validationError = new ZeroOneLoss(validationOutputs).Loss(predictedValidation);

                    GeneralConfusionMatrix confusionMatrix = new GeneralConfusionMatrix(Enum.GetValues(typeof(LandcoverTypeViewModel)).Length - 1, validationOutputs, predictedValidation);
                    confusionMatrices.Add(confusionMatrix);

                    // Return a new information structure containing the model and the errors achieved.
                    return new CrossValidationValues(trainingError, validationError);
                };

                var result = crossvalidation.Compute();

                return confusionMatrices;
            }));
        }
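A hedged consumer of the method above; ClassificationModel comes from the snippet, and the aggregation assumes GeneralConfusionMatrix exposes an Accuracy property (used here only for reporting).
        // Hypothetical consumer: awaits the per-fold confusion matrices and reports
        // the mean accuracy (assumes GeneralConfusionMatrix.Accuracy is available).
        public async Task ReportFoldedAccuracyAsync(ClassificationModel classificationModel)
        {
            List<GeneralConfusionMatrix> matrices =
                await ComputeFoldedConfusionMatrixAsync(classificationModel, folds: 10);

            double meanAccuracy = matrices.Average(m => m.Accuracy);
            Console.WriteLine("Mean fold accuracy: " + meanAccuracy);
        }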
Example 7
        public void NotEnoughSamplesTest1()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int[] labels = Matrix.Vector(10, 1).Concatenate(Matrix.Vector(30, 0));

            Vector.Shuffle(labels);

            var crossvalidation = new CrossValidation <MulticlassSupportVectorMachine>(size: 40, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation  = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(2, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining  = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9 * 2, countTraining);

                    return(new CrossValidationValues <MulticlassSupportVectorMachine>(null, 0, 0));
                }
            };

            bool thrown = false;

            try { crossvalidation.Compute(); }
            catch (Exception) { thrown = true; }
            Assert.IsTrue(thrown);

            crossvalidation = new CrossValidation <MulticlassSupportVectorMachine>(labels, 2, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation  = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(1, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining  = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9, countTraining);

                    return(new CrossValidationValues <MulticlassSupportVectorMachine>(null, 0, 0));
                }
            };

            crossvalidation.Compute();
        }
Example 8
        public override Task <List <GeneralConfusionMatrix> > ComputeFoldedConfusionMatrixAsync(ClassificationModel classificationModel, int folds)
        {
            return(Task.Factory.StartNew(() =>
            {
                int numFeatures = classificationModel.FeatureVectors.Count;

                double[][] input = new double[numFeatures][];
                int[] responses = new int[numFeatures];

                for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
                {
                    var featureVector = classificationModel.FeatureVectors[featureIndex];

                    input[featureIndex] = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
                    responses[featureIndex] = featureVector.FeatureClass;
                }

                List <GeneralConfusionMatrix> confusionMatrices = new List <GeneralConfusionMatrix>();

                // Create a new Cross-validation algorithm passing the data set size and the number of folds
                var crossvalidation = new CrossValidation(input.Length, folds);

                crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
                {
                    // Lets now grab the training data:
                    var trainingInputs = input.Get(indicesTrain);
                    var trainingOutputs = responses.Get(indicesTrain);

                    // And now the validation data:
                    var validationInputs = input.Get(indicesValidation);
                    var validationOutputs = responses.Get(indicesValidation);

                    int[] predictedTraining;
                    int[] predictedValidation;
                    TrainAndPredict(Complexity, Gamma, Degree, trainingInputs, trainingOutputs, validationInputs, out predictedTraining, out predictedValidation);

                    double trainingError = new ZeroOneLoss(trainingOutputs).Loss(predictedTraining);
                    double validationError = new ZeroOneLoss(validationOutputs).Loss(predictedValidation);

                    GeneralConfusionMatrix confusionMatrix = new GeneralConfusionMatrix(classificationModel.LandCoverTypes.Count, validationOutputs, predictedValidation);
                    confusionMatrices.Add(confusionMatrix);

                    // Return a new information structure containing the model and the errors achieved.
                    return new CrossValidationValues(trainingError, validationError);
                };

                crossvalidation.Compute();

                return confusionMatrices;
            }));
        }
Example 9
        private void button1_Click(object sender, EventArgs e)
        {
            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvLearningSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            // Get only the input vector values
            var inputs = sourceMatrix.Submatrix(0, sourceMatrix.GetLength(0) - 1, 0, 1).ToArray();

            // Get only the label outputs
            var outputs = new int[sourceMatrix.GetLength(0)];

            for (int i = 0; i < outputs.Length; i++)
            {
                outputs[i] = (int)sourceMatrix[i, 2];
            }

            var cv = new CrossValidation <KernelSupportVectorMachine>(inputs.Length, 10);

            cv.Fitting = (int k, int[] training, int[] testing) =>
            {
                var trainingInputs  = inputs.Submatrix(training);
                var trainingOutputs = outputs.Submatrix(training);
                var testingInputs   = inputs.Submatrix(testing);
                var testingOutputs  = outputs.Submatrix(testing);

                // Create the specified Kernel
                IKernel kernel = getKernel();


                // Creates the Support Vector Machine using the selected kernel
                var svm = new KernelSupportVectorMachine(kernel, 2);

                // Creates a new instance of the SMO Learning Algorithm
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                // Set learning parameters
                smo.Complexity = (double)numC.Value;
                smo.Tolerance  = (double)numT.Value;

                // Run
                double trainingError   = smo.Run();
                double validationError = smo.ComputeError(testingInputs, testingOutputs);

                return(new CrossValidationValues <KernelSupportVectorMachine>(svm, trainingError, validationError));
            };

            var result = cv.Compute();
        }
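The click handler above discards the computed result; a hedged follow-up such as this could be invoked right after the call to cv.Compute() to surface the averaged errors (Console output is assumed here for simplicity).
        // Hypothetical helper: summarizes the cross-validation result computed in button1_Click.
        private static void ShowCrossValidationSummary(CrossValidationResult<KernelSupportVectorMachine> result)
        {
            Console.WriteLine("Mean training error:   " + result.Training.Mean);
            Console.WriteLine("Mean validation error: " + result.Validation.Mean);
        }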
Example 10
        public CrossValidationResult <RandomForest> GetCrossValidationResultsOfRandomForestModel(AppIdentAcordSource appIdentAcordSource, GridSearchParameterCollection bestParameters, int folds = 10)
        {
            var samples           = appIdentAcordSource.Samples;
            var labels            = appIdentAcordSource.LabelsAsIntegers;
            var decisionVariables = appIdentAcordSource.DecisionVariables;
            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation <RandomForest>(samples.Length, folds)
            {
                Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
                {
                    // The fitting function is passing the indices of the original set which
                    // should be considered training data and the indices of the original set
                    // which should be considered validation data.
                    Console.WriteLine($"{DateTime.Now} RandomForest cross validation.");
                    // Lets now grab the training data:
                    var trainingInputs  = samples.Get(indicesTrain);
                    var trainingOutputs = labels.Get(indicesTrain);
                    // And now the validation data:
                    var validationInputs  = samples.Get(indicesValidation);
                    var validationOutputs = labels.Get(indicesValidation);
                    // create random forest model with the best parameters from grid search results
                    var rfcModel = CreateRandomForestModel(decisionVariables, bestParameters, trainingInputs, trainingOutputs);
                    // compute the training error rate with ZeroOneLoss function
                    var trainingError = new ZeroOneLoss(trainingOutputs).Loss(rfcModel.Decide(trainingInputs));
                    // Now we can compute the validation error on the validation data:
                    var validationError = new ZeroOneLoss(validationOutputs).Loss(rfcModel.Decide(validationInputs));
                    // Return a new information structure containing the model and the errors achieved.

                    var tag = new ValidationDataSource(validationInputs, validationOutputs);
                    return(new CrossValidationValues <RandomForest>(rfcModel, trainingError, validationError)
                    {
                        Tag = tag
                    });
                }
            };

            // Compute the cross-validation
            return(crossvalidation.Compute());
        }
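A sketch showing how the per-fold values returned above could be inspected; it relies only on members already used on this page (Models, Model, TrainingValue, ValidationValue, and the Tag set inside the fitting function).
        // Hypothetical consumer: walks the per-fold results and recovers the
        // ValidationDataSource stored in the Tag by the fitting function above.
        public void InspectRandomForestFolds(CrossValidationResult<RandomForest> result)
        {
            foreach (var fold in result.Models)
            {
                Console.WriteLine("Training error: " + fold.TrainingValue +
                                  ", validation error: " + fold.ValidationValue);

                var validationData = fold.Tag as ValidationDataSource;
                if (validationData != null)
                {
                    // The per-fold model itself is available as fold.Model (a RandomForest),
                    // and validationData holds the inputs/outputs it was scored against.
                }
            }
        }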
Example 11
        public void NotEnoughSamplesTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            int[] labels = Matrix.Vector(10, 1).Concatenate(Matrix.Vector(30, 0));

            Accord.Statistics.Tools.Shuffle(labels);

            var crossvalidation = new CrossValidation<MulticlassSupportVectorMachine>(labels, 2, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(1, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9, countTraining);

                    return new CrossValidationValues<MulticlassSupportVectorMachine>(null, 0, 0);
                }
            };

            crossvalidation.Compute();
        }
Example 12
        private int OptimizeK()
        {
            Tuple <KNearestNeighbors, double> bestModel = null;

            for (int k = 50; k <= 200; k++)
            {
                var crossvalidation = new CrossValidation <KNearestNeighbors>(_input.Length, 5);

                crossvalidation.Fitting = delegate(int fold, int[] indicesTrain, int[] indicesValidation)
                {
                    var trainingInputs  = _input.Submatrix(indicesTrain);
                    var trainingOutputs = _output.Submatrix(indicesTrain);

                    var validationInputs  = _input.Submatrix(indicesValidation);
                    var validationOutputs = _output.Submatrix(indicesValidation);

                    var predictor = new KNearestNeighbors(k, _classes, trainingInputs, trainingOutputs);

                    // Create a training algorithm and learn the training data

                    var trainingError = 0.0;

                    for (int i = 0; i < trainingInputs.Length; i++)
                    {
                        int[] nearest;
                        predictor.GetNearestNeighbors(trainingInputs[i], out nearest);

                        var prediction = InverseDistanceWeightedAverage(nearest);

                        if (prediction > 0 && trainingOutputs[i] > 0 ||
                            prediction < 0 && trainingOutputs[i] < 0 ||
                            prediction.Equals(trainingOutputs[i]))
                        {
                            continue;
                        }

                        trainingError++;
                    }

                    double validationError = 0.0;

                    for (int i = 0; i < validationInputs.Length; i++)
                    {
                        int[] nearest;
                        predictor.GetNearestNeighbors(validationInputs[i], out nearest);

                        var prediction = InverseDistanceWeightedAverage(nearest);

                        if (prediction > 0 && validationOutputs[i] > 0 ||
                            prediction < 0 && validationOutputs[i] < 0 ||
                            prediction.Equals(validationOutputs[i]))
                        {
                            continue;
                        }

                        validationError++;
                    }

                    trainingError   /= trainingInputs.Length;
                    validationError /= validationInputs.Length;

                    return(new CrossValidationValues <KNearestNeighbors>(predictor, trainingError, validationError));
                };

                var result = crossvalidation.Compute();

                //var minError = result.Models.Select(y => y.ValidationValue).Min();
                var minError = result.Models.Select(y => Math.Sqrt(Math.Pow(y.TrainingValue + y.ValidationValue, 2.0))).Min();

                if (bestModel == null || minError < bestModel.Item2)
                {
                    // Pick the fold whose combined metric equals the minimum just computed.
                    var bestFit = result.Models.FirstOrDefault(x => minError.Equals(Math.Sqrt(Math.Pow(x.TrainingValue + x.ValidationValue, 2.0))))?.Model;
                    bestModel = bestFit == null ? bestModel : new Tuple <KNearestNeighbors, double>(bestFit, minError);
                }
            }

            return(bestModel?.Item1.K ?? 80);
        }
Example 13
        public void CrossvalidationConstructorTest2()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Hidden Markov Models.

            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0, 1, 1, 0 },    // Class 0
                new int[] { 0, 0, 1, 0 },    // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 1, 0 },    // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 1, 1, 0 }, // Class 0
                new int[] { 0, 1, 0, 1, 0 }, // Class 0
                new int[] { 0, 1, 0 },       // Class 0
                new int[] { 0, 1, 1, 0 },    // Class 0

                new int[] { 1, 0, 0, 1 },    // Class 1
                new int[] { 1, 1, 0, 1 },    // Class 1
                new int[] { 1, 0, 0, 0, 1 }, // Class 1
                new int[] { 1, 0, 1 },       // Class 1
                new int[] { 1, 1, 0, 1 },    // Class 1
                new int[] { 1, 0, 1 },       // Class 1
                new int[] { 1, 0, 0, 1 },    // Class 1
                new int[] { 1, 0, 0, 0, 1 }, // Class 1
                new int[] { 1, 0, 1 },       // Class 1
                new int[] { 1, 0, 0, 0, 1 }, // Class 1
            };

            int[] outputs = new int[]
            {
                0, 0, 0, 0, 0, 0, 0, 0, 0, 0, // First 10 sequences are of class 0
                1, 1, 1, 1, 1, 1, 1, 1, 1, 1, // Last 10 sequences are of class 1
            };


            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation <HiddenMarkovClassifier>(size: inputs.Length, folds: 3);

            // Define a fitting function using Hidden Markov Model classifiers. The objective of this
            // function is to learn an HMM classifier on the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which
                // should be considered training data and the indices of the original set
                // which should be considered validation data.

                // Lets now grab the training data:
                var trainingInputs  = inputs.Submatrix(indicesTrain);
                var trainingOutputs = outputs.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = inputs.Submatrix(indicesValidation);
                var validationOutputs = outputs.Submatrix(indicesValidation);


                // We are trying to predict two different classes
                int classes = 2;

                // Each sequence may have up to two symbols (0 or 1)
                int symbols = 2;

                // Nested models will have two states each
                int[] states = new int[] { 2, 2 };

                // Creates a new Hidden Markov Model Classifier with the given parameters
                HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


                // Create a new learning algorithm to train the sequence classifier
                var teacher = new HiddenMarkovClassifierLearning(classifier,

                                                                 // Train each model until the log-likelihood changes less than 0.001
                                                                 modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 0
                }
                                                                 );

                // Train the sequence classifier using the algorithm
                double likelihood = teacher.Run(trainingInputs, trainingOutputs);

                double trainingError = teacher.ComputeError(trainingInputs, trainingOutputs);

                // Now we can compute the validation error on the validation data:
                double validationError = teacher.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return(new CrossValidationValues <HiddenMarkovClassifier>(classifier, trainingError, validationError));
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors   = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            Assert.AreEqual(3, crossvalidation.K);
            Assert.AreEqual(0, result.Training.Mean);
            Assert.AreEqual(0, result.Validation.Mean);

            Assert.AreEqual(3, crossvalidation.Folds.Length);
            Assert.AreEqual(3, result.Models.Length);
        }
Example 14
        public void CrossvalidationConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Support Vector Machines.

            // Consider the example binary data. We will be trying
            // to learn an XOR problem and see how well SVMs
            // perform on this data.

            double[][] data =
            {
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
            };

            int[] xor = // result of xor for the sample input data
            {
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
            };


            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation <KernelSupportVectorMachine>(size: data.Length, folds: 3);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which
                // should be considered training data and the indices of the original set
                // which should be considered validation data.

                // Lets now grab the training data:
                var trainingInputs  = data.Submatrix(indicesTrain);
                var trainingOutputs = xor.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = data.Submatrix(indicesValidation);
                var validationOutputs = xor.Submatrix(indicesValidation);


                // Create a Kernel Support Vector Machine to operate on the set
                var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

                // Create a training algorithm and learn the training data
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data:
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return(new CrossValidationValues <KernelSupportVectorMachine>(svm, trainingError, validationError));
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors   = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            Assert.AreEqual(3, crossvalidation.K);
            Assert.AreEqual(0, result.Training.Mean);
            Assert.AreEqual(0, result.Validation.Mean);

            Assert.AreEqual(3, crossvalidation.Folds.Length);
            Assert.AreEqual(3, result.Models.Length);
        }
Example 15
        public void NotEnoughSamplesTest1()
        {
            Accord.Math.Random.Generator.Seed = 0;

            int[] labels = Matrix.Vector(10, 1).Concatenate(Matrix.Vector(30, 0));

            Vector.Shuffle(labels);

            var crossvalidation = new CrossValidation<MulticlassSupportVectorMachine>(size: 40, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(2, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9 * 2, countTraining);

                    return new CrossValidationValues<MulticlassSupportVectorMachine>(null, 0, 0);
                }
            };

            bool thrown = false;
            try { crossvalidation.Compute(); }
            catch (Exception) { thrown = true; }
            Assert.IsTrue(thrown);

            crossvalidation = new CrossValidation<MulticlassSupportVectorMachine>(labels, 2, folds: 10)
            {
                RunInParallel = false,

                Fitting = (int index, int[] indicesTrain, int[] indicesValidation) =>
                {
                    var labelsValidation = labels.Submatrix(indicesValidation);
                    int countValidation = labelsValidation.Count(x => x == 1);
                    Assert.AreEqual(1, countValidation);

                    var labelsTraining = labels.Submatrix(indicesTrain);
                    int countTraining = labelsTraining.Count(x => x == 1);
                    Assert.AreEqual(9, countTraining);

                    return new CrossValidationValues<MulticlassSupportVectorMachine>(null, 0, 0);
                }
            };

            crossvalidation.Compute();
        }
Example 16
        private void Test_Load(object sender, EventArgs e)
        {
            // TODO: This line of code loads data into the 'diabetesDataSetB.ContinuousData' table. You can move, or remove it, as needed.
            this.continuousDataTableAdapter.Fill(this.diabetesDataSetB.ContinuousData);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Support Vector Machines.

            // Consider the example binary data. We will be trying
            // to learn an XOR problem and see how well SVMs
            // perform on this data.

            double[][] data =
            {
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
            };

            int[] xor = // result of xor for the sample input data
            {
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
            };


            // Create a new Cross-validation algorithm passing the data set size and the number of folds 
            var crossvalidation = new CrossValidation<KernelSupportVectorMachine>(size: data.Length, folds: 3);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which 
                // should be considered training data and the indices of the original set 
                // which should be considered validation data. 

                // Lets now grab the training data: 
                var trainingInputs = data.Submatrix(indicesTrain);
                var trainingOutputs = xor.Submatrix(indicesTrain);

                // And now the validation data: 
                var validationInputs = data.Submatrix(indicesValidation);
                var validationOutputs = xor.Submatrix(indicesValidation);


                // Create a Kernel Support Vector Machine to operate on the set 
                var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

                // Create a training algorithm and learn the training data 
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data: 
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved. 
                return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);
            };

            //crossvalidation.CreatePartitions(2, data,out xor);

            // Compute the cross-validation 
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors = result.Training.Mean;
            double validationErrors = result.Validation.Mean;
        }
Example 17
        private CrossValidationResult _performCrossValidation()
        {
            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation(size: _inputs.Length, folds: _configuration.CrossValidationNumFolds);

            Console.WriteLine("Num Samples: " + crossvalidation.Samples);


            // ConcurrentDictionary<string, NodeClassification> nodeClassifications = new ConcurrentDictionary<string, NodeClassification>();
            CrossValidationResult cvResult = new CrossValidationResult();

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.
            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which
                // should be considered training data and the indices of the original set
                // which should be considered validation data.

                // Lets now grab the training data:
                var trainingInputs  = _inputs.Submatrix(indicesTrain);
                var trainingOutputs = _outputs.Submatrix(indicesTrain);
                var trainingNames   = _names.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = _inputs.Submatrix(indicesValidation);
                var validationOutputs = _outputs.Submatrix(indicesValidation);
                var validationNames   = _names.Submatrix(indicesValidation);

                if (validationNames.Intersect <String>(trainingNames).Count() > 0)
                {
                    Console.WriteLine("Warning, Training and Validation Set not disjunct.");
                    Utility.writeToConsole <String>(validationNames.Intersect(trainingNames).ToArray());
                }

                //Console.WriteLine("=== TRAINING ===");
                //Utility.writeToConsole<String>(trainingNames);
                //Utility.writeToConsole<int>(trainingInputs);
                //Utility.writeToConsole<int>(trainingOutputs);

                //Console.WriteLine("=== VALIDATION ===");
                //Utility.writeToConsole<String>(validationNames);
                //Utility.writeToConsole<double>(validationInputs);
                //Utility.writeToConsole<int>(validationOutputs);

                //Console.WriteLine("=== INTERSECTION ===");
                //String[] intersection = Utility.Intersection(validationNames, trainingNames);
                //Utility.writeToConsole<String>(intersection);

                // Create a Kernel Support Vector Machine to operate on the set
                var svm = new KernelSupportVectorMachine(_kernel, _inputs[0].Length);
                //Accord.MachineLearning.Boosting.Boost<KernelSupportVectorMachine> b;
                // Create a training algorithm and learn the training data
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs)
                {
                    // Set learning parameters
                    Tolerance              = _configuration.Tolerance,
                    PositiveWeight         = _configuration.WeightPositiveClass,
                    NegativeWeight         = _configuration.WeightNegativeClass,
                    UseClassProportions    = _configuration.UseComputedWeights,
                    UseComplexityHeuristic = true
                };

                //if (!_configuration.UseHeuristicalComplexity)
                smo.Complexity = _configuration.Complexity;

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data:
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Predictions & Confusion Matrix
                List <int>    predictions    = new List <int>();
                List <double> rawPredictions = new List <double>();

                int index = 0;
                foreach (double[] inputVector in validationInputs)
                {
                    // Compute the decision output for vector
                    // Console.WriteLine(validationNames[index]);
                    // Utility.writeToConsole<double>(inputVector);
                    double rawPrediction = svm.Compute(inputVector);
                    rawPredictions.Add(rawPrediction);

                    int prediction = rawPrediction > 0.0d ? +1 : -1;
                    predictions.Add(prediction);

                    try
                    {
                        // Update Node Classifications
                        cvResult.AddOrUpdateClassification(validationNames[index], rawPrediction, validationOutputs[index]);
                    }
                    catch (IndexOutOfRangeException)
                    {
                        Console.WriteLine("Failed to update CV Result: Input Vector larger than Name Vector.");
                    }

                    index++;
                }

                ConfusionMatrix confusionMatrix = new ConfusionMatrix(predictions.ToArray(), validationOutputs, 1, -1);
                cvResult.ConfusionMatrices.Add(confusionMatrix);

                // Return a new information structure containing the model and the errors achieved.
                return(new CrossValidationValues(svm, trainingError, validationError));
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors   = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            Console.WriteLine("Training Errors: " + result.Training.Mean);
            Console.WriteLine("Validation Errors: " + result.Validation.Mean);

            ConfusionMatrix aggregatedConfusionMatrix = ConfusionMatrix.Combine(cvResult.ConfusionMatrices.ToArray());

            return(cvResult);
        }
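The aggregated confusion matrix computed above is currently discarded; a hedged extension could report it before returning, using the same ConfusionMatrix properties that the classifier-validation example earlier on this page already prints per fold.
        // Hypothetical summary of the combined confusion matrix.
        private static void PrintAggregatedMatrix(ConfusionMatrix aggregated)
        {
            Console.WriteLine("Accuracy:            " + aggregated.Accuracy);
            Console.WriteLine("True positives:      " + aggregated.TruePositives);
            Console.WriteLine("True negatives:      " + aggregated.TrueNegatives);
            Console.WriteLine("False positives:     " + aggregated.FalsePositives);
            Console.WriteLine("False negatives:     " + aggregated.FalseNegatives);
            Console.WriteLine("False positive rate: " + aggregated.FalsePositiveRate);
        }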
Example 18
        private void Test_Load(object sender, EventArgs e)
        {
            // TODO: This line of code loads data into the 'diabetesDataSetB.ContinuousData' table. You can move, or remove it, as needed.
            this.continuousDataTableAdapter.Fill(this.diabetesDataSetB.ContinuousData);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Support Vector Machines.

            // Consider the example binary data. We will be trying
            // to learn an XOR problem and see how well SVMs
            // perform on this data.

            double[][] data =
            {
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
                new double[] { -1, -1 }, new double[] { 1, -1 },
                new double[] { -1,  1 }, new double[] { 1,  1 },
            };

            int[] xor = // result of xor for the sample input data
            {
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
                -1,  1,
                1,  -1,
            };


            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation <KernelSupportVectorMachine>(size: data.Length, folds: 3);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which
                // should be considered training data and the indices of the original set
                // which should be considered validation data.

                // Lets now grab the training data:
                var trainingInputs  = data.Submatrix(indicesTrain);
                var trainingOutputs = xor.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = data.Submatrix(indicesValidation);
                var validationOutputs = xor.Submatrix(indicesValidation);


                // Create a Kernel Support Vector Machine to operate on the set
                var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

                // Create a training algorithm and learn the training data
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data:
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return(new CrossValidationValues <KernelSupportVectorMachine>(svm, trainingError, validationError));
            };

            //crossvalidation.CreatePartitions(2, data,out xor);

            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors   = result.Training.Mean;
            double validationErrors = result.Validation.Mean;
        }
Example 19
        public void CrossvalidationConstructorTest2()
        {

            Accord.Math.Tools.SetupGenerator(0);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Hidden Markov Models.

            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,1,1,0 },   // Class 0
                new int[] { 0,0,1,0 },   // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,1,0 },   // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,0,1,0 }, // Class 0
                new int[] { 0,1,0 },     // Class 0
                new int[] { 0,1,1,0 },   // Class 0

                new int[] { 1,0,0,1 },   // Class 1
                new int[] { 1,1,0,1 },   // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
                new int[] { 1,0,1 },     // Class 1
                new int[] { 1,1,0,1 },   // Class 1
                new int[] { 1,0,1 },     // Class 1
                new int[] { 1,0,0,1 },   // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
                new int[] { 1,0,1 },     // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0,0,0,0,0,0,0, // First 10 sequences are of class 0
                1,1,1,1,1,1,1,1,1,1, // Last 10 sequences are of class 1
            };



            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation<HiddenMarkovClassifier>(size: inputs.Length, folds: 3);

            // Define a fitting function using Hidden Markov Model classifiers. The objective of this
            // function is to learn an HMM classifier on the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is passing the indices of the original set which
                // should be considered training data and the indices of the original set
                // which should be considered validation data.

                // Lets now grab the training data:
                var trainingInputs = inputs.Submatrix(indicesTrain);
                var trainingOutputs = outputs.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs = inputs.Submatrix(indicesValidation);
                var validationOutputs = outputs.Submatrix(indicesValidation);


                // We are trying to predict two different classes
                int classes = 2;

                // Each sequence may have up to two symbols (0 or 1)
                int symbols = 2;

                // Nested models will have two states each
                int[] states = new int[] { 2, 2 };

                // Creates a new Hidden Markov Model Classifier with the given parameters
                HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


                // Create a new learning algorithm to train the sequence classifier
                var teacher = new HiddenMarkovClassifierLearning(classifier,

                    // Train each model until the log-likelihood changes less than 0.001
                    modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                    {
                        Tolerance = 0.001,
                        Iterations = 0
                    }
                );

                // Train the sequence classifier using the algorithm
                double likelihood = teacher.Run(trainingInputs, trainingOutputs);

                double trainingError = teacher.ComputeError(trainingInputs, trainingOutputs);

                // Now we can compute the validation error on the validation data:
                double validationError = teacher.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return new CrossValidationValues<HiddenMarkovClassifier>(classifier, trainingError, validationError);
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            Assert.AreEqual(3, crossvalidation.K);
            Assert.AreEqual(0, result.Training.Mean);
            Assert.AreEqual(0.055555555555555552, result.Validation.Mean);

            Assert.AreEqual(3, crossvalidation.Folds.Length);
            Assert.AreEqual(3, result.Models.Length);
        }
Example 20
        public void KNearestNeighbor_CrossValidation()
        {
            // Create some sample learning data. In this data,
            // the first two instances belong to a class, the
            // four next belong to another class and the last
            // three to yet another.

            double[][] inputs =
            {
                // The first two are from class 0
                new double[] { -5, -2, -1 },
                new double[] { -5, -5, -6 },

                // The next four are from class 1
                new double[] {  2,  1,  1 },
                new double[] {  1,  1,  2 },
                new double[] {  1,  2,  2 },
                new double[] {  3,  1,  2 },

                // The last three are from class 2
                new double[] { 11,  5,  4 },
                new double[] { 15,  5,  6 },
                new double[] { 10,  5,  6 },
            };

            int[] outputs =
            {
                0, 0,       // First two from class 0
                1, 1, 1, 1, // Next four from class 1
                2, 2, 2     // Last three from class 2
            };



            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation(size: inputs.Length, folds: 3);

            // Define a fitting function using k-Nearest Neighbors. The objective of this
            // function is to evaluate a KNN classifier on the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is given the indices of the original set that
                // should be considered training data and the indices that should be
                // considered validation data.

                // Let's now grab the training data:
                var trainingInputs  = inputs.Submatrix(indicesTrain);
                var trainingOutputs = outputs.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs  = inputs.Submatrix(indicesValidation);
                var validationOutputs = outputs.Submatrix(indicesValidation);

                // Now we will create the K-Nearest Neighbors algorithm. For this
                // example, we will be choosing k = 4. This means that, for a given
                // instance, its nearest 4 neighbors will be used to cast a decision.
                // Note that this sample builds the classifier from the complete
                // inputs/outputs arrays rather than from the training subset, so the
                // validation samples are also known to the model (a leakage-free
                // variant is sketched after this example).
                KNearestNeighbors knn = new KNearestNeighbors(k: 4, classes: 3,
                                                              inputs: inputs, outputs: outputs);


                // After the algorithm has been created, we can classify instances:
                int[] train_predicted = trainingInputs.Apply(knn.Compute);
                int[] test_predicted  = validationInputs.Apply(knn.Compute);

                // Compute the classification accuracy on the training data
                var    cmTrain     = new ConfusionMatrix(train_predicted, trainingOutputs);
                double trainingAcc = cmTrain.Accuracy;

                // Now we can compute the accuracy on the validation data:
                var    cmTest        = new ConfusionMatrix(test_predicted, validationOutputs);
                double validationAcc = cmTest.Accuracy;

                // Return a new information structure containing the model and the accuracies achieved.
                return(new CrossValidationValues(knn, trainingAcc, validationAcc));
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingAccs   = result.Training.Mean;
            double validationAccs = result.Validation.Mean;


            Assert.AreEqual(1, trainingAccs);
            Assert.AreEqual(1, validationAccs);
        }
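Because the classifier above is built from the full dataset, the asserted accuracies of 1 partly reflect that the validation samples were already seen by the model. A leakage-free variant of the fold body would train only on the training subset; here is a minimal sketch, not run against the original assertions (which depend on the full-data construction):

                // Build the k-NN model from the training subset only and evaluate it
                // on the held-out validation subset.
                KNearestNeighbors knn = new KNearestNeighbors(k: 4, classes: 3,
                                                              inputs: trainingInputs, outputs: trainingOutputs);

                int[] train_predicted = trainingInputs.Apply(knn.Compute);
                int[] test_predicted  = validationInputs.Apply(knn.Compute);

                double trainingAcc   = new ConfusionMatrix(train_predicted, trainingOutputs).Accuracy;
                double validationAcc = new ConfusionMatrix(test_predicted, validationOutputs).Accuracy;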
Ejemplo n.º 22
0
        public override Task <GridSearchParameterCollection> GridSearchAsync(ClassificationModel classificationModel)
        {
            return(Task.Factory.StartNew(() =>
            {
                // Declare the parameters and ranges to be searched
                List <GridSearchRange> ranges = new List <GridSearchRange>
                {
                    new GridSearchRange("complexity", new[] { 150.0, 100.0, 50, 10, 1 }),
                };

                switch (Kernel)
                {
                case Kernel.Gaussian:
                    ranges.Add(new GridSearchRange("gamma", new[] { 0.1, 1.0, 2.0, 5.0, 10.0, 20.0 }));
                    break;

                case Kernel.Polynomial:
                    ranges.Add(new GridSearchRange("degree", new[] { 1.0, 2.0, 3.0, 4.0 }));
                    break;
                }

                int numFeatureVectors = classificationModel.FeatureVectors.Count;

                double[][] input = new double[numFeatureVectors][];
                int[] responses = new int[numFeatureVectors];

                for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
                {
                    var featureVector = classificationModel.FeatureVectors[featureIndex];

                    input[featureIndex] = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
                    responses[featureIndex] = (int)featureVector.FeatureClass;
                }

                // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
                var gridsearch = new GridSearch <MulticlassSupportVectorMachine <Gaussian> >(ranges.ToArray());

                // Set the fitting function for the algorithm
                gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
                {
                    // The parameters to be tried will be passed as a function parameter.
                    double complexity = parameters["complexity"].Value;
                    double gamma = parameters.Contains("gamma") ? parameters["gamma"].Value : 0;
                    int degree = parameters.Contains("degree") ? (int)parameters["degree"].Value : 0;

                    // Create a new Cross-validation algorithm passing the data set size and the number of folds
                    var crossvalidation = new CrossValidation(size: input.Length, folds: 10);

                    // Define a fitting function using Support Vector Machines. The objective of this
                    // function is to learn a SVM in the subset of the data indicated by cross-validation.
                    crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
                    {
                        // Let's now grab the training data:
                        var trainingInputs = input.Get(indicesTrain);
                        var trainingOutputs = responses.Get(indicesTrain);

                        // And now the validation data:
                        var validationInputs = input.Get(indicesValidation);
                        var validationOutputs = responses.Get(indicesValidation);

                        int[] predictedTraining;
                        int[] predictedValidation;
                        TrainAndPredict(complexity, gamma, degree, trainingInputs, trainingOutputs, validationInputs, out predictedTraining, out predictedValidation);

                        double trainingError = new ZeroOneLoss(trainingOutputs).Loss(predictedTraining);
                        double validationError = new ZeroOneLoss(validationOutputs).Loss(predictedValidation);

                        // Return a new information structure containing the model and the errors achieved.
                        return new CrossValidationValues(trainingError, validationError);
                    };

                    // Compute the cross-validation
                    var result = crossvalidation.Compute();

                    // Finally, access the measured performance.
                    double trainingErrors = result.Training.Mean;
                    double validationErrors = result.Validation.Mean;

                    error = validationErrors;

                    return null; // No model is returned here; the grid search only uses the reported error
                };


                // Declare some out variables to pass to the grid search algorithm
                GridSearchParameterCollection bestParameters;
                double minError;

                // Compute the grid search to find the best Support Vector Machine
                gridsearch.Compute(out bestParameters, out minError);

                return bestParameters;
            }));
        }
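The TrainAndPredict helper called inside the fold is not shown in this excerpt. Below is a sketch of what its Gaussian-kernel branch could look like using Accord.NET's MulticlassSupportVectorLearning; the method name, signature, and the treatment of gamma are assumptions, and the real helper must also cover the Polynomial case selected via the degree parameter.

        // Hypothetical helper (sketch only): trains a one-vs-one multiclass SVM with
        // a Gaussian kernel via SMO and predicts on both the training and validation
        // inputs. Uses Accord.MachineLearning.VectorMachines.Learning and
        // Accord.Statistics.Kernels.
        private static void TrainAndPredictGaussian(double complexity, double gamma,
            double[][] trainingInputs, int[] trainingOutputs, double[][] validationInputs,
            out int[] predictedTraining, out int[] predictedValidation)
        {
            var teacher = new MulticlassSupportVectorLearning<Gaussian>()
            {
                Learner = (p) => new SequentialMinimalOptimization<Gaussian>()
                {
                    Complexity = complexity,
                    Kernel = new Gaussian(gamma) // treats 'gamma' as the kernel width; adapt if a true gamma parameterization is required
                }
            };

            var machine = teacher.Learn(trainingInputs, trainingOutputs);

            predictedTraining = machine.Decide(trainingInputs);
            predictedValidation = machine.Decide(validationInputs);
        }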
Ejemplo n.º 23
0
        public void CrossvalidationConstructorTest()
        {

            Accord.Math.Tools.SetupGenerator(0);

            // This is a sample code on how to use Cross-Validation
            // to assess the performance of Support Vector Machines.

            // Consider the example binary data below. We will be trying
            // to learn the XOR problem and see how well SVMs
            // perform on this data.

            double[][] data =
            {
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
                new double[] { -1, -1 }, new double[] {  1, -1 },
                new double[] { -1,  1 }, new double[] {  1,  1 },
            };

            int[] xor = // result of xor for the sample input data
            {
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
                -1,       1,
                 1,      -1,
            };


            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation<KernelSupportVectorMachine>(size: data.Length, folds: 3);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.

            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // The fitting function is given the indices of the original set that
                // should be considered training data and the indices that should be
                // considered validation data.

                // Let's now grab the training data:
                var trainingInputs = data.Submatrix(indicesTrain);
                var trainingOutputs = xor.Submatrix(indicesTrain);

                // And now the validation data:
                var validationInputs = data.Submatrix(indicesValidation);
                var validationOutputs = xor.Submatrix(indicesValidation);


                // Create a Kernel Support Vector Machine to operate on the set
                var svm = new KernelSupportVectorMachine(new Polynomial(2), 2);

                // Create a training algorithm and learn the training data
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                double trainingError = smo.Run();

                // Now we can compute the validation error on the validation data:
                double validationError = smo.ComputeError(validationInputs, validationOutputs);

                // Return a new information structure containing the model and the errors achieved.
                return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);
            };


            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            Assert.AreEqual(3, crossvalidation.K);
            Assert.AreEqual(0, result.Training.Mean);
            Assert.AreEqual(0, result.Validation.Mean);

            Assert.AreEqual(3, crossvalidation.Folds.Length);
            Assert.AreEqual(3, result.Models.Length);
        }
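Besides the fold means asserted above, the cross-validation result also exposes the spread across folds. Here is a short sketch using the same StandardDeviation property exercised in the FittingTest example below:

            // Report mean and standard deviation of the errors across the three folds
            Console.WriteLine("Training error:   {0} (std. dev. {1})",
                result.Training.Mean, result.Training.StandardDeviation);
            Console.WriteLine("Validation error: {0} (std. dev. {1})",
                result.Validation.Mean, result.Validation.StandardDeviation);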
Ejemplo n.º 24
0
        public void FittingTest()
        {

            int[] folds = CrossValidation.Splittings(100, 10);

            int[] samples = Matrix.Indices(0, 100);

            CrossValidation val = new CrossValidation(folds, 10);

            val.RunInParallel = false;

            int current = 0;
            val.Fitting = (k, trainingSamples, validationSamples) =>
            {
                Assert.AreEqual(current, k);
                Assert.AreEqual(90, trainingSamples.Length);
                Assert.AreEqual(10, validationSamples.Length);

                int[] trainingSet = samples.Submatrix(trainingSamples);
                int[] validationSet = samples.Submatrix(validationSamples);

                for (int i = 0; i < trainingSet.Length; i++)
                    Assert.AreEqual(samples[trainingSamples[i]], trainingSet[i]);

                for (int i = 0; i < validationSet.Length; i++)
                    Assert.AreEqual(samples[validationSamples[i]], validationSet[i]);

                current++;

                return new CrossValidationValues<object>(new object(), k, 2 * k);
            };

            var result = val.Compute();

            Assert.AreEqual(10, current);
            Assert.AreEqual(4.5, result.Training.Mean);
            Assert.AreEqual(9.0, result.Validation.Mean);
            Assert.AreEqual(
                2 * result.Training.StandardDeviation,      
                result.Validation.StandardDeviation);

            Assert.AreEqual(val.Folds.Length, result.Training.Sizes.Length);
            Assert.AreEqual(val.Folds.Length, result.Validation.Sizes.Length);

            for (int i = 0; i < result.Training.Sizes.Length; i++)
                Assert.AreEqual(90, result.Training.Sizes[i]);

            for (int i = 0; i < result.Validation.Sizes.Length; i++)
                Assert.AreEqual(10, result.Validation.Sizes[i]);
        }
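The FittingTest above drives CrossValidation from a precomputed fold-assignment array. A small standalone sketch (assuming, as the constructor call above suggests, that Splittings returns one fold label per sample) that counts how many samples land in each of the 10 folds:

            int[] foldLabels = CrossValidation.Splittings(100, 10);
            int[] foldCounts = new int[10];
            for (int i = 0; i < foldLabels.Length; i++)
                foldCounts[foldLabels[i]]++;
            for (int f = 0; f < foldCounts.Length; f++)
                Console.WriteLine("Fold {0}: {1} samples", f, foldCounts[f]);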
Ejemplo n.º 25
0
        private void button1_Click(object sender, EventArgs e)
        {
            // Creates a matrix from the source data table
            double[,] sourceMatrix = (dgvLearningSource.DataSource as DataTable).ToMatrix(out sourceColumns);

            // Get only the input vector values
            var inputs = sourceMatrix.Submatrix(0, sourceMatrix.GetLength(0) - 1, 0, 1).ToArray();

            // Get only the label outputs
            var outputs = new int[sourceMatrix.GetLength(0)];
            for (int i = 0; i < outputs.Length; i++)
                outputs[i] = (int)sourceMatrix[i, 2];

            var cv = new CrossValidation<KernelSupportVectorMachine>(inputs.Length, 10);
            cv.Fitting = (int k, int[] training, int[] testing) =>
            {
                var trainingInputs = inputs.Submatrix(training);
                var trainingOutputs = outputs.Submatrix(training);
                var testingInputs = inputs.Submatrix(testing);
                var testingOutputs = outputs.Submatrix(testing);

                // Create the specified Kernel
                IKernel kernel = getKernel();


                // Creates the Support Vector Machine using the selected kernel
                var svm = new KernelSupportVectorMachine(kernel, 2);

                // Creates a new instance of the SMO Learning Algorithm
                var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);

                // Set learning parameters
                smo.Complexity = (double)numC.Value;
                smo.Tolerance = (double)numT.Value;

                // Run
                double trainingError = smo.Run();
                double validationError = smo.ComputeError(testingInputs, testingOutputs);

                return new CrossValidationValues<KernelSupportVectorMachine>(svm, trainingError, validationError);

            };

            var result = cv.Compute();
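
            // Illustrative follow-up (not part of the original handler, which discards
            // the result): surface the measured performance to the user.
            MessageBox.Show(String.Format(
                "Training error: {0:0.0000}\nValidation error: {1:0.0000}",
                result.Training.Mean, result.Validation.Mean));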

        }