public RandomForest GetBestRandomForestsWithGridSearch(AppIdentAcordSource appIdentAcordSource, out GridSearchParameterCollection bestParameters, out double minError)
{
    // grid search ranges (parameter values)
    GridSearchRange[] parameterRanges =
    {
        new GridSearchRange("trees", new double[]
        {
            //1,
            //3,
            //5,
            //8,
            11,
            13,
            17,
            19,
            37
        }),
        new GridSearchRange("sampleRatio", new[]
        {
            // 0.7,
            0.8,
            // 0.9
        }),
        new GridSearchRange("join", new double[]
        {
            //25,
            //50,
            //100,
            150,
            200,
            250,
            300
        })
    };

    var samples = appIdentAcordSource.Samples;
    var labels = appIdentAcordSource.LabelsAsIntegers;
    var decisionVariables = appIdentAcordSource.DecisionVariables;

    // instantiate grid search algorithm for a CLF model
    var gridSearch = new GridSearch<RandomForest>(parameterRanges)
    {
        Fitting = delegate(GridSearchParameterCollection parameters, out double error)
        {
            Console.WriteLine($"{DateTime.Now} RandomForest grid search.");

            // Use the parameters to build the model
            // Create a new learning algorithm
            var rfcModel = CreateRandomForestModel(decisionVariables, parameters, samples, labels);

            // Measure the model performance to return as an out parameter
            error = new ZeroOneLoss(labels).Loss(rfcModel.Decide(samples));

            // Return the current model
            return rfcModel;
        }
        //,ParallelOptions = new ParallelOptions() { MaxDegreeOfParallelism = 1 }
    };

    // Compute the grid search to find the best RandomForest model
    return gridSearch.Compute(out bestParameters, out minError);
}
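// CreateRandomForestModel is referenced above but not shown in this listing. The sketch below is
// a hypothetical implementation, assuming Accord.NET's RandomForestLearning
// (Accord.MachineLearning.DecisionTrees) and that the searched parameter names map onto its
// NumberOfTrees, SampleRatio and Join settings; adjust to whatever the real helper actually does.
private static RandomForest CreateRandomForestModel(DecisionVariable[] decisionVariables, GridSearchParameterCollection parameters, double[][] samples, int[] labels)
{
    // Translate the grid-search parameters into learner settings
    var teacher = new RandomForestLearning(decisionVariables)
    {
        NumberOfTrees = (int)parameters["trees"].Value,
        SampleRatio = parameters["sampleRatio"].Value,
        Join = (int)parameters["join"].Value
    };

    // Train the forest on the full sample set and hand it back to the grid search
    return teacher.Learn(samples, labels);
}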
public static void GridSearch(double[][] inputs, int[] outputs)
{
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 0.001, 5.20, 0.30, 0.50, 20, 50, 100, 100 }),
        new GridSearchRange("degree", new double[] { 1, 2, 3, 4, 5, 10 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 }),
        new GridSearchRange("sigma", new double[] { 0.1, 0.25, 0.5, 1, 2, 5 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;
        double sigma = parameters["sigma"].Value;

        // Use the parameters to build the SVM model
        //Polynomial kernel = new Polynomial(degree, constant);
        //Gaussian kernel = new Gaussian(sigma);
        Gaussian kernel = new Gaussian(sigma);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, outputs);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);
}
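// Hedged usage sketch for the search above: GridSearch() discards the machine it finds. If it
// returned bestModel instead, a caller could classify a new sample with the classic Accord.NET
// API, where KernelSupportVectorMachine.Compute(double[]) yields a signed output whose sign
// encodes the predicted class. The feature values below are illustrative only.
double output = bestModel.Compute(new double[] { 0.25, -0.75 });
int predictedLabel = output >= 0 ? +1 : -1;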
public void GridsearchConstructorTest()
{
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
        new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

#if DEBUG
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;
#endif

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        Polynomial kernel = new Polynomial(degree, constant);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

    // With this seed, the search should settle on these parameter values.
    Assert.AreEqual(1, bestParameters["degree"].Value);
    Assert.AreEqual(1, bestParameters["constant"].Value);
    Assert.AreEqual(1e-8, bestParameters["complexity"].Value);

    // The minimum error should be zero because the problem is well-known.
    Assert.AreEqual(minError, 0.0);

    Assert.IsNotNull(bestModel);
    Assert.IsNotNull(bestParameters);
    Assert.AreEqual(bestParameters.Count, 3);
}
public void GridsearchConstructorTest2()
{
    Accord.Math.Random.Generator.Seed = 0;

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 1000000, 0.50 }),
    };

    // Instantiate a new Grid Search algorithm for Support Vector Machines
    var gridsearch = new GridSearch<SupportVectorMachine>(ranges);
    gridsearch.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        SupportVectorMachine svm = new SupportVectorMachine(2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return svm; // Return the current model
    };

    {
        // Declare some out variables to pass to the grid search algorithm
        GridSearchParameterCollection bestParameters;
        double minError;

        // Compute the grid search to find the best Support Vector Machine
        SupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

        // A linear machine cannot solve XOR, so the best achievable error is 0.5.
        Assert.AreEqual(minError, 0.5);

        Assert.IsNotNull(bestModel);
        Assert.IsNotNull(bestParameters);
        Assert.AreEqual(bestParameters.Count, 1);
    }

    {
        // Compute the grid search to find the best Support Vector Machine
        var result = gridsearch.Compute();

        // A linear machine cannot solve XOR, so the best achievable error is 0.5.
        Assert.AreEqual(result.Error, 0.5);
        Assert.IsNotNull(result.Model);
        Assert.AreEqual(5, result.Errors.Length);
        Assert.AreEqual(5, result.Models.Length);
    }
}
public override Task<GridSearchParameterCollection> GridSearchAsync(ClassificationModel classificationModel)
{
    return Task.Factory.StartNew(() =>
    {
        // Declare the parameters and ranges to be searched
        List<GridSearchRange> ranges = new List<GridSearchRange>
        {
            new GridSearchRange("complexity", new[] { 150.0, 100.0, 50, 10, 1 }),
        };

        switch (Kernel)
        {
            case Kernel.Gaussian:
                ranges.Add(new GridSearchRange("gamma", new[] { 0.1, 1.0, 2.0, 5.0, 10.0, 20.0 }));
                break;

            case Kernel.Polynomial:
                ranges.Add(new GridSearchRange("degree", new[] { 1.0, 2.0, 3.0, 4.0 }));
                break;
        }

        int numFeatures = classificationModel.FeatureVectors.Count;
        double[][] input = new double[numFeatures][];
        int[] responses = new int[numFeatures];

        for (int featureIndex = 0; featureIndex < classificationModel.FeatureVectors.Count; ++featureIndex)
        {
            var featureVector = classificationModel.FeatureVectors[featureIndex];
            input[featureIndex] = Array.ConvertAll(featureVector.FeatureVector.BandIntensities, s => (double)s / ushort.MaxValue);
            responses[featureIndex] = (int)featureVector.FeatureClass;
        }

        // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
        var gridsearch = new GridSearch<MulticlassSupportVectorMachine<Gaussian>>(ranges.ToArray());

        // Set the fitting function for the algorithm
        gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
        {
            // The parameters to be tried will be passed as a function parameter.
            double complexity = parameters["complexity"].Value;
            double gamma = parameters.Contains("gamma") ? parameters["gamma"].Value : 0;
            int degree = parameters.Contains("degree") ? (int)parameters["degree"].Value : 0;

            // Create a new Cross-validation algorithm passing the data set size and the number of folds
            var crossvalidation = new CrossValidation(size: input.Length, folds: 10);

            // Define a fitting function using Support Vector Machines. The objective of this
            // function is to learn a SVM in the subset of the data indicated by cross-validation.
            crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
            {
                // Let's now grab the training data:
                var trainingInputs = input.Get(indicesTrain);
                var trainingOutputs = responses.Get(indicesTrain);

                // And now the validation data:
                var validationInputs = input.Get(indicesValidation);
                var validationOutputs = responses.Get(indicesValidation);

                int[] predictedTraining;
                int[] predictedValidation;
                TrainAndPredict(complexity, gamma, degree, trainingInputs, trainingOutputs, validationInputs, out predictedTraining, out predictedValidation);

                double trainingError = new ZeroOneLoss(trainingOutputs).Loss(predictedTraining);
                double validationError = new ZeroOneLoss(validationOutputs).Loss(predictedValidation);

                // Return a new information structure containing the model and the errors achieved.
                return new CrossValidationValues(trainingError, validationError);
            };

            // Compute the cross-validation
            var result = crossvalidation.Compute();

            // Finally, access the measured performance.
            double trainingErrors = result.Training.Mean;
            double validationErrors = result.Validation.Mean;

            // Use the mean cross-validated error to score this parameter combination
            error = validationErrors;

            // No model needs to be returned here; only the error is used to rank the parameters
            return null;
        };

        // Declare some out variables to pass to the grid search algorithm
        GridSearchParameterCollection bestParameters;
        double minError;

        // Compute the grid search to find the best Support Vector Machine
        gridsearch.Compute(out bestParameters, out minError);

        return bestParameters;
    });
}
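// TrainAndPredict is referenced above but not shown in this listing. The sketch below is a
// hypothetical implementation, assuming Accord.NET's MulticlassSupportVectorLearning<Gaussian>
// and Gaussian.FromGamma; the real helper presumably also switches to a polynomial kernel when
// a degree is supplied, which is omitted here for brevity.
private static void TrainAndPredict(double complexity, double gamma, int degree,
    double[][] trainingInputs, int[] trainingOutputs, double[][] validationInputs,
    out int[] predictedTraining, out int[] predictedValidation)
{
    // Configure a one-vs-one multiclass SVM trained by SMO with the searched parameters
    var teacher = new MulticlassSupportVectorLearning<Gaussian>()
    {
        Learner = (p) => new SequentialMinimalOptimization<Gaussian>()
        {
            Complexity = complexity,
            Kernel = Gaussian.FromGamma(gamma)
        }
    };

    // Learn the machine on the training fold only
    var machine = teacher.Learn(trainingInputs, trainingOutputs);

    // Predict both folds so the caller can compute training and validation error
    predictedTraining = machine.Decide(trainingInputs);
    predictedValidation = machine.Decide(validationInputs);
}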
public void GridsearchConstructorTest()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 0.50 }),
        new GridSearchRange("degree", new double[] { 1, 10, 2, 3, 4, 5 }),
        new GridSearchRange("constant", new double[] { 0, 1, 2 })
    };

    // Instantiate a new Grid Search algorithm for Kernel Support Vector Machines
    var gridsearch = new GridSearch<KernelSupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        int degree = (int)parameters["degree"].Value;
        double constant = parameters["constant"].Value;
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        Polynomial kernel = new Polynomial(degree, constant);
        KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(kernel, 2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(ksvm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return ksvm; // Return the current model
    };

    // Declare some out variables to pass to the grid search algorithm
    GridSearchParameterCollection bestParameters;
    double minError;

    // Compute the grid search to find the best Support Vector Machine
    KernelSupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

    // A linear kernel can't solve the xor problem.
    Assert.AreNotEqual((int)bestParameters["degree"].Value, 1);

    // The minimum error should be zero because the problem is well-known.
    Assert.AreEqual(minError, 0.0);

    Assert.IsNotNull(bestModel);
    Assert.IsNotNull(bestParameters);
    Assert.AreEqual(bestParameters.Count, 3);
}
public void GridsearchConstructorTest2()
{
    Accord.Math.Tools.SetupGenerator(0);

    // Example binary data
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = // xor labels
    {
        -1, 1, 1, -1
    };

    // Declare the parameters and ranges to be searched
    GridSearchRange[] ranges =
    {
        new GridSearchRange("complexity", new double[] { 0.00000001, 5.20, 0.30, 1000000, 0.50 }),
    };

    // Instantiate a new Grid Search algorithm for Support Vector Machines
    var gridsearch = new GridSearch<SupportVectorMachine>(ranges);

    // Set the fitting function for the algorithm
    gridsearch.Fitting = delegate(GridSearchParameterCollection parameters, out double error)
    {
        // The parameters to be tried will be passed as a function parameter.
        double complexity = parameters["complexity"].Value;

        // Use the parameters to build the SVM model
        SupportVectorMachine svm = new SupportVectorMachine(2);

        // Create a new learning algorithm for SVMs
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, xor);
        smo.Complexity = complexity;

        // Measure the model performance to return as an out parameter
        error = smo.Run();

        return svm; // Return the current model
    };

    {
        // Declare some out variables to pass to the grid search algorithm
        GridSearchParameterCollection bestParameters;
        double minError;

        // Compute the grid search to find the best Support Vector Machine
        SupportVectorMachine bestModel = gridsearch.Compute(out bestParameters, out minError);

        // A linear machine cannot solve XOR, so the best achievable error is 0.5.
        Assert.AreEqual(minError, 0.5);

        Assert.IsNotNull(bestModel);
        Assert.IsNotNull(bestParameters);
        Assert.AreEqual(bestParameters.Count, 1);
    }

    {
        // Compute the grid search to find the best Support Vector Machine
        var result = gridsearch.Compute();

        // A linear machine cannot solve XOR, so the best achievable error is 0.5.
        Assert.AreEqual(result.Error, 0.5);
        Assert.IsNotNull(result.Model);
        Assert.AreEqual(5, result.Errors.Length);
        Assert.AreEqual(5, result.Models.Length);
    }
}