public void TrainTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Example regression problem. Suppose we are trying
            // to model the following equation: f(x, y) = 2x + y

            double[][] inputs = // (x, y)
            {
                new double[] { 0,  1 }, // 2*0 + 1 =  1
                new double[] { 4,  3 }, // 2*4 + 3 = 11
                new double[] { 8, -8 }, // 2*8 - 8 =  8
                new double[] { 2,  2 }, // 2*2 + 2 =  6
                new double[] { 6,  1 }, // 2*6 + 1 = 13
                new double[] { 5,  4 }, // 2*5 + 4 = 14
                new double[] { 9,  1 }, // 2*9 + 1 = 19
                new double[] { 1,  6 }, // 2*1 + 6 =  8
            };

            double[] outputs = // f(x, y)
            {
                    1, 11, 8, 6, 13, 14, 19, 8
            };

            // Create Kernel Support Vector Machine with a Polynomial Kernel of 2nd degree
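            // (a 2nd-degree polynomial kernel also contains the linear terms, so the
            //  machine can capture the purely linear target f(x, y) = 2x + y)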
            var machine = new KernelSupportVectorMachine(new Polynomial(2), inputs: 2);

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimizationRegression(machine, inputs, outputs)
            {
                Complexity = 100
            };

            // Run the learning algorithm
            double error = learn.Run();

            // Compute the answer for one particular example
            double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

            // Check for correct answers
            double[] answers = new double[inputs.Length];
            for (int i = 0; i < answers.Length; i++)
                answers[i] = machine.Compute(inputs[i]);

            Assert.AreEqual(1.0, fxy, 1e-2);
            for (int i = 0; i < outputs.Length; i++)
                Assert.AreEqual(outputs[i], answers[i], 1e-2);
        }
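
After training, the same machine can also be queried on points that never appeared in the training set. A minimal sketch (the test point below and its expected value are illustrative additions, not part of the original test, and the exact output depends on the fitted model):

            // Evaluate a point that was not in the training set; since the target
            // function is f(x, y) = 2x + y, the output should be close to 2*3 + 5 = 11
            double unseen = machine.Compute(new double[] { 3, 5 });
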
Example 2
        private void btnCreate_Click(object sender, EventArgs e)
        {
            if (dgvLearningSource.DataSource == null)
            {
                MessageBox.Show("Please load some data first.");
                return;
            }

            // Commit any pending edits in the data grid
            dgvLearningSource.EndEdit();



            // Creates a matrix from the entire source data table
            double[,] table = (dgvLearningSource.DataSource as DataTable).ToMatrix(out columnNames);

            // Get only the input vector values (first column)
            double[][] inputs = table.GetColumns(0).ToArray();

            // Get only the outputs (last column)
            double[] outputs = table.GetColumn(1);


            // Create the specified Kernel
            IKernel kernel = createKernel();


            // Create the Support Vector Machine for 1 input variable
            svm = new KernelSupportVectorMachine(kernel, inputs: 1);

            // Create a new instance of the SMO regression learning algorithm
            var smo = new SequentialMinimalOptimizationRegression(svm, inputs, outputs)
            {
                // Set learning parameters
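                // Complexity (C) controls how strongly deviations larger than Epsilon
                // are penalized, Epsilon is the width of the epsilon-insensitive tube
                // around the regression function, and Tolerance is the numerical
                // tolerance used by the SMO stopping criterion.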
                Complexity = (double)numC.Value,
                Tolerance = (double)numT.Value,
                Epsilon = (double)numEpsilon.Value
            };



            try
            {
                // Run
                double error = smo.Run();

                lbStatus.Text = "Training complete!";
            }
            catch (ConvergenceException)
            {
                lbStatus.Text = "Convergence could not be attained. " +
                    "The learned machine might still be usable.";
            }



            // Check if we got support vectors
            if (svm.SupportVectors.Length == 0)
            {
                dgvSupportVectors.DataSource = null;
                graphSupportVectors.GraphPane.CurveList.Clear();
                return;
            }



            // Show support vectors on the Support Vectors tab page
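            // (each row holds a support vector's input values with its weight
            //  in the machine's expansion appended as the last column)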
            double[][] supportVectorsWeights = svm.SupportVectors.InsertColumn(svm.Weights);

            string[] supportVectorNames = columnNames.RemoveAt(columnNames.Length - 1).Concatenate("Weight");
            dgvSupportVectors.DataSource = new ArrayDataView(supportVectorsWeights, supportVectorNames);



            // Show the support vector labels on the scatter plot
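            // (each support vector is matched back to its training sample by
            //  reference, and that sample's output value is used as its label)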
            double[] supportVectorLabels = new double[svm.SupportVectors.Length];
            for (int i = 0; i < supportVectorLabels.Length; i++)
            {
                int j = inputs.Find(sv => sv == svm.SupportVectors[i])[0];
                supportVectorLabels[i] = outputs[j];
            }

            double[][] graph = svm.SupportVectors.InsertColumn(supportVectorLabels);

            CreateScatterplot(graphSupportVectors, graph.ToMatrix());



            // Get the range of the input variable (X)
            DoubleRange range = Matrix.Range(table.GetColumn(0));

            double[][] map = Matrix.Interval(range, 0.05).ToArray();
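            // (the sampled input values are used to draw the learned regression
            //  curve over the whole input range)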

            // Compute the machine's output for each sampled input point
            double[] result = map.Apply(svm.Compute);
            double[,] surface = map.ToMatrix().InsertColumn(result);

            CreateScatterplot(zedGraphControl2, surface);
        }
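
The createKernel() helper used above is not shown in this listing. A minimal sketch of what such a helper could look like, assuming the form offers a Gaussian/Polynomial choice through controls named rbGaussian, numSigma and numDegree (these names are illustrative, not from the original sample; Gaussian, Polynomial and IKernel come from Accord.Statistics.Kernels):

        private IKernel createKernel()
        {
            // Hypothetical: pick the kernel from the form's current selection
            if (rbGaussian.Checked)
                return new Gaussian((double)numSigma.Value);    // Gaussian (RBF) kernel

            return new Polynomial((int)numDegree.Value, 1.0);   // polynomial kernel
        }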