public void RunTest()
{
    // Example regression problem. Suppose we are trying
    // to model the following equation: f(x, y) = 2x + y

    double[][] inputs = // (x, y)
    {
        new double[] { 0,  1 }, // 2*0 + 1 =  1
        new double[] { 4,  3 }, // 2*4 + 3 = 11
        new double[] { 8, -8 }, // 2*8 - 8 =  8
        new double[] { 2,  2 }, // 2*2 + 2 =  6
        new double[] { 6,  1 }, // 2*6 + 1 = 13
        new double[] { 5,  4 }, // 2*5 + 4 = 14
        new double[] { 9,  1 }, // 2*9 + 1 = 19
        new double[] { 1,  6 }, // 2*1 + 6 =  8
    };

    double[] outputs = // f(x, y)
    {
        1, 11, 8, 6, 13, 14, 19, 8
    };

    // Create a new linear Support Vector Machine
    var machine = new SupportVectorMachine(inputs: 2);

    // Create the linear regression Newton method teacher
    var learn = new LinearRegressionNewtonMethod(machine, inputs, outputs)
    {
        Complexity = 100000000,
        Epsilon = 1e-15,
        Tolerance = 1e-15,
    };

    // Run the learning algorithm
    double error = learn.Run();
    Assert.AreEqual(0.0000000000000000030183002120114053, error);

    // Compute the answer for one particular example
    double fxy = machine.Compute(inputs[0]); // 1.0003849827673186

    // Check for correct answers
    double[] answers = new double[inputs.Length];
    for (int i = 0; i < answers.Length; i++)
        answers[i] = machine.Compute(inputs[i]);

    Assert.AreEqual(1.0, fxy, 1e-5);
    for (int i = 0; i < outputs.Length; i++)
        Assert.AreEqual(outputs[i], answers[i], 1e-5);
}
public static void predict(SupportVectorMachine network, SupportVectorMachine network2)
{
    Console.WriteLine(@"Year\tActual\tPredict\tClosed Loops");

    for (int year = EVALUATE_START; year < EVALUATE_END; year++)
    {
        // calculate based on actual data
        IMLData input = new BasicMLData(WINDOW_SIZE);
        for (int i = 0; i < input.Count; i++)
        {
            input.Data[i] = normalizedSunspots[(year - WINDOW_SIZE) + i];
        }

        IMLData output = network.Compute(input);
        IMLData output2 = network2.Compute(input);

        double prediction = output.Data[0];
        double prediction2 = output2.Data[0];
        closedLoopSunspots[year] = prediction;

        // calculate "closed loop", based on predicted data
        for (int i = 0; i < input.Count; i++)
        {
            input.Data[i] = closedLoopSunspots[(year - WINDOW_SIZE) + i];
        }

        output = network.Compute(input);
        double closedLoopPrediction = output[0];

        IMLData output3 = network2.Compute(input);
        double closedLoopPrediction2 = output3[0];

        // display the year followed by the actual, normalized, predicted
        // and closed-loop values for both machines
        Console.WriteLine((STARTING_YEAR + year)
            + @"\t " + Format.FormatDouble(SUNSPOTS[year], 4)
            + @"\t " + Format.FormatDouble(normalizedSunspots[year], 4)
            + @"\t " + Format.FormatDouble(prediction, 4)
            + @"\t " + Format.FormatDouble(prediction2, 4)
            + @"\t " + Format.FormatDouble(closedLoopPrediction, 4)
            + @"\t " + Format.FormatDouble(closedLoopPrediction2, 4));
    }
}
public TimeSeries Predict(SupportVectorMachine network, NormalizeArray norm, TimeSeries simulatedData)
{
    double[] data = GenerateData(simulatedData);
    int data_count = simulatedData.Count;

    TimeSeries ts = new TimeSeries();
    double input_val = 0;

    for (int idx = 0; idx < data_count; ++idx)
    {
        var input = new BasicMLData(WindowSize);
        for (var i = 0; i < WindowSize; i++)
        {
            int idx2 = (idx - WindowSize) + i;
            if (idx2 < 0)
            {
                input_val = 0;
            }
            else
            {
                input_val = norm.Stats.Normalize(data[idx2]);
            }
            input[i] = input_val;
        }

        IMLData output = network.Compute(input);
        double prediction = norm.Stats.DeNormalize(output[0]);
        ts.Add(simulatedData.TimeStamp(idx), prediction, false);
    }

    return ts;
}
/// <summary>
///   Computes the misclassification rate for a given set of inputs and outputs.
/// </summary>
///
public double ComputeError(double[][] inputs, int[] expectedOutputs)
{
    // Compute errors
    int count = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double output;
        double actual = machine.Compute(inputs[i], out output);
        double expected = expectedOutputs[i];

        if (Double.IsNaN(actual))
            Trace.WriteLine("SVM has produced NaNs");

        bool a = actual >= 0;
        bool b = expected >= 0;

        if (a != b)
            count++;
    }

    // Return misclassification error ratio
    return count / (double)inputs.Length;
}
public void Execute(IExampleInterface app)
{
    // create a support vector machine, without using a factory
    var svm = new SupportVectorMachine(1, true); // 1 input, true for regression

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(RegressionInput, RegressionIdeal);

    // train the SVM
    IMLTrain train = new SVMSearchTrain(svm, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);

    // test the SVM
    Console.WriteLine(@"SVM Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = svm.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }
}
public void Run()
{
    // Example AND problem
    double[][] inputs =
    {
        new double[] { 0, 0 }, // 0 and 0: 0 (label -1)
        new double[] { 0, 1 }, // 0 and 1: 0 (label -1)
        new double[] { 1, 0 }, // 1 and 0: 0 (label -1)
        new double[] { 1, 1 }  // 1 and 1: 1 (label +1)
    };

    // Dichotomy SVM outputs should be given as [-1;+1]
    int[] labels =
    {
        // 0,  0,  0, 1
          -1, -1, -1, 1
    };

    // Create a Support Vector Machine for the given inputs
    SupportVectorMachine machine = new SupportVectorMachine(inputs[0].Length);

    // Instantiate a new learning algorithm for SVMs
    SequentialMinimalOptimization smo =
        new SequentialMinimalOptimization(machine, inputs, labels);

    // Set up the learning algorithm
    smo.Complexity = 1.0;

    // Run the learning algorithm
    double error = smo.Run();

    // Compute the decision output for one of the input vectors
    int decision = System.Math.Sign(machine.Compute(inputs[0]));
}
static void Main(string[] args)
{
    // create a support vector machine, without using a factory
    var svm = new SupportVectorMachine(2, false); // 2 inputs, false for classification

    // create training data
    IMLDataSet trainingSet = new BasicMLDataSet(ClassificationInput, ClassificationIdeal);

    // train the SVM
    IMLTrain train = new SVMSearchTrain(svm, trainingSet);

    int epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);

    // test the SVM
    Console.WriteLine(@"SVM Results:");
    foreach (IMLDataPair pair in trainingSet)
    {
        IMLData output = svm.Compute(pair.Input);
        Console.WriteLine(pair.Input[0] + @", actual=" + output[0] + @", ideal=" + pair.Ideal[0]);
    }

    Console.WriteLine("Done");
}
public void ComputeTest5()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();

    double[][] inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    int[] labels = dataset.GetColumn(2).ToInt32();

    var kernel = new Polynomial(2, 1);

    Accord.Math.Tools.SetupGenerator(0);

    var projection = inputs.Apply(kernel.Transform);
    var machine = new SupportVectorMachine(projection[0].Length);

    var smo = new LinearCoordinateDescent(machine, projection, labels)
    {
        Complexity = 1000000,
        Tolerance = 1e-15
    };

    double error = smo.Run();

    Assert.AreEqual(1000000.0, smo.Complexity, 1e-15);

    int[] actual = new int[labels.Length];
    for (int i = 0; i < actual.Length; i++)
        actual[i] = Math.Sign(machine.Compute(projection[i]));

    ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
    Assert.AreEqual(6, matrix.FalseNegatives);
    Assert.AreEqual(7, matrix.FalsePositives);
    Assert.AreEqual(44, matrix.TruePositives);
    Assert.AreEqual(43, matrix.TrueNegatives);
}
static void Main(string[] args)
{
    double[][] inputs =
    {
        // These are from class -1
        new double[] { 2, 4, 0 },
        new double[] { 5, 5, 1 },
        new double[] { 4, 5, 0 },
        new double[] { 2, 5, 5 },
        new double[] { 4, 5, 1 },
        new double[] { 4, 5, 0 },
        new double[] { 6, 2, 0 },
        new double[] { 4, 1, 0 },

        // These are from class +1
        new double[] { 1, 4, 5 },
        new double[] { 7, 5, 1 },
        new double[] { 2, 6, 0 },
        new double[] { 7, 4, 7 },
        new double[] { 4, 5, 0 },
        new double[] { 6, 2, 9 },
        new double[] { 4, 1, 6 },
        new double[] { 7, 2, 9 },
    };

    int[] outputs =
    {
        -1, -1, -1, -1, -1, -1, -1, -1, // first eight from class -1
        +1, +1, +1, +1, +1, +1, +1, +1  // last eight from class +1
    };

    // Next, we create a linear Support Vector Machine with 3 inputs
    SupportVectorMachine machine = new SupportVectorMachine(inputs: 3);

    // Create the sequential minimal optimization learning algorithm
    var smo = new SequentialMinimalOptimization(machine, inputs, outputs);

    // We learn the machine
    double error = smo.Run();

    // And then extract its predicted labels
    double[] predicted = new double[inputs.Length];
    for (int i = 0; i < predicted.Length; i++)
        predicted[i] = machine.Compute(inputs[i]);

    // At this point, the output vector contains the labels which should
    // have been assigned by the machine, and the predicted vector contains
    // the labels which have actually been assigned.

    // Create a new ROC curve to assess the performance of the model
    var roc = new ReceiverOperatingCharacteristic(outputs, predicted);
    roc.Compute(100); // Compute a ROC curve with 100 cut-off points
    roc.GetScatterplot(true);

    Console.WriteLine(roc.Area.ToString());
    Console.Write(roc.StandardError.ToString());
}
public void ComputeTest5()
{
    var dataset = SequentialMinimalOptimizationTest.GetYingYang();
    var inputs = dataset.Submatrix(null, 0, 1).ToJagged();
    var labels = dataset.GetColumn(2).ToInt32();

    var kernel = new Polynomial(2, 0);

    {
        // Train a kernel machine directly on the original inputs
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        var smo = new SequentialMinimalOptimization(machine, inputs, labels);
        smo.UseComplexityHeuristic = true;

        double error = smo.Run();

        Assert.AreEqual(0.2, error);
        Assert.AreEqual(0.11714451552090824, smo.Complexity);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(inputs[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(20, matrix.FalseNegatives);
        Assert.AreEqual(0, matrix.FalsePositives);
        Assert.AreEqual(30, matrix.TruePositives);
        Assert.AreEqual(50, matrix.TrueNegatives);
    }

    {
        // Train a linear machine on the explicit kernel expansion
        Accord.Math.Tools.SetupGenerator(0);

        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        var smo = new LinearNewtonMethod(machine, projection, labels);
        smo.UseComplexityHeuristic = true;

        double error = smo.Run();

        Assert.AreEqual(0.18, error);
        Assert.AreEqual(0.11714451552090821, smo.Complexity, 1e-15);

        int[] actual = new int[labels.Length];
        for (int i = 0; i < actual.Length; i++)
            actual[i] = Math.Sign(machine.Compute(projection[i]));

        ConfusionMatrix matrix = new ConfusionMatrix(actual, labels);
        Assert.AreEqual(17, matrix.FalseNegatives);
        Assert.AreEqual(1, matrix.FalsePositives);
        Assert.AreEqual(33, matrix.TruePositives);
        Assert.AreEqual(49, matrix.TrueNegatives);
    }
}
/// <summary>
///   Computes the sum of the machine's responses for a given set of inputs.
/// </summary>
///
public double ComputeError(double[][] inputs)
{
    double error = 0;
    for (int i = 0; i < inputs.Length; i++)
        error += machine.Compute(inputs[i]);

    return error;
}
private static void and()
{
    // Create a simple binary AND
    // classification problem:

    double[][] problem =
    {
        //             a  b  a AND b
        new double[] { 0, 0,    0    },
        new double[] { 0, 1,    0    },
        new double[] { 1, 0,    0    },
        new double[] { 1, 1,    1    },
    };

    // Get the two first columns as the problem
    // inputs and the last column as the output

    // input columns
    double[][] inputs = problem.GetColumns(0, 1);

    // output column
    int[] outputs = problem.GetColumn(2).ToInt32();

    // Plot the problem on screen
    ScatterplotBox.Show("AND", inputs, outputs).Hold();

    // However, SVMs expect the output value to be
    // either -1 or +1. As such, we have to convert
    // it so the vector contains { -1, -1, -1, +1 }:
    //
    outputs = outputs.Apply(x => x == 0 ? -1 : 1);

    // Create a new linear SVM for two inputs (a and b)
    SupportVectorMachine svm = new SupportVectorMachine(inputs: 2);

    // Create a L2-regularized L2-loss support vector classification teacher
    var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
    {
        Loss = Loss.L2,
        Complexity = 1000,
        Tolerance = 1e-5
    };

    // Learn the machine
    double error = teacher.Run(computeError: true);

    // Compute the machine's answers for the learned inputs
    int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

    // Plot the results
    ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();
}
/// <summary>
///   Computes the sum of squared errors for a given set of inputs and outputs.
/// </summary>
///
public double ComputeError(double[][] inputs, double[] expectedOutputs)
{
    // Compute errors
    double sum = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double s = machine.Compute(inputs[i]) - expectedOutputs[i];
        sum += s * s;
    }

    // Return error sum of squares
    return sum;
}
public void TransformTest()
{
    var inputs = yinyang.Submatrix(null, 0, 1).ToJagged();
    var labels = yinyang.GetColumn(2).ToInt32();

    ConfusionMatrix actual, expected;
    SequentialMinimalOptimization a, b;

    var kernel = new Polynomial(2, 0);

    {
        var machine = new KernelSupportVectorMachine(kernel, inputs[0].Length);
        a = new SequentialMinimalOptimization(machine, inputs, labels);
        a.UseComplexityHeuristic = true;
        a.Run();

        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(inputs[i]));

        expected = new ConfusionMatrix(values, labels);
    }

    {
        var projection = inputs.Apply(kernel.Transform);
        var machine = new SupportVectorMachine(projection[0].Length);
        b = new SequentialMinimalOptimization(machine, projection, labels);
        b.UseComplexityHeuristic = true;
        b.Run();

        int[] values = new int[labels.Length];
        for (int i = 0; i < values.Length; i++)
            values[i] = Math.Sign(machine.Compute(projection[i]));

        actual = new ConfusionMatrix(values, labels);
    }

    Assert.AreEqual(a.Complexity, b.Complexity, 1e-15);
    Assert.AreEqual(expected.TrueNegatives, actual.TrueNegatives);
    Assert.AreEqual(expected.TruePositives, actual.TruePositives);
    Assert.AreEqual(expected.FalseNegatives, actual.FalseNegatives);
    Assert.AreEqual(expected.FalsePositives, actual.FalsePositives);
}
/// <summary>
///   Computes the misclassification rate for a given set of inputs and outputs.
/// </summary>
///
public double ComputeError(double[][] inputs, int[] expectedOutputs)
{
    // Compute errors
    int count = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        bool actual = machine.Compute(inputs[i]) >= 0;
        bool expected = expectedOutputs[i] >= 0;

        if (actual != expected)
            count++;
    }

    // Return misclassification error ratio
    return count / (double)inputs.Length;
}
public int test(double[] inputData)
{
    // test the network
    IMLData input = new BasicMLData(inputData);

    Console.WriteLine(@"Neural Network Results:");
    IMLData output = network.Compute(input);

    Console.WriteLine("Input: ");
    for (int i = 0; i < inputData.Length; i++)
        Console.WriteLine(inputData[i]);

    Console.WriteLine("Output = " + output[0]);

    EncogFramework.Instance.Shutdown();

    return (int)Math.Round(output[0], MidpointRounding.AwayFromZero);
}
public TimeSeries Forecast(SupportVectorMachine network, NormalizeArray norm,
    TimeSeries simulatedData, List<DateTime> futureTimes)
{
    int data_count = simulatedData.Count;
    int future_data_count = futureTimes.Count;

    // Copy the known series and reserve space for the forecast values
    double[] data = new double[data_count + future_data_count];
    for (int idx = 0; idx < data_count; ++idx)
        data[idx] = simulatedData[idx];
    for (int idx = 0; idx < future_data_count; ++idx)
        data[data_count + idx] = 0;

    TimeSeries ts = new TimeSeries();
    double input_val = 0;

    for (int idx = 0; idx < future_data_count; ++idx)
    {
        // Build the normalized input window ending just before the value to forecast
        var input = new BasicMLData(WindowSize);
        for (var i = 0; i < WindowSize; i++)
        {
            int idx2 = (data_count + idx - WindowSize) + i;
            if (idx2 < 0)
            {
                input_val = 0;
            }
            else
            {
                input_val = norm.Stats.Normalize(data[idx2]);
            }
            input[i] = input_val;
        }

        // Predict, de-normalize, and feed the prediction back for the next step
        IMLData output = network.Compute(input);
        double prediction = norm.Stats.DeNormalize(output[0]);
        data[data_count + idx] = prediction;
        ts.Add(futureTimes[idx], prediction, false);
    }

    return ts;
}
private void btnTestingRun_Click(object sender, EventArgs e) { if (svm == null || dgvTestingSource.DataSource == null) { MessageBox.Show("Please create a machine first."); return; } // Creates a matrix from the source data table double[,] table = (dgvTestingSource.DataSource as DataTable).ToMatrix(); // Extract the first columns (X) double[][] inputs = table.GetColumns(0).ToArray(); // Extract the expected output values double[] expected = table.GetColumn(1); // Compute the actual machine outputs var output = new double[expected.Length]; for (int i = 0; i < expected.Length; i++) { output[i] = svm.Compute(inputs[i]); } // Compute R² and Sum-of-squares error double rSquared = Accord.Statistics.Tools.Determination(output, expected); double error = Elementwise.Pow(expected.Subtract(output), 2).Sum() / output.Length; // Anonymous magic! :D var r = new { RSquared = rSquared, Error = error }; dgvPerformance.DataSource = (new[] { r }).ToList(); // Create performance scatter plot CreateResultScatterplot(zedGraphControl1, inputs, expected, output); }
public void LearnTest()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] xor = { -1, 1, 1, -1 };

    // Explicitly project the inputs through a polynomial kernel expansion
    var kernel = new Polynomial(2, 0.0);

    double[][] augmented = new double[inputs.Length][];
    for (int i = 0; i < inputs.Length; i++)
        augmented[i] = kernel.Transform(inputs[i]);

    SupportVectorMachine machine = new SupportVectorMachine(augmented[0].Length);

    // Create the linear dual coordinate descent teacher
    var learn = new LinearDualCoordinateDescent(machine, augmented, xor);

    // Run the learning algorithm
    double error = learn.Run();
    Assert.AreEqual(0, error);

    int[] output = augmented.Apply(p => Math.Sign(machine.Compute(p)));
    for (int i = 0; i < output.Length; i++)
        Assert.AreEqual(System.Math.Sign(xor[i]), System.Math.Sign(output[i]));
}
public void LearnTest5()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] positives = { 1, 1, 1, 1 };

    // Create a linear Support Vector Machine
    SupportVectorMachine machine = new SupportVectorMachine(inputs[0].Length);

    // Create the sequential minimal optimization teacher
    SequentialMinimalOptimization learn =
        new SequentialMinimalOptimization(machine, inputs, positives);
    learn.Complexity = 1;

    // Run the learning algorithm
    double error = learn.Run();
    Assert.AreEqual(0, error);

    int[] output = inputs.Apply(p => (int)machine.Compute(p));

    for (int i = 0; i < output.Length; i++)
    {
        bool sor = positives[i] >= 0;
        bool sou = output[i] >= 0;
        Assert.AreEqual(sor, sou);
    }
}
/// <summary>
///   Tests the previously created machine on a new set of data.
/// </summary>
///
private void btnTestingRun_Click(object sender, EventArgs e)
{
    if (svm == null || dgvTestingSource.DataSource == null)
    {
        MessageBox.Show("Please create a machine first.");
        return;
    }

    // Creates a matrix from the source data table
    double[,] table = (dgvTestingSource.DataSource as DataTable).ToMatrix();

    // Extract the first and second columns (X and Y)
    double[][] inputs = table.GetColumns(0, 1).ToArray();

    // Extract the expected output labels
    int[] expected = table.GetColumn(2).ToInt32();

    int[] output = new int[expected.Length];

    // Compute the actual machine outputs
    for (int i = 0; i < expected.Length; i++)
        output[i] = svm.Compute(inputs[i]) > 0.5 ? 1 : -1;

    // Use confusion matrix to compute some performance metrics
    ConfusionMatrix confusionMatrix = new ConfusionMatrix(output, expected, 1, -1);
    dgvPerformance.DataSource = new[] { confusionMatrix };

    // Create performance scatter plot
    CreateResultScatterplot(zedGraphControl1, inputs, expected.ToDouble(), output.ToDouble());
}
public void TrainTest2()
{
    double[][] inputs =
    {
        new double[] { -1, -1 },
        new double[] { -1,  1 },
        new double[] {  1, -1 },
        new double[] {  1,  1 }
    };

    int[] or = { -1, -1, -1, +1 };

    // Create a linear Support Vector Machine
    SupportVectorMachine machine = new SupportVectorMachine(inputs[0].Length);

    // Create the sequential minimal optimization teacher
    SequentialMinimalOptimization learn =
        new SequentialMinimalOptimization(machine, inputs, or);
    learn.Complexity = 1;

    // Run the learning algorithm
    learn.Run();

    // Compute the machine's output for each input vector
    double[] output = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
        output[i] = machine.Compute(inputs[i]);

    for (int i = 0; i < output.Length; i++)
    {
        bool sor = or[i] >= 0;
        bool sou = output[i] >= 0;
        Assert.AreEqual(sor, sou);
    }
}
public void Learn()
{
    var xorInput = Normalize(XorInputOriginal[1][0]);
    var classificationIdeal = Classify(XorIdealOriginal[1][0]);

    var svm = new SupportVectorMachine(2, false);
    IMLDataSet trainingSet = new BasicMLDataSet(xorInput, classificationIdeal);
    IMLTrain train = new SVMSearchTrain(svm, trainingSet);

    var epoch = 1;
    do
    {
        train.Iteration();
        Console.WriteLine(@"Epoch #" + epoch + @" Error:" + train.Error);
        epoch++;
    } while (train.Error > 0.01);

    Console.WriteLine(@"SVM Results:");

    var inputString = new StringBuilder();
    var idealString = new StringBuilder();
    var outputString = new StringBuilder();

    foreach (var pair in trainingSet)
    {
        var output = svm.Compute(pair.Input);
        outputString.Append(_alphas[(int)output[0]]);
        inputString.Append(Denormalize(pair.Input));
        idealString.Append(_alphas[(int)pair.Ideal[0]]);
    }

    Console.WriteLine("input=" + inputString + @" , actual=" + outputString + @" , ideal=" + idealString);
    Console.WriteLine("Done");
}
private static void cancer()
{
    // Create a new LibSVM sparse format data reader
    // to read the Wisconsin's Breast Cancer dataset
    //
    var reader = new SparseReader("examples-sparse.txt");

    int[] outputs;

    // Read the classification problem into dense memory
    double[][] inputs = reader.ReadToEnd(sparse: false, labels: out outputs);

    // The dataset has output labels as 4 and 2. We have to convert them
    // into negative and positive labels so they can be properly processed.
    //
    outputs = outputs.Apply(x => x == 2 ? -1 : +1);

    // Create a new linear SVM for the problem dimensions
    var svm = new SupportVectorMachine(inputs: reader.Dimensions);

    // Create a learning algorithm for the problem's dimensions
    var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
    {
        Loss = Loss.L2,
        Complexity = 1000,
        Tolerance = 1e-5
    };

    // Learn the classification
    double error = teacher.Run();

    // Compute the machine's answers for the learned inputs
    int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

    // Create a confusion matrix to show the machine's performance
    var m = new ConfusionMatrix(predicted: answers, expected: outputs);

    // Show it onscreen
    DataGridBox.Show(new ConfusionMatrixView(m));
}
/// <summary>
///   Computes the summed square error for a given set of inputs and outputs.
/// </summary>
///
public double ComputeError(double[][] inputs, double[] expectedOutputs)
{
    // Compute errors
    double error = 0;
    for (int i = 0; i < inputs.Length; i++)
    {
        double output;
        double actual = machine.Compute(inputs[i], out output);
        double expected = expectedOutputs[i];

        if (Double.IsNaN(actual))
            Trace.WriteLine("SVM has produced NaNs");

        double e = (actual - expected);
        error += e * e;
    }

    // Return the error sum of squares
    return error;
}
private static void xor()
{
    // Create a simple binary XOR
    // classification problem:

    double[][] problem =
    {
        //             a  b  a XOR b
        new double[] { 0, 0,    0    },
        new double[] { 0, 1,    1    },
        new double[] { 1, 0,    1    },
        new double[] { 1, 1,    0    },
    };

    // Get the two first columns as the problem
    // inputs and the last column as the output

    // input columns
    double[][] inputs = problem.GetColumns(0, 1);

    // output column
    int[] outputs = problem.GetColumn(2).ToInt32();

    // Plot the problem on screen
    ScatterplotBox.Show("XOR", inputs, outputs).Hold();

    // However, SVMs expect the output value to be
    // either -1 or +1. As such, we have to convert
    // it so the vector contains { -1, +1, +1, -1 }:
    //
    outputs = outputs.Apply(x => x == 0 ? -1 : 1);

    // Create a new linear SVM for two inputs (a and b)
    SupportVectorMachine svm = new SupportVectorMachine(inputs: 2);

    // Create a L2-regularized L2-loss support vector classification teacher
    var teacher = new LinearDualCoordinateDescent(svm, inputs, outputs)
    {
        Loss = Loss.L2,
        Complexity = 1000,
        Tolerance = 1e-5
    };

    // Learn the machine
    double error = teacher.Run(computeError: true);

    // Compute the machine's answers for the learned inputs
    int[] answers = inputs.Apply(x => Math.Sign(svm.Compute(x)));

    // Plot the results
    ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();

    // Use an explicit kernel expansion to transform the
    // non-linear classification problem into a linear one
    //
    // Create a quadratic kernel
    Quadratic quadratic = new Quadratic(constant: 1);

    // Project the inputs into a higher dimensionality space
    double[][] expansion = inputs.Apply(quadratic.Transform);

    // Create a new linear SVM for the transformed input space
    svm = new SupportVectorMachine(inputs: expansion[0].Length);

    // Create the same learning algorithm in the expanded input space
    teacher = new LinearDualCoordinateDescent(svm, expansion, outputs)
    {
        Loss = Loss.L2,
        Complexity = 1000,
        Tolerance = 1e-5
    };

    // Learn the machine
    error = teacher.Run(computeError: true);

    // Compute the machine's answers for the learned inputs
    answers = expansion.Apply(x => Math.Sign(svm.Compute(x)));

    // Plot the results
    ScatterplotBox.Show("SVM's answer", inputs, answers).Hold();
}
/// <summary>
///   Runs the calibration algorithm.
/// </summary>
///
/// <param name="computeError">
///   True to compute error after the training
///   process completes, false otherwise. Default is true.
/// </param>
///
/// <returns>
///   The log-likelihood of the calibrated model.
/// </returns>
///
public double Run(bool computeError)
{
    // This method is a direct implementation of the algorithm
    // as published by Hsuan-Tien Lin, Chih-Jen Lin and Ruby C.
    // Weng, 2007. See references in documentation for more details.
    //

    // Compute the Support Vector Machine outputs
    for (int i = 0; i < distances.Length; i++)
        machine.Compute(inputs[i], out distances[i]);

    // Define the target probabilities we aim to produce
    double high = (positives + 1.0) / (positives + 2.0);
    double low = 1.0 / (negatives + 2.0);

    for (int i = 0; i < outputs.Length; i++)
        targets[i] = (outputs[i] == 1) ? high : low;

    // Initialize
    double A = 0.0;
    double B = Math.Log((negatives + 1.0) / (positives + 1.0));

    double logLikelihood = 0;
    int iterations = 0;

    // Compute the log-likelihood function
    for (int i = 0; i < distances.Length; i++)
    {
        double y = distances[i] * A + B;

        if (y >= 0)
            logLikelihood += targets[i] * y + Special.Log1p(Math.Exp(-y));
        else
            logLikelihood += (targets[i] - 1) * y + Special.Log1p(Math.Exp(y));
    }

    // Start main algorithm loop.
    while (iterations < maxIterations)
    {
        iterations++;

        // Update the Gradient and Hessian
        //  (Using that H' = H + sigma I)
        double h11 = sigma;
        double h22 = sigma;
        double h21 = 0;

        double g1 = 0;
        double g2 = 0;

        for (int i = 0; i < distances.Length; i++)
        {
            double p, q;
            double y = distances[i] * A + B;

            if (y >= 0)
            {
                p = Math.Exp(-y) / (1.0 + Math.Exp(-y));
                q = 1.0 / (1.0 + Math.Exp(-y));
            }
            else
            {
                p = 1.0 / (1.0 + Math.Exp(y));
                q = Math.Exp(y) / (1.0 + Math.Exp(y));
            }

            double d1 = targets[i] - p;
            double d2 = p * q;

            // Update Hessian
            h11 += distances[i] * distances[i] * d2;
            h22 += d2;
            h21 += distances[i] * d2;

            // Update Gradient
            g1 += distances[i] * d1;
            g2 += d1;
        }

        // Check if the gradient is near zero as stopping criteria
        if (Math.Abs(g1) < tolerance && Math.Abs(g2) < tolerance)
            break;

        // Compute modified Newton directions
        double det = h11 * h22 - h21 * h21;
        double dA = -(h22 * g1 - h21 * g2) / det;
        double dB = -(-h21 * g1 + h11 * g2) / det;
        double gd = g1 * dA + g2 * dB;

        double stepSize = 1;

        // Perform a line search
        while (stepSize >= minStepSize)
        {
            double newA = A + stepSize * dA;
            double newB = B + stepSize * dB;
            double newLogLikelihood = 0.0;

            // Compute the new log-likelihood function
            for (int i = 0; i < distances.Length; i++)
            {
                double y = distances[i] * newA + newB;

                if (y >= 0)
                    newLogLikelihood += (targets[i]) * y + Special.Log1p(Math.Exp(-y));
                else
                    newLogLikelihood += (targets[i] - 1) * y + Special.Log1p(Math.Exp(y));
            }

            // Check if a sufficient decrease has been obtained
            if (newLogLikelihood < logLikelihood + 1e-4 * stepSize * gd)
            {
                // Yes, it has. Update parameters with the new values
                A = newA;
                B = newB;
                logLikelihood = newLogLikelihood;
                break;
            }
            else
            {
                // Decrease the step size until it can achieve
                // a sufficient decrease or until it fails.
                stepSize /= 2.0;
            }

            if (stepSize < minStepSize)
            {
                // No decrease could be obtained. Abort with an exception.
                throw new LineSearchFailedException("No sufficient decrease was obtained.");
            }
        }
    }

    if (iterations >= maxIterations)
    {
        // The method hasn't converged within the given
        // maximum number of iterations. Alert the user.
        throw new ConvergenceException("Maximum iterations reached.");
    }

    // The iterative algorithm has converged
    machine.Link = new LogitLinkFunction(beta: -A, constant: -B);

    // Compute log-likelihood if required
    return (computeError) ? LogLikelihood(inputs, outputs) : 0.0;
}
public void ReceiverOperatingCharacteristicConstructorTest3()
{
    // This example shows how to measure the accuracy of a
    // binary classifier using a ROC curve. For this example,
    // we will be creating a Support Vector Machine trained
    // on the following instances:

    double[][] inputs =
    {
        // These are from class -1
        new double[] { 2, 4, 0 },
        new double[] { 5, 5, 1 },
        new double[] { 4, 5, 0 },
        new double[] { 2, 5, 5 },
        new double[] { 4, 5, 1 },
        new double[] { 4, 5, 0 },
        new double[] { 6, 2, 0 },
        new double[] { 4, 1, 0 },

        // These are from class +1
        new double[] { 1, 4, 5 },
        new double[] { 7, 5, 1 },
        new double[] { 2, 6, 0 },
        new double[] { 7, 4, 7 },
        new double[] { 4, 5, 0 },
        new double[] { 6, 2, 9 },
        new double[] { 4, 1, 6 },
        new double[] { 7, 2, 9 },
    };

    int[] outputs =
    {
        -1, -1, -1, -1, -1, -1, -1, -1, // first eight from class -1
        +1, +1, +1, +1, +1, +1, +1, +1  // last eight from class +1
    };

    // Create a linear Support Vector Machine with 3 inputs
    SupportVectorMachine machine = new SupportVectorMachine(inputs: 3);

    // Create the sequential minimal optimization teacher
    SequentialMinimalOptimization learn =
        new SequentialMinimalOptimization(machine, inputs, outputs);

    // Run the learning algorithm
    double error = learn.Run();

    // Extract the input labels predicted by the machine
    double[] predicted = new double[inputs.Length];
    for (int i = 0; i < predicted.Length; i++)
        predicted[i] = machine.Compute(inputs[i]);

    // Create a new ROC curve to assess the performance of the model
    var roc = new ReceiverOperatingCharacteristic(outputs, predicted);
    roc.Compute(100); // Compute a ROC curve with 100 points

    /*
    // Generate a connected scatter plot for the ROC curve and show it on-screen
    ScatterplotBox.Show(roc.GetScatterplot(includeRandom: true), nonBlocking: true)

        .SetSymbolSize(0)      // do not display data points
        .SetLinesVisible(true) // show lines connecting points
        .SetScaleTight(true)   // tighten the scale to points
        .WaitForClose();
    */

    Assert.AreEqual(0.7890625, roc.Area);
    // Assert.AreEqual(0.1174774, roc.StandardError, 1e-6); // Hanley-McNeil estimate
    Assert.AreEqual(0.11958120746409709, roc.StandardError, 1e-6);
}
static void Main()
{
    // Uncomment this entire block for training
    Console.Write("SVM Trainer");

    var Root = "C:\\Users\\Pictor17\\Python";
    string FilePath = Root + "\\Wetdata3s.csv";
    System.IO.TextReader reader = new StreamReader(FilePath); // Open dataset
    CsvReader datR = new CsvReader(reader, false);

    double[][] mat = datR.ToTable().ToArray();
    mat = mat.Transpose();
    double[][] matPCA = new double[mat.Length][];
    mat.CopyTo(matPCA, 0);

    FilePath = Root + "\\Wetlabels3.csv";
    System.IO.TextReader readerL = new StreamReader(FilePath); // Open labels
    CsvReader labR = new CsvReader(readerL, false);
    System.Data.DataTable temp = labR.ToTable();
    double[] dlab = temp.Columns[0].ToArray();
    int[] labels = new int[dlab.Length];

    /* ------------------------------------------------------------------ */
    // Convert values of 0 in the csv to -1 (0 = dry)
    for (int i = 0; i < dlab.Length; i++)
    {
        if (dlab[i] == 0)
            dlab[i] = -1;
        labels[i] = (int)dlab[i];
    }

    /* ------------------------------------------------------------------ */
    // Perform PCA on dataset to reduce features
    //var pca = new PrincipalComponentAnalysis(matPCA, AnalysisMethod.Center);
    //Console.Write("\nPerforming Mean-centred PCA");
    //pca.Compute();
    //double[][] matPCAtransform = pca.Transform(matPCA, 300);

    System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter =
        new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();

    // Transform WellROI from PCA
    //string pPath = String.Format(@"{0}\Pictorial\Files\pca.bin", Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData));
    //try
    //{
    //    using (FileStream LoadPCAStream = File.Open(pPath, FileMode.Open, FileAccess.Read))
    //    {
    //        var pca = (PrincipalComponentAnalysis)formatter.Deserialize(LoadPCAStream);
    //        // Take the 300 most significant features
    //        matPCAtransform = pca.Transform(mat, (int)300);
    //    }
    //}
    //catch (IOException)
    //{
    //}

    /* ------------------------------------------------------------------ */
    // Sort data variables and split into test / train set
    var nRows = mat.Length;
    var nCols = mat[0].Length;
    var nRowsTest = Convert.ToInt32(0.05 * nRows); // 95% Train / 5% Test
    var nRowsTrain = nRows - nRowsTest;

    double[][] trainDat = new double[nRowsTrain][];
    int[] y_train = new int[nRowsTrain];
    //double[] y_train = new double[nRowsTrain];
    for (int k = 0; k < nRowsTrain; k++)
    {
        trainDat[k] = new double[nCols];
        Array.Copy(mat[k], trainDat[k], nCols);
        y_train[k] = (int)dlab[k];
        //y_train[k] = dlab[k];
    }

    double[][] testDat = new double[nRowsTest][];
    int[] y_test = new int[nRowsTest];
    for (int k = 0; k < nRowsTest; k++)
    {
        testDat[k] = new double[nCols];
        Array.Copy(mat[nRows - nRowsTest + k], testDat[k], nCols);
        y_test[k] = (int)dlab[nRows - nRowsTest + k];
    }

    Console.Write("\nDataset Uploaded");

    double[] pred = new double[nRowsTest];
    double[] predksvm = new double[nRowsTest];

    double[][] outputs = new double[y_train.Length][];
    for (int k = 0; k < y_train.Length; k++)
        outputs[k] = new double[] { 0 };
    for (int j = 0; j < y_train.Length; j++)
        outputs[j][0] = y_train[j];

    //int numInputs = 36300;
    //int numClasses = 2;
    //int hidden = 1;
    ////double[][] outputs = Accord.Statistics.Tools.Expand(y_train, numClasses, -1, 1);
    //ActivationNetwork network = new ActivationNetwork(new SigmoidFunction(), numInputs, hidden, 1);
    //Accord.Neuro.Learning.LevenbergMarquardtLearning teacher = new Accord.Neuro.Learning.LevenbergMarquardtLearning(network);
    //for (int i = 0; i < 10; i++)
    //{
    //    double error = teacher.RunEpoch(trainDat, outputs);
    //}

    //Gaussian gauss = new Gaussian();
    //gauss.Gamma = 0.01;
    //Quadratic quad = new Quadratic(1);
    //KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(quad, mat.Columns());
    //SequentialMinimalOptimization ksmo = new SequentialMinimalOptimization(ksvm, trainDat, y_train);
    //ksmo.Complexity = 0.0001;
    //double error = ksmo.Run();

    // Create grid search over the SVM complexity parameter
    Accord.MachineLearning.GridSearchRange[] ranges =
    {
        new Accord.MachineLearning.GridSearchRange("complexity", new double[] { 1E-4, 0.5E-4, 1E-3 }),
    };

    Console.Write("\nPerforming Grid Search on SVM");
    var gridsearch = new Accord.MachineLearning.GridSearch<SupportVectorMachine>(ranges);

    gridsearch.Fitting = delegate(Accord.MachineLearning.GridSearchParameterCollection parameters, out double error)
    {
        double complexity = parameters["complexity"].Value;

        SupportVectorMachine svm = new SupportVectorMachine(mat.Columns());
        SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, trainDat, y_train);
        //ProbabilisticCoordinateDescent smo = new ProbabilisticCoordinateDescent(svm, trainDat, y_train);
        smo.Complexity = complexity;
        error = smo.Run();

        return svm;
    };

    Console.Write("\nTraining Optimised SVM...");
    Accord.MachineLearning.GridSearchParameterCollection bestParameters;
    double minError;
    SupportVectorMachine bsvm = gridsearch.Compute(out bestParameters, out minError);

    var crossvalidation = new Accord.MachineLearning.CrossValidation(size: mat.Length, folds: 10);

    crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
    {
        var trainingInputs = mat.Submatrix(indicesTrain);
        var trainingOutputs = labels.Submatrix(indicesTrain);
        var validationInputs = mat.Submatrix(indicesValidation);
        var validationOutputs = labels.Submatrix(indicesValidation);

        var svm = new SupportVectorMachine(mat.Columns());
        var smo = new SequentialMinimalOptimization(svm, trainingInputs, trainingOutputs);
        //var smo = new ProbabilisticCoordinateDescent(svm, trainingInputs, trainingOutputs);
        smo.Complexity = bestParameters[0].Value;

        double error = smo.Run();
        double validationError = smo.ComputeError(validationInputs, validationOutputs);

        return new Accord.MachineLearning.CrossValidationValues(svm, error, validationError);
    };

    var result = crossvalidation.Compute();
    double trainingErrors = result.Training.Mean;
    double validationErrors = result.Validation.Mean;

    ////var minError = result.Models.Select(y => y.ValidationValue).Min();
    ////var bestModel = result.Models.Where(x => x.ValidationValue == minError).FirstOrDefault();
    ////SupportVectorMachine bsvm = (SupportVectorMachine)bestModel.Model;

    /* ------------------------------------------------------------------ */
    //Accord.MachineLearning.GridSearchRange[] rangesk =
    //{
    //    new Accord.MachineLearning.GridSearchRange("complexity", new double[] { 1E-3, 1E-2, 0.1 }),
    //    new Accord.MachineLearning.GridSearchRange("gamma", new double[] { 1E-4, 0.001, 0.01, 0.1 }),
    //};

    //Console.Write("\nPerforming Grid Search on kSVM");
    //var gridsearchk = new Accord.MachineLearning.GridSearch<KernelSupportVectorMachine>(rangesk);
    //gridsearchk.Fitting = delegate (Accord.MachineLearning.GridSearchParameterCollection parametersk, out double errork)
    //{
    //    double complexity = parametersk["complexity"].Value;
    //    double gamma = parametersk["gamma"].Value;
    //    Gaussian gauss = new Gaussian();
    //    gauss.Gamma = gamma;
    //    KernelSupportVectorMachine ksvm = new KernelSupportVectorMachine(gauss, mat.Columns());
    //    SequentialMinimalOptimization ksmo = new SequentialMinimalOptimization(ksvm, trainDat, y_train);
    //    ksmo.Complexity = complexity;
    //    errork = ksmo.Run();
    //    return ksvm;
    //};

    //Console.Write("\nTraining Optimised kSVM...");
    //Accord.MachineLearning.GridSearchParameterCollection bestParametersk; double minErrork;
    //KernelSupportVectorMachine bksvm = gridsearchk.Compute(out bestParametersk, out minErrork);

    //var crossvalidationk = new Accord.MachineLearning.CrossValidation(size: mat.Length, folds: 10);
    //crossvalidationk.Fitting = delegate (int k, int[] indicesTrain, int[] indicesValidation)
    //{
    //    var trainingInputs = mat.Submatrix(indicesTrain);
    //    var trainingOutputs = labels.Submatrix(indicesTrain);
    //    var validationInputs = mat.Submatrix(indicesValidation);
    //    var validationOutputs = labels.Submatrix(indicesValidation);
    //    var ksvm = new SupportVectorMachine(mat.Columns());
    //    var ksmo = new SequentialMinimalOptimization(ksvm, trainingInputs, trainingOutputs);
    //    ksmo.Complexity = bestParametersk[0].Value;
    //    double error = ksmo.Run();
    //    double validationError = ksmo.ComputeError(validationInputs, validationOutputs);
    //    return new Accord.MachineLearning.CrossValidationValues(ksvm, error, validationError);
    //};
    //var resultk = crossvalidationk.Compute();
    //double trainingErrorsk = resultk.Training.Mean;
    //double validationErrorsk = resultk.Validation.Mean;

    ////double error = smo.Run();
    ////double errork = ksmo.Run();
    //Console.Write("\nTraining Complete");
    ////Console.Write("\nBest C: %0.2f", bestParameters[0].Value);

    /* ------------------------------------------------------------------ */
    // Save SVM Model
    //System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatter = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    FileStream Savestream = new FileStream("svm.bin", FileMode.Create);
    formatter.Serialize(Savestream, bsvm);
    Savestream.Close();
    Console.Write("\nSaved SVM Model");

    ////FileStream PCAStream = new FileStream("pca.bin", FileMode.Create);
    ////formatter.Serialize(PCAStream, pca);
    ////PCAStream.Close();
    ////Console.Write("\nSaved PCA matrix");

    //System.Runtime.Serialization.Formatters.Binary.BinaryFormatter formatterk = new System.Runtime.Serialization.Formatters.Binary.BinaryFormatter();
    //FileStream Savestreamk = new FileStream("ksvm.bin", FileMode.Create);
    //formatter.Serialize(Savestreamk, bksvm);
    //Savestreamk.Close();
    //Console.Write("\nSaved kSVM Model");

    //string tPath = String.Format(@"{0}\Pictorial\Files\svm.bin", Environment.GetFolderPath(Environment.SpecialFolder.ApplicationData));
    //string tPath = "svm.bin"; // Local project location of SVM model

    // Predict values
    for (int i = 0; i < nRowsTest; i++) // Uncomment for training
    {
        pred[i] = bsvm.Compute(testDat[i]); // Uncomment for training
        //predksvm[i] = bksvm.Compute(testDat[i]);
        //pred[i] = ksvm.Compute(testDat[i]);
    }

    /* ------------------------------------------------------------------ */
    // Use confusion matrix to compute some performance metrics
    int[] predict = new int[pred.Length];
    //int[] predictk = new int[predksvm.Length];
    for (int p = 0; p < pred.Length; p++)
    {
        predict[p] = Math.Sign(pred[p]);
        //predictk[p] = Math.Sign(predksvm[p]);
    }

    ConfusionMatrix confusionMatrix = new ConfusionMatrix(predict, y_test, -1, 1); // Predicted, Expected, Positive, Negative; uncomment for training
    //ConfusionMatrix confusionMatrixk = new ConfusionMatrix(predictk, y_test, -1, 1);
}