Example #1
        private static void SupportVectorMachineTraining(IEnumerable <MatchingPair> trainingData, IEnumerable <MatchingPair> testData, IDictionary <string, IndexableAttributeMetadata> actualMetadata)
        {
            var stopWatch = new Stopwatch();

            stopWatch.Start();

            var trainingInputs  = trainingData.Select(data => data.ToVectorArray(actualMetadata)).ToArray();
            var trainingOutputs = trainingData.Select(data => data.PercentMatch > 0).ToArray();
            var testInputs      = testData.Select(data => data.ToVectorArray(actualMetadata)).ToArray();
            var testOutputs     = testData.Select(data => data.PercentMatch > 0).ToArray();

            var learn = new SequentialMinimalOptimization <Gaussian>()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = true
            };

            SupportVectorMachine <Gaussian> svm = learn.Learn(trainingInputs, trainingOutputs);

            var inSampleScore    = svm.Score(trainingInputs);
            var outOfSampleScore = svm.Score(testInputs);

            Logger.InfoFormat("Result:\nIn-sample: {0}\nOut-of-sample:{1}", string.Join(", ", inSampleScore), string.Join(", ", outOfSampleScore));

            var results        = svm.Decide(trainingInputs);
            var inSampleErrors = trainingOutputs.Where((t, i) => results[i] != t).Count();

            results = svm.Decide(testInputs);
            var outOfSampleErrors = testOutputs.Where((t, i) => results[i] != t).Count();

            Logger.InfoFormat("Errors: In-sample: {0} Out-of-sample: {1}", inSampleErrors, outOfSampleErrors);

            stopWatch.Stop();
            Logger.InfoFormat("Regression Tree learning took {0}", stopWatch.Elapsed);
        }
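The manual error counting above can also be expressed with the loss helpers used in the later examples on this page. A minimal sketch, reusing svm, the input/output arrays and Logger from the example above, and assuming Accord.NET's ZeroOneLoss and the ToZeroOne extension (both shown in Example #7):

            // Misclassification rate (fraction of wrong decisions) on both sets.
            bool[] trainDecisions = svm.Decide(trainingInputs);
            bool[] testDecisions  = svm.Decide(testInputs);

            double inSampleError    = new ZeroOneLoss(trainingOutputs.ToZeroOne()).Loss(trainDecisions.ToZeroOne());
            double outOfSampleError = new ZeroOneLoss(testOutputs.ToZeroOne()).Loss(testDecisions.ToZeroOne());

            Logger.InfoFormat("Error rate: In-sample: {0} Out-of-sample: {1}", inSampleError, outOfSampleError);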
Example #2
        public int[] Predict(List <TrainingValue> predictionData)
        {
            if (!Trained)
            {
                throw new Exception("Train must be called first!");
            }

            double[][] featuresArray = new double[predictionData.Count][];

            for (int i = 0; i < featuresArray.Length; i++)
            {
                featuresArray[i] = predictionData[i].Features;
            }

            switch (type)
            {
            case ClassifierType.DecisionTree:
                return(tree.Decide(featuresArray));

            case ClassifierType.LDA:
                return(pipeline.Decide(featuresArray));

            case ClassifierType.SVM:
                return(convertBoolArray(svm.Decide(featuresArray)));

            case ClassifierType.Bayes:
                return(bayes.Decide(featuresArray));
            }

            return(null);
        }
Example #3
        public double Predict(double[][] observations, int[] labels)
        {
            bool[] output         = machine.Decide(observations);
            int[]  zeroOneAnswers = output.ToZeroOne();

            return(1 - (new AccuracyLoss(labels).Loss(zeroOneAnswers)));
        }
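A brief usage sketch for the method above; the caller, classifier instance and held-out arrays are hypothetical names, and the returned ratio is the fraction of correctly classified samples:

            // Hypothetical caller: classifier wraps a trained machine,
            // heldOutData/heldOutLabels were not seen during training.
            double accuracy = classifier.Predict(heldOutData, heldOutLabels);
            Console.WriteLine("Out-of-sample accuracy: {0:P1}", accuracy);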
Example #4
        private void createSurface(double[,] table)
        {
            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = Matrix.GetRange(table, 0);

            // Generate a Cartesian coordinate system
            double[][] map = Matrix.Mesh(ranges[0], 200, ranges[1], 200);

            // Classify each point in the Cartesian coordinate system
            double[][] surface = map.InsertColumn(svm.Decide(map));

            CreateScatterplot(zedGraphControl2, surface);
        }
Example #5
        private void createSurface(double[,] table)
        {
            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = table.GetRange(0);

            // Generate a Cartesian coordinate system
            double[][] map = Matrix.Mesh(ranges[0], 200, ranges[1], 200);

            // Classify each point in the Cartesian coordinate system
            double[] result = svm.Decide(map).ToMinusOnePlusOne().ToDouble();
            double[,] surface = map.ToMatrix().InsertColumn(result);

            CreateScatterplot(zedGraphControl2, surface);
        }
Example #6
        public void linear_without_threshold_doesnt_solve_xor()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] xor =
            {
                -1,
                1,
                1,
                -1
            };

            // Create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimization()
            {
                Complexity = 1e-5
            };

            // Run the learning algorithm
            SupportVectorMachine machine = learn.Learn(inputs, xor);

            bool[] output = machine.Decide(inputs);

            for (int i = 0; i < output.Length; i++)
            {
                Assert.AreEqual(false, output[i]);
            }
        }
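For contrast, the same four XOR points become separable once a kernel is used. A minimal sketch reusing inputs and xor from the test above, with the Gaussian-kernel teacher shown in Example #26:

            // Same data, but with a Gaussian-kernel machine; this one does solve XOR.
            var kernelLearn = new SequentialMinimalOptimization<Gaussian>()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = true
            };

            SupportVectorMachine<Gaussian> kernelSvm = kernelLearn.Learn(inputs, xor);

            bool[] kernelOutput = kernelSvm.Decide(inputs); // expected: false, true, true, false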
Example #7
        public double Learn(double[][] observations, int[] labels)
        {
            var gridsearch = GridSearch<double[], int>.Create(

                ranges: new
                {
                    Tolerance = GridSearch.Range(1e-10, 1.0, stepSize: 0.05)
                },

                learner: (p) => new LinearDualCoordinateDescent
                {
                    Complexity = 1e+10,
                    Tolerance  = p.Tolerance
                },

                fit: (teacher, x, y, w) => teacher.Learn(x, y, w),

                loss: (actual, expected, m) => new ZeroOneLoss(expected).Loss(actual));

            gridsearch.ParallelOptions.MaxDegreeOfParallelism = 2;

            var result = gridsearch.Learn(observations, labels);

            machine = result.BestModel;
            bool[] output         = machine.Decide(observations);
            int[]  zeroOneAnswers = output.ToZeroOne();

            double ratio = 1 - (new AccuracyLoss(labels).Loss(zeroOneAnswers));

            return(ratio);
        }
Example #8
        public void SequentialMinimalOptimizationConstructorTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] or =
            {
                0,
                0,
                0,
                +1
            };

            // Create a linear Support Vector Machine for the two input dimensions
            SupportVectorMachine machine = new SupportVectorMachine(inputs[0].Length);

            var learn = new SequentialMinimalOptimization(machine, inputs, or);

            learn.Run();

            for (int i = 0; i < inputs.Length; i++)
            {
                bool actual = machine.Decide(inputs[i]);
                Assert.AreEqual(or[i] > 0, actual);
            }
        }
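The test above uses the older constructor-plus-Run() API. A small sketch of the same OR problem trained through the newer Learn() API used by most other examples on this page, reusing inputs and or from above:

            // Equivalent training with the Learn() API (see e.g. Example #30).
            var smo = new SequentialMinimalOptimization()
            {
                UseComplexityHeuristic = true
            };

            SupportVectorMachine trained = smo.Learn(inputs, or);

            bool[] decisions = trained.Decide(inputs); // should match or[i] > 0 for each point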
Example #9
        /// <summary>
        /// Classify our data using a support vector machine classifier and save the model.
        /// </summary>
        /// <param name="train_data">Frame objects that we will use to train classifiers.</param>
        /// <param name="test_data">Frame objects that we will use to test classifiers.</param>
        /// <param name="train_label">Labels of the train data.</param>
        /// <param name="test_label">Labels of the test data.</param>
        /// <param name="Classifier_Path">Path where we want to save the classifier on the disk.</param>
        /// <param name="Classifier_Name">Name of the classifier we want to save.</param>
        public void SVM(double[][] train_data, double[][] test_data, int[] train_label, int[] test_label, String Classifier_Path, String Classifier_Name)
        {
            var learn = new SequentialMinimalOptimization <Gaussian>()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = true
            };

            try
            {
                SupportVectorMachine <Gaussian> svm = learn.Learn(train_data, train_label);

                bool[] prediction = svm.Decide(test_data);

                var cm = GeneralConfusionMatrix.Estimate(svm, test_data, test_label);


                double error = cm.Error;

                Console.WriteLine(error);

                svm.Save(Path.Combine(Classifier_Path, Classifier_Name));
            }
            catch (Exception e)
            { Console.WriteLine(e.StackTrace); }
        }
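A short sketch of how a model saved this way could be loaded back for prediction, using the Serializer.Load pattern shown in Examples #10 and #16; the variables are those of the method above:

            // Reload the serialized machine and classify the test data again.
            var loaded = Serializer.Load<SupportVectorMachine<Gaussian>>(
                Path.Combine(Classifier_Path, Classifier_Name));

            bool[] reloadedPrediction = loaded.Decide(test_data);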
Example #10
        public ConfusionMatrix test(List <pair> test_data, string emotion)
        {
            if (Boolean.Parse(System.Configuration.ConfigurationManager.AppSettings["rep_results"]) == true)
            {
                Accord.Math.Random.Generator.Seed = 0;
            }

            this.Filepath = Path.Combine(trained_location, emotion + "__" + kernel);
            SupportVectorMachine <Linear> svm = Serializer.Load <SupportVectorMachine <Linear> >(this.Filepath);

            bool[] prediction = svm.Decide(this.test_sample.ToArray());
            int[]  results    = prediction.ToZeroOne();

            this.svm_feed_test.Clear();
            for (int _counter = 0; _counter < test_data.Count; _counter++)
            {
                if (test_data[_counter].pair_emot == emotion)
                {
                    this.svm_feed_test.Add(1);
                }
                else
                {
                    this.svm_feed_test.Add(0);
                }
            }

            return(new ConfusionMatrix(results, this.svm_feed_test.ToArray(), 1, 0));
        }
Example #11
    // Compute car control based on sensor readings
    void ComputeControl(float sensorL, float sensorF, float sensorR, float carVelocity, float forward)
    {
        // Inputs
        double[][] inputsT = new double[1][];
        inputsT[0]    = new double[4];
        inputsT[0][0] = sensorL;
        inputsT[0][1] = sensorF;
        inputsT[0][2] = sensorR;
        inputsT[0][3] = carVelocity;

        double[][] inputs = new double[1][];
        inputs[0]    = new double[5];
        inputs[0][0] = sensorL;
        inputs[0][1] = sensorF;
        inputs[0][2] = sensorR;
        inputs[0][3] = carVelocity;
        inputs[0][4] = forward;

        answerThrust = treeThrust.Decide(inputsT);
        answerSteer  = treeSteer.Decide(inputs);

        // Thrust

        if (answerThrust[0] == true)
        {
            thrust = pod[1];
            //Debug.Log("Acelera!");
        }
        else if (answerThrust[0] == false)
        {
            thrust = -pod[2];
            //Debug.Log("Freia!");
        }

        // Steer

        if (answerSteer[0] == true)
        {
            if (sensorLeft > sensorRight)
            {
                steer = pod[3];
                //Debug.Log("Vire a Esquerda!");
            }
            else
            {
                steer = -pod[3];
                //Debug.Log("Vire a Direita!");
            }
        }
        else if (answerSteer[0] == false)
        {
            steer = 0;
            //Debug.Log("Vai reto!");
        }

        // Command
        rb.AddRelativeForce(new Vector2(0f, thrust));
        rb.AddTorque(steer);
    }
Example #12
        public bool Predict(string matchId)
        {
            var inputs = CastListOfBioMeasuresToListOfDoubles(
                unitOfWork.Matches.GetBioMeasureForPrediction(matchId));

            return(svm.Decide(inputs)
                   .FirstOrDefault());
        }
Example #13
 public void Test(List <Person> testingPeople, int skillSetSize)
 {
     double[][] inputs = _dataPointService.GenerateDataPointsFromPeople(testingPeople, skillSetSize);
     testPredictions = _supportVectorMachine.Decide(inputs);
     File.WriteAllLines(
         @"C:\Users\Niall\Documents\Visual Studio 2015\Projects\LinkedInSearchUi\LinkedIn Dataset\XML\support_vector_machine_test_predictions.txt" // <<== Put the file name here
         , testPredictions.Select(d => d.ToString()).ToArray());
 }
Example #14
        private void createSurface(double[,] table)
        {
            // Get the ranges for each variable (X and Y)
            DoubleRange[] ranges = table.GetRange(0);

            // Generate a Cartesian coordinate system
            double[][] map = Matrix.Cartesian(
                Vector.Interval(ranges[0], 0.2),
                Vector.Interval(ranges[1], 0.5));

            // Classify each point in the Cartesian coordinate system
            double[] result = svm.Decide(map).ToMinusOnePlusOne().ToDouble();
            double[,] surface = map.ToMatrix().InsertColumn(result);

            DataTable trainingData = DataSet.Tables["InterestedTraining"];

            CreateSurfaceScatterplot(zedGraphControl2, surface);
        }
Example #15
        /// <summary>
        /// <inheritdoc />
        /// </summary>
        public override void Run()
        {
            var inputs = data.GetSelectedInput(features);

            var result = svm.Decide(inputs);

            for (int i = 0; i < result.Length; i++)
            {
                ClassificationOutputs[i] = Convert.ToInt32(result[i]);
            }
        }
Example #16
File: MachineL.cs Project: M-doubleC/ZyLAB
    public int[] TestSession(List <string[]> TestData, string emo)
    {
        double[][] TestD = Embedder(TestData);// W2Vectorizer() can be used as alternative as well
        //double[][] TestD = codeB.Transform(TestData.ToArray());
        string filename = Path.Combine(".../ZyLAB_Trained", language + "_" + emo + "_EmoKernel.accord");

        KernelSVM = Serializer.Load <SupportVectorMachine <Gaussian> >(filename);
        bool[] answers        = KernelSVM.Decide(TestD);
        int[]  zeroOneAnswers = answers.ToZeroOne();
        return(zeroOneAnswers);
    }
Example #17
        public bool isImageEmpty(Bitmap src)
        {
            bool            ret  = false;
            Bitmap          g    = Grayscale.CommonAlgorithms.BT709.Apply(src);
            ImageStatistics stat = new ImageStatistics(g);

            double[][] ds = { new double[] { stat.Gray.Mean, stat.Gray.Median, stat.Gray.StdDev } };
            Program.logIt(string.Format("{0},{1},{2}", ds[0][0], ds[0][1], ds[0][2]));
            bool[] res = svm.Decide(ds);
            ret = !res[0];
            return(ret);
        }
Example #18
        public static bool[] SVMDecide(SupportVectorMachine <Gaussian> svm, IDataView input)
        {
            var convertedInput = IDataViewToAccord(input).inputs;

            //var lrScore = svm.Score(convertedInput);
            //var lrProbability = svm.Probabilities(convertedInput);
            //bool[] output = new bool[lrProbability.Length];
            //for (int i = 0; i < lrProbability.Length; i++)
            //{
            //	output[i] = lrProbability[i][1] >= threshold;
            //}
            return(svm.Decide(convertedInput));
        }
Example #19
            public bool IsFace(Bitmap image)
            {
                if (image == null)
                {
                    throw new ArgumentNullException(nameof(image));
                }

                using (var windowedImageForFeatureExtraction = image.ExtractImageSectionAndResize(new Rectangle(new Point(0, 0), image.Size), new Size(_sampleWidth, _sampleHeight)))
                {
                    return(_svm.Decide(
                               FeatureExtractor.GetFor(windowedImageForFeatureExtraction, _blockSize, optionalHogPreviewImagePath: null, normaliser: _normaliser).ToArray()
                               ));
                }
            }
Example #20
        public double Learn(double[][] observations, int[] labels)
        {
            var learn = new SequentialMinimalOptimization <ChiSquare>()
            {
                UseKernelEstimation = true
            };

            machine = learn.Learn(observations, labels);
            bool[] output         = machine.Decide(observations);
            int[]  zeroOneAnswers = output.ToZeroOne();

            double ratio = 1 - (new AccuracyLoss(labels).Loss(zeroOneAnswers));

            return(ratio);
        }
Example #21
        public bool classifierSVM(data d)
        {
            int D = d.d;

            double[] input = new double[D];
            bool     output;

            for (int i = 0; i < D; ++i)
            {
                input[i] = d.msg[i];
            }

            output = svm.Decide(input);

            return(output);
        }
Example #22
        public double Learn(double[][] observations, int[] labels)
        {
            var learn = new SequentialMinimalOptimization <Gaussian>()
            {
                UseComplexityHeuristic = true,
                Kernel = new Gaussian(1.2)
            };


            machine = learn.Learn(observations, labels);
            bool[] output         = machine.Decide(observations);
            int[]  zeroOneAnswers = output.ToZeroOne();

            double ratio = 1 - (new AccuracyLoss(labels).Loss(zeroOneAnswers));

            return(ratio);
        }
Example #23
        public void Train()
        {
            var inputsOutputs = unitOfWork.Matches.GetBioMeasuresForTraining();
            var inputs        = CastListOfBioMeasuresToListOfDoubles(inputsOutputs);
            var outputs       = inputsOutputs
                                .Select(match => Convert.ToBoolean((int)match.FirstOrDefault().Match.MatchResult))
                                .ToArray();

            var smo = new SequentialMinimalOptimization <Gaussian>()
            {
                Complexity = 100
            };

            svm = smo.Learn(inputs, outputs);

            bool[] prediction = svm.Decide(inputs);
        }
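The prediction array computed at the end of Train() above is not otherwise used; one way it could be turned into an in-sample error figure, assuming the same ZeroOneLoss and ToZeroOne helpers used elsewhere on this page:

            // Fraction of training samples the machine classifies incorrectly.
            double trainingError = new ZeroOneLoss(outputs.ToZeroOne()).Loss(prediction.ToZeroOne());
            Console.WriteLine("In-sample error: {0}", trainingError);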
Example #24
        public double Learn(double[][] observations, int[] labels)
        {
            //var learn = new LinearDualCoordinateDescent()
            //{
            //    Loss = Loss.L2,
            //    Complexity = 1000,
            //    Tolerance = 1e-5
            //};
            SequentialMinimalOptimization learn = new SequentialMinimalOptimization()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = false
            };

            machine = learn.Learn(observations, labels);
            bool[] output         = machine.Decide(observations);
            int[]  zeroOneAnswers = output.ToZeroOne();

            return(1 - (new AccuracyLoss(labels).Loss(zeroOneAnswers)));
        }
Example #25
        private void PredictImages(object sender, RoutedEventArgs e)
        {
            imageInfo.Text = "Predicting";

            if (bow == null)
            {
                MessageBox.Show("No BoW model!");
                return;
            }
            if (svmIm == null)
            {
                MessageBox.Show("No SVM model!");
                return;
            }

            Bitmap[] trainIms = new Bitmap[imagesEdited.Count];

            ushort z = 0;

            foreach (BitmapImage b in imagesEdited)
            {
                trainIms[z++] = UtilFn.BitmapImage2Bitmap(b);
            }

            double[][] features = bow.Transform(trainIms);

            output = svmIm.Decide(features);
            if (output != null)
            {
                if (output[j] && !cvs.Children.Contains(rectSel2))
                {
                    cvs.Children.Add(rectSel2);
                }
                else if (!output[j] && cvs.Children.Contains(rectSel2))
                {
                    cvs.Children.Remove(rectSel2);
                }
            }
            imageInfo.Text       = "Done";
            btnCorrect.IsEnabled = true;
        }
Example #26
        public void learn_new_method()
        {
            #region doc_xor_normal
            // As an example, we will try to learn a decision machine
            // that can replicate the "exclusive-or" logical function:

            double[][] inputs =
            {
                new double[] { 0, 0 }, // the XOR function takes two booleans
                new double[] { 0, 1 }, // and computes their exclusive or: the
                new double[] { 1, 0 }, // output is true only if the two booleans
                new double[] { 1, 1 }  // are different
            };

            int[] xor = // this is the output of the xor function
            {
                0,      // 0 xor 0 = 0 (inputs are equal)
                1,      // 0 xor 1 = 1 (inputs are different)
                1,      // 1 xor 0 = 1 (inputs are different)
                0,      // 1 xor 1 = 0 (inputs are equal)
            };

            // Now, we can create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimization <Gaussian>()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = true
            };

            // And then we can obtain a trained SVM by calling its Learn method
            SupportVectorMachine <Gaussian> svm = learn.Learn(inputs, xor);

            // Finally, we can obtain the decisions predicted by the machine:
            bool[] prediction = svm.Decide(inputs);
            #endregion

            Assert.AreEqual(prediction, Classes.Decide(xor));
        }
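Once trained, the same machine can also classify a single new observation; Decide on one double[] returns a single bool, as in Example #21. A minimal sketch reusing svm from the example above:

            // Classify one previously unseen point; true corresponds to class 1.
            bool single = svm.Decide(new double[] { 0, 1 }); // XOR(0, 1) = 1, so this should be true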
Example #27
        public void Train(List <Person> trainingPeople, int skillSetSize)
        {
            double[][] inputs = _dataPointService.GenerateDataPointsFromPeople(trainingPeople, skillSetSize);

            int[] expectedResults = _dataPointService.GenerateExpectedResultFromPeople(trainingPeople);

            // Now, we can create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimization()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = false
            };

            // And then we can obtain a trained SVM by calling its Learn method
            _supportVectorMachine = learn.Learn(inputs, expectedResults);

            // Finally, we can obtain the decisions predicted by the machine:
            trainingPredictions = _supportVectorMachine.Decide(inputs);

            File.WriteAllLines(
                @"C:\Users\Niall\Documents\Visual Studio 2015\Projects\LinkedInSearchUi\LinkedIn Dataset\XML\predictions.txt" // <<== Put the file name here
                , trainingPredictions.Select(d => d.ToString()).ToArray());
        }
Example #28
File: MainForm.cs Project: tranntn/DoAn3
        /// <summary>
        ///   Tests the previously created machine into a new set of data.
        /// </summary>
        ///
        private void btnTestingRun_Click(object sender, EventArgs e)
        {
            if (svm == null || dgvTestingSource.DataSource == null)
            {
                MessageBox.Show("Please create a machine first.");
                return;
            }


            // Creates a matrix from the source data table
            double[,] table = (dgvTestingSource.DataSource as DataTable).ToMatrix();
            // Extract the six input feature columns
            double[][] inputs = table.GetColumns(0, 1, 2, 3, 4, 5).ToJagged();
            // Extract the expected output labels
            bool[] expected = Classes.Decide(table.GetColumn(6));
            // Compute the actual machine outputs
            bool[] output = svm.Decide(inputs);
            // Use confusion matrix to compute some performance metrics
            dgvPerformance.DataSource = new [] { new ConfusionMatrix(output, expected) };
            // Create performance scatter plot
            CreateResultScatterplot(zedGraphControl1, inputs,
                                    expected.ToMinusOnePlusOne().ToDouble(),
                                    output.ToMinusOnePlusOne().ToDouble());
        }
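If only a headline number is needed, the confusion matrix built above also exposes summary statistics. A small sketch, assuming Accord.NET's ConfusionMatrix provides an Accuracy property, and reusing output and expected from the handler above:

            // Summary statistic from the same confusion matrix (assumed Accuracy property).
            var cm = new ConfusionMatrix(output, expected);
            Console.WriteLine("Accuracy: {0:P1}", cm.Accuracy);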
Example #29
        public void LeastSquaresConstructorTest()
        {
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] or =
            {
                0,
                0,
                0,
                +1
            };

            // Create a linear Support Vector Machine for the two input dimensions
            var machine = new SupportVectorMachine(inputs[0].Length);

            var learn = new LeastSquaresLearning(machine, inputs, or);

            double error = learn.Run();

            Assert.AreEqual(0, error);

            {
                int[] iout = new int[inputs.Length];
                machine.ToMulticlass().Decide(inputs, iout);
                for (int i = 0; i < iout.Length; i++)
                {
                    Assert.AreEqual(or[i], iout[i]);
                }
            }
            {
                double[] dout = new double[inputs.Length];
                machine.ToMulticlass().Decide(inputs, dout);
                for (int i = 0; i < dout.Length; i++)
                {
                    Assert.AreEqual(or[i], dout[i]);
                }
            }
            {
                bool[] bout = new bool[inputs.Length];
                machine.Decide(inputs, bout);
                Assert.IsFalse(bout[0]);
                Assert.IsFalse(bout[1]);
                Assert.IsFalse(bout[2]);
                Assert.IsTrue(bout[3]);
            }
            {
                int[][] iiout = Jagged.Create <int>(inputs.Length, 2);
                machine.ToMulticlass().Decide(inputs, iiout);
                for (int i = 0; i < iiout.Length; i++)
                {
                    Assert.AreEqual(or[i], iiout[i][0]);
                    Assert.AreEqual(or[i], iiout[i][1] == 1 ? 0 : 1);
                }
            }
            {
                bool[][] bbout = Jagged.Create <bool>(inputs.Length, 2);
                machine.ToMulticlass().Decide(inputs, bbout);
                for (int i = 0; i < bbout.Length; i++)
                {
                    Assert.AreEqual(or[i], bbout[i][0] ? 1 : 0);
                    Assert.AreEqual(or[i], bbout[i][1] ? 0 : 1);
                }
            }
        }
Example #30
        public void learn_linear()
        {
            #region doc_xor_linear
            // As an example, we will try to learn a linear machine  that can
            // replicate the "exclusive-or" logical function. However, since we
            // will be using a linear SVM, we will not be able to solve this
            // problem perfectly as the XOR is a non-linear classification problem:
            double[][] inputs =
            {
                new double[] { 0, 0 }, // the XOR function takes two booleans
                new double[] { 0, 1 }, // and computes their exclusive or: the
                new double[] { 1, 0 }, // output is true only if the two booleans
                new double[] { 1, 1 }  // are different
            };

            int[] xor = // this is the output of the xor function
            {
                0,      // 0 xor 0 = 0 (inputs are equal)
                1,      // 0 xor 1 = 1 (inputs are different)
                1,      // 1 xor 0 = 1 (inputs are different)
                0,      // 1 xor 1 = 0 (inputs are equal)
            };

            // Now, we can create the sequential minimal optimization teacher
            var learn = new SequentialMinimalOptimization()
            {
                UseComplexityHeuristic = true,
                UseKernelEstimation    = false
            };

            // And then we can obtain a trained SVM by calling its Learn method
            SupportVectorMachine svm = learn.Learn(inputs, xor);

            // Finally, we can obtain the decisions predicted by the machine:
            bool[] prediction = svm.Decide(inputs);
            #endregion

            Assert.AreEqual(prediction[0], false);
            Assert.AreEqual(prediction[1], false);
            Assert.AreEqual(prediction[2], false);
            Assert.AreEqual(prediction[3], false);


            int[] or = // this is the output of the or function
            {
                0,     // 0 or 0 = 0 (both inputs are false)
                1,     // 0 or 1 = 1 (at least one input is true)
                1,     // 1 or 0 = 1 (at least one input is true)
                1,     // 1 or 1 = 1 (at least one input is true)
            };


            learn = new SequentialMinimalOptimization()
            {
                Complexity          = 1e+8,
                UseKernelEstimation = false
            };

            svm = learn.Learn(inputs, or);

            prediction = svm.Decide(inputs);

            Assert.AreEqual(prediction[0], false);
            Assert.AreEqual(prediction[1], true);
            Assert.AreEqual(prediction[2], true);
            Assert.AreEqual(prediction[3], true);
        }