public void RunTest3()
        {
            // Example XOR problem (the positive class corresponds to xor == 0)
            double[][] inputs =
            {
                new double[] { 0, 0 }, // 0 xor 0: 0 (label +1)
                new double[] { 0, 1 }, // 0 xor 1: 1 (label -1)
                new double[] { 1, 0 }, // 1 xor 0: 1 (label -1)
                new double[] { 1, 1 }  // 1 xor 1: 0 (label +1)
            };

            // Outputs for a two-class (dichotomy) SVM must be given as -1 or +1
            int[] labels =
            {
                1, -1, -1, 1
            };

            // Create a Kernel Support Vector Machine for the given inputs
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(0.1), inputs[0].Length);

            // Instantiate a new learning algorithm for SVMs
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, labels);

            // Set up the learning algorithm
            smo.Complexity = 1.0;

            // Run the learning algorithm
            double error = smo.Run();
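            // The value returned by Run() is the training error of the learned machine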

            // Instantiate the probabilistic learning calibration
            ProbabilisticOutputCalibration calibration = new ProbabilisticOutputCalibration(svm, inputs, labels);

            // Run the calibration algorithm
            double loglikelihood = calibration.Run();
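            // Run() returns the log-likelihood of the calibrated machine on the training data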


            // Compute the decision output for one of the input vectors,
            // while also retrieving the probability of the answer

            double probability;
            int decision = svm.Compute(inputs[0], out probability);

            // At this point, decision is +1 with a probability of 75%

            Assert.AreEqual(1, decision);
            Assert.AreEqual(0.74999975815069375, probability);
        }

        public void RunTest1()
        {
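            // Another XOR-style problem, this time with inputs and labels in {-1, +1}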
            double[][] inputs =
            {
                new double[] { -1, -1 },
                new double[] { -1,  1 },
                new double[] {  1, -1 },
                new double[] {  1,  1 }
            };

            int[] outputs =
            {
                -1,
                 1,
                 1,
                -1
            };

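            // Create a Gaussian kernel Support Vector Machine for the two-dimensional inputs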
            KernelSupportVectorMachine svm = new KernelSupportVectorMachine(new Gaussian(3.6), 2);

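            // Create the SMO learning algorithm for this machine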
            SequentialMinimalOptimization smo = new SequentialMinimalOptimization(svm, inputs, outputs);

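            // Run the learning algorithm and retrieve the training error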
            double error1 = smo.Run();

            Assert.AreEqual(0, error1);

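            // Before calibration, Compute returns the class decision and exposes the
            // machine's raw output (its decision value) through the out parameter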
            double[] distances = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out distances[i]);
                Assert.AreEqual(outputs[i], y);
            }


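            // Calibrate the machine to produce probabilistic outputs (Platt's scaling)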
            ProbabilisticOutputCalibration target = new ProbabilisticOutputCalibration(svm, inputs, outputs);

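            // Log-likelihood of the machine before calibration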
            double ll0 = target.LogLikelihood(inputs, outputs);

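            // Running the calibration returns the log-likelihood of the calibrated machine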
            double ll1 = target.Run();

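            // Recomputing the log-likelihood after calibration should match the value returned by Run()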
            double ll2 = target.LogLikelihood(inputs, outputs);

            Assert.AreEqual(3.4256203116918824, ll1);
            Assert.AreEqual(ll1, ll2);
            Assert.IsTrue(ll1 > ll0);

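            // After calibration, the out parameter of Compute holds the probability of the positive class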
            double[] probs = new double[outputs.Length];
            for (int i = 0; i < outputs.Length; i++)
            {
                int y = svm.Compute(inputs[i], out probs[i]);
                Assert.AreEqual(outputs[i], y);
            }

            Assert.AreEqual(0.25, probs[0], 1e-5);
            Assert.AreEqual(0.75, probs[1], 1e-5);
            Assert.AreEqual(0.75, probs[2], 1e-5);
            Assert.AreEqual(0.25, probs[3], 1e-5);

            foreach (var p in probs)
                Assert.IsFalse(Double.IsNaN(p));

        }

        public void learn_test()
        {
            #region doc_learn
            double[][] inputs = // Example XOR problem (the positive class corresponds to xor == 0)
            {
                new double[] { 0, 0 }, // 0 xor 0: 0 (label 1)
                new double[] { 0, 1 }, // 0 xor 1: 1 (label 0)
                new double[] { 1, 0 }, // 1 xor 0: 1 (label 0)
                new double[] { 1, 1 }  // 1 xor 1: 0 (label 1)
            };

            int[] outputs = // class labels: 1 when the xor result is 0, and 0 otherwise
            {
                1, 0, 0, 1
            };

            // Instantiate a new SMO learning algorithm for SVMs
            var smo = new SequentialMinimalOptimization<Gaussian>()
            {
                Kernel = new Gaussian(0.1),
                Complexity = 1.0
            };

            // Learn a SVM using the algorithm
            var svm = smo.Learn(inputs, outputs);

            // Predict labels for each input sample
            bool[] predicted = svm.Decide(inputs);
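            // Decide returns true for the positive class and false for the negative class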

            // Compute classification error
            double error = new ZeroOneLoss(outputs).Loss(predicted);
            
            // Instantiate the probabilistic calibration (using Platt's scaling)
            var calibration = new ProbabilisticOutputCalibration<Gaussian>(svm);

            // Run the calibration algorithm
            calibration.Learn(inputs, outputs); // returns the same machine
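
            // Platt's scaling fits a sigmoid to the machine's raw scores, so that
            // P(y = +1 | x) = 1 / (1 + exp(A * f(x) + B)), where f(x) is the SVM's
            // decision function and A, B are estimated during calibration.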

            // Predict probabilities of each input sample
            double[] probabilities = svm.Probability(inputs);

            // Compute the log-loss (binary cross-entropy) of the predicted probabilities
            double loss = new BinaryCrossEntropyLoss(outputs).Loss(probabilities);

            // Compute the decision output for one of the input vectors,
            // while also retrieving the probability of the answer

            bool decision;
            double probability = svm.Probability(inputs[0], out decision);
            #endregion

            // At this point, decision is true (the positive class) with a probability of 75%

            Assert.AreEqual(true, decision);
            Assert.AreEqual(0, error);
            Assert.AreEqual(5.5451735748925355, loss);
            Assert.AreEqual(0.74999975815069375, probability, 1e-10);
            Assert.IsTrue(svm.IsProbabilistic);
            Assert.AreEqual(-1.0986109988055595, svm.Weights[0]);
            Assert.AreEqual(1.0986109988055595, svm.Weights[1]);
            Assert.AreEqual(-1.0986109988055595, svm.Weights[2]);
            Assert.AreEqual(1.0986109988055595, svm.Weights[3]);
        }