Example #1
        public void KernelFunctionCacheConstructorTest8()
        {
            double[][] inputs =
            {
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 },
            };

            IKernel kernel = new Polynomial(2);

            int cacheSize = inputs.Length;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(3, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            // upper half
            for (int i = 0; i < inputs.Length - 1; i++)
            {
                for (int j = i + 1; j < inputs.Length - 1; j++)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.Throws<InvalidOperationException>(() => target.GetLeastRecentlyUsedList(), "The cache is not using an LRU list.");
        }
Example #2
        public void KernelFunctionCacheConstructorTest7()
        {
            double[][] inputs =
            {
                new double[] { 0, 1 },
                new double[] { 1, 0 },
                new double[] { 1, 1 },
                new double[] { },
            };

            IKernel kernel = new Polynomial(2);

            int cacheSize = inputs.Length - 1;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(3, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            // upper half
            for (int i = 0; i < inputs.Length - 1; i++)
            {
                for (int j = i + 1; j < inputs.Length - 1; j++)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            var lruList1 = target.GetLeastRecentlyUsedList();

            Assert.AreEqual(2, target.Misses);
            Assert.AreEqual(1, target.Hits);
            Assert.AreEqual(0.66666, target.Usage, 1e-4);

            // upper half, backwards
            for (int i = inputs.Length - 2; i >= 0; i--)
            {
                for (int j = inputs.Length - 2; j >= i; j--)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual   = target.GetOrCompute(j, i);

                    Assert.AreEqual(expected, actual);
                }
            }

            var lruList2 = target.GetLeastRecentlyUsedList();

            Assert.IsTrue(lruList2.SequenceEqual(new[] { lruList1[1], lruList1[2], lruList1[0] }));

            Assert.AreEqual(2, target.Misses);
            Assert.AreEqual(4, target.Hits);
            Assert.AreEqual(0.666666, target.Usage, 1e-5);
        }
Example #3
        public void KernelFunctionCacheConstructorTest4()
        {
            IKernel kernel = new Linear();

            int cacheSize = 100;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(inputs.Length, target.Size);
        }
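Several of these tests read from a class-level inputs fixture that is not included in the snippets. Judging from the expected values (i * j + 1 under a Linear(1) kernel) and the reported cache sizes, the fixture is presumably ten one-dimensional points; a minimal reconstruction, offered only as an assumption, would be:

        // Assumed test fixture (not shown in these snippets): ten 1-D points, so that
        // the Linear(1) kernel gives K(inputs[i], inputs[j]) = i * j + 1.
        double[][] inputs =
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
            new double[] { 5 },
            new double[] { 6 },
            new double[] { 7 },
            new double[] { 8 },
            new double[] { 9 },
        };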
Example #4
        public void KernelFunctionCacheConstructorTest2()
        {
            IKernel kernel = new Linear(1);

            int cacheSize = inputs.Length - 1;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(9, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = i * i + 1;
                double actual   = target.GetOrCompute(i);

                Assert.AreEqual(expected, actual);
            }

            Assert.AreEqual(0, target.Hits);

            int[] hits = { 0, 9, 18, 27, 36, 45, 54, 63, 72, 81 };
            int[] miss = { 9, 9, 9, 9, 9, 9, 9, 9, 9, 9 };

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }

                Assert.AreEqual(hits[i], target.Hits);
                Assert.AreEqual(miss[i], target.Misses);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.AreEqual(9, target.Misses);
            Assert.AreEqual(171, target.Hits);
            Assert.AreEqual(1.0, target.Usage);
        }
Example #5
        public void KernelFunctionCacheConstructorTest2()
        {
            IKernel kernel = new Linear(1);

            int cacheSize = inputs.Length - 1;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(9, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = i * i + 1;
                double actual   = target.GetOrCompute(i);

                Assert.AreEqual(expected, actual);
            }

            Assert.AreEqual(0, target.Hits);

            int[] hits = { 0, 1, 3, 6, 10, 15, 20, 25, 30, 35 };
            int[] miss = { 9, 17, 24, 30, 35, 39, 43, 47, 51, 55 };

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }

                Assert.AreEqual(hits[i], target.Hits);
                Assert.AreEqual(miss[i], target.Misses);
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.AreEqual(100, target.Misses);
            Assert.AreEqual(80, target.Hits);
            Assert.AreEqual(1.0, target.Usage);
        }
Example #6
        public void KernelFunctionCacheConstructorTest3()
        {
            IKernel kernel = new Linear();

            int cacheSize = 5;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(5, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = i * i + 1;
                double actual   = target.GetOrCompute(i);

                Assert.AreEqual(expected, actual);
            }

            Assert.AreEqual(0, target.Hits);


            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.AreEqual(9, target.Hits);
            Assert.AreEqual(81, target.Misses);

            var snapshot = target.GetDataCache();

            foreach (var entry in snapshot)
            {
                double a = target.GetOrCompute(entry.Key.Item1, entry.Key.Item2);
                double b = target.GetOrCompute(entry.Key.Item2, entry.Key.Item1);

                Assert.AreEqual(a, b);
            }


            Assert.AreEqual(81, target.Misses);
            Assert.AreEqual(29, target.Hits);
            Assert.AreEqual(1.0, target.Usage);
        }
Example #7
        public void KernelFunctionCacheConstructorTest6()
        {
            IKernel kernel = new Gaussian(0.6);

            int cacheSize = inputs.Length;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(10, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = kernel.Function(inputs[i], inputs[i]);
                double actual   = target.GetOrCompute(i);

                Assert.AreEqual(expected, actual);
            }

            Assert.AreEqual(0, target.Hits);


            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = kernel.Function(inputs[i], inputs[j]);
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.AreEqual(0, target.Misses);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Usage);
        }
Example #8
        /// <summary>
        /// Runs the main body of the learning algorithm.
        /// </summary>
        protected override void InnerRun()
        {
            TInput[] inputs  = Inputs;
            int[]    outputs = Outputs;
            this.ones = Vector.Create(outputs.Length, 1);

            // Create kernel function cache
            diagonal = new double[inputs.Length];
            cache    = new KernelFunctionCache<TKernel, TInput>(Kernel, inputs);
            for (int i = 0; i < diagonal.Length; i++)
            {
                diagonal[i] = Kernel.Function(inputs[i], inputs[i]) + 1.0 / C[i];
            }


            // 1. Solve to find nu and eta
            double[] eta = conjugateGradient(outputs);
            double[] nu  = conjugateGradient(ones);


            // 2. Compute  s = Y' eta
            double s = 0;

            for (int i = 0; i < outputs.Length; i++)
            {
                s += outputs[i] * eta[i];
            }


            // 3. Find solution
            double b = 0;

            for (int i = 0; i < eta.Length; i++)
            {
                b += eta[i];
            }
            b /= s;

            double[] alpha = new double[nu.Length];
            for (int i = 0; i < alpha.Length; i++)
            {
                alpha[i] = (nu[i] - eta[i] * b) * outputs[i];
            }

            Model.SupportVectors = inputs;
            Model.Weights        = alpha;
            Model.Threshold      = b;
        }
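For reference, the three numbered steps above match the usual least-squares SVM (LS-SVM) solution. Writing H_{ij} = K(x_i, x_j) + \delta_{ij} / C_i for the regularized kernel matrix (the diagonal term built in the loop above), the two conjugateGradient calls presumably solve

    H \eta = y, \qquad H \nu = \mathbf{1},

after which the threshold and weights follow as

    s = y^{\top} \eta, \qquad b = \frac{\mathbf{1}^{\top} \eta}{s}, \qquad \alpha_i = (\nu_i - b\,\eta_i)\, y_i .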
Example #9
        /// <summary>
        ///   Runs the LS-SVM algorithm.
        /// </summary>
        ///
        /// <param name="computeError">
        ///   True to compute error after the training
        ///   process completes, false otherwise. Default is true.
        /// </param>
        ///
        /// <returns>
        ///   The misclassification error rate of
        ///   the resulting support vector machine.
        /// </returns>
        ///
        public double Run(bool computeError)
        {
            // Create kernel function cache
            cache    = new KernelFunctionCache(kernel, inputs);
            diagonal = new double[inputs.Length];
            for (int i = 0; i < diagonal.Length; i++)
            {
                diagonal[i] = kernel.Function(inputs[i], inputs[i]) + gamma;
            }


            // 1. Solve to find nu and eta
            double[] eta = conjugateGradient(outputs);
            double[] nu  = conjugateGradient(ones);


            // 2. Compute  s = Y' eta
            double s = 0;

            for (int i = 0; i < outputs.Length; i++)
            {
                s += outputs[i] * eta[i];
            }


            // 3. Find solution
            double b = 0;

            for (int i = 0; i < eta.Length; i++)
            {
                b += eta[i];
            }
            b /= s;

            double[] alpha = new double[nu.Length];
            for (int i = 0; i < alpha.Length; i++)
            {
                alpha[i] = (nu[i] - eta[i] * b) * outputs[i];
            }

            machine.SupportVectors = inputs;
            machine.Weights        = alpha;
            machine.Threshold      = b;

            // Compute error if required.
            return((computeError) ? ComputeError(inputs, outputs) : 0.0);
        }
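A hypothetical caller for the Run(bool) overload above might look like the sketch below; the KernelSupportVectorMachine and LeastSquaresLearning type names and constructor shapes are assumptions about the surrounding API, not something shown in these snippets.

            // Hypothetical usage sketch (type names assumed, not taken from the snippet):
            double[][] inputs =
            {
                new double[] { 0, 0 }, new double[] { 1, 0 },
                new double[] { 0, 1 }, new double[] { 1, 1 },
            };
            int[] outputs = { -1, 1, 1, -1 };

            var machine = new KernelSupportVectorMachine(new Polynomial(2), 2); // 2 input dimensions
            var teacher = new LeastSquaresLearning(machine, inputs, outputs);

            // Run(true) trains the machine and returns the training error.
            double error = teacher.Run(computeError: true);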
Example #10
        public void KernelFunctionCacheConstructorTest5()
        {
            IKernel kernel = new Linear(1);

            int cacheSize = inputs.Length - 1;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(9, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsTrue(target.Enabled);

            // upper half
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = i + 1; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }


            Assert.AreEqual(9, target.Misses);
            Assert.AreEqual(36, target.Hits);
            Assert.AreEqual(1.0, target.Usage);

            // lower half
            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = i + 1; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(j, i);

                    Assert.AreEqual(expected, actual);
                }
            }


            Assert.AreEqual(9, target.Misses);
            Assert.AreEqual(81, target.Hits);
            Assert.AreEqual(1.0, target.Usage);
        }
Example #11
        public void KernelFunctionCacheConstructorTest()
        {
            IKernel kernel = new Linear(1);

            int cacheSize = 0;

            KernelFunctionCache target = new KernelFunctionCache(kernel, inputs, cacheSize);

            Assert.AreEqual(0, target.Size);
            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Misses);
            Assert.IsFalse(target.Enabled);

            for (int i = 0; i < inputs.Length; i++)
            {
                double expected = i * i + 1;
                double actual   = target.GetOrCompute(i);

                Assert.AreEqual(expected, actual);
            }

            Assert.AreEqual(0, target.Hits);

            for (int i = 0; i < inputs.Length; i++)
            {
                for (int j = 0; j < inputs.Length; j++)
                {
                    double expected = i * j + 1;
                    double actual   = target.GetOrCompute(i, j);

                    Assert.AreEqual(expected, actual);
                }
            }

            Assert.AreEqual(0, target.Hits);
            Assert.AreEqual(0, target.Usage);
        }
Example #12
        /// <summary>
        ///   Runs the SMO algorithm.
        /// </summary>
        ///
        /// <param name="computeError">
        ///   True to compute error after the training
        ///   process completes, false otherwise. Default is true.
        /// </param>
        /// <param name="token">
        ///   A <see cref="CancellationToken"/> which can be used
        ///   to request the cancellation of the learning algorithm
        ///   when it is being run in another thread.
        /// </param>
        ///
        /// <returns>
        ///   The misclassification error rate of
        ///   the resulting support vector machine.
        /// </returns>
        ///
        public double Run(bool computeError, CancellationToken token)
        {
            // The SMO algorithm chooses to solve the smallest possible optimization problem
            // at every step. At every step, SMO chooses two Lagrange multipliers to jointly
            // optimize, finds the optimal values for these multipliers, and updates the SVM
            // to reflect the new optimal values.
            //
            // Reference: http://research.microsoft.com/en-us/um/people/jplatt/smoTR.pdf

            // The algorithm has been updated to implement the improvements suggested
            // by Keerthi et al. The code has been based on the pseudo-code available
            // on the author's technical report.
            //
            // Reference: http://www.cs.iastate.edu/~honavar/keerthi-svm.pdf


            // Initialize variables
            int samples   = inputs.Length;
            int dimension = inputs[0].Length;


            // Initialization heuristics
            if (useComplexityHeuristic)
            {
                c = EstimateComplexity(kernel, inputs);
            }

            int[] positives = outputs.Find(x => x == +1);
            int[] negatives = outputs.Find(x => x == -1);


            // If all examples are positive or negative, terminate
            //   learning early by directly setting the threshold.

            if (positives.Length == 0)
            {
                machine.SupportVectors = new double[0][];
                machine.Weights        = new double[0];
                machine.Threshold      = -1;
                return(0);
            }
            if (negatives.Length == 0)
            {
                machine.SupportVectors = new double[0][];
                machine.Weights        = new double[0];
                machine.Threshold      = +1;
                return(0);
            }


            if (useClassLabelProportion)
            {
                WeightRatio = positives.Length / (double)negatives.Length;
            }


            // Lagrange multipliers
            Array.Clear(alpha, 0, alpha.Length);

            if (isLinear) // Hyperplane weights
            {
                Array.Clear(weights, 0, weights.Length);
            }

            // Error cache
            Array.Clear(errors, 0, errors.Length);

            // Kernel evaluations cache
            this.kernelCache = new KernelFunctionCache(kernel, inputs, cacheSize);

            // [Keerthi] Initialize b_up to -1 and
            //   i_up to any one index of class 1:
            this.b_upper = -1;
            this.i_upper = positives[0];

            // [Keerthi] Initialize b_low to +1 and
            //   i_low to any one index of class 2:
            this.b_lower = +1;
            this.i_lower = negatives[0];

            // [Keerthi] Set error cache for i_low and i_up:
            this.errors[i_lower] = +1;
            this.errors[i_upper] = -1;


            // Prepare indices sets
            activeExamples.Clear();
            nonBoundExamples.Clear();
            atBoundsExamples.Clear();


            // Balance classes
            bool balanced = positiveWeight == 1 && negativeWeight == 1;

            positiveCost = c * positiveWeight;
            negativeCost = c * negativeWeight;



            // Algorithm:
            int  numChanged     = 0;
            int  wholeSetChecks = 0;
            bool examineAll     = true;
            bool diverged       = false;
            bool shouldStop     = false;

            while ((numChanged > 0 || examineAll) && !shouldStop)
            {
                numChanged = 0;
                if (examineAll)
                {
                    // loop I over all training examples
                    for (int i = 0; i < samples; i++)
                    {
                        if (examineExample(i))
                        {
                            numChanged++;
                        }
                    }

                    wholeSetChecks++;
                }
                else
                {
                    if (strategy == SelectionStrategy.Sequential)
                    {
                        if (balanced) // Assume balanced data
                        {
                            // loop I over examples not at bounds
                            for (int i = 0; i < alpha.Length; i++)
                            {
                                if (alpha[i] != 0 && alpha[i] != c)
                                {
                                    if (examineExample(i))
                                    {
                                        numChanged++;
                                    }

                                    if (b_upper > b_lower - 2.0 * tolerance)
                                    {
                                        numChanged = 0; break;
                                    }
                                }
                            }
                        }
                        else // Use different weights for classes
                        {
                            // loop I over examples not at bounds
                            for (int i = 0; i < alpha.Length; i++)
                            {
                                if (alpha[i] != 0)
                                {
                                    if (outputs[i] == +1)
                                    {
                                        if (alpha[i] == positiveCost)
                                        {
                                            continue;
                                        }
                                    }
                                    else // outputs[i] == -1
                                    {
                                        if (alpha[i] == negativeCost)
                                        {
                                            continue;
                                        }
                                    }

                                    if (examineExample(i))
                                    {
                                        numChanged++;
                                    }

                                    if (b_upper > b_lower - 2.0 * tolerance)
                                    {
                                        numChanged = 0; break;
                                    }
                                }
                            }
                        }
                    }
                    else // strategy == Strategy.WorstPair
                    {
                        int attempts = 0;
                        do
                        {
                            attempts++;

                            if (!takeStep(i_upper, i_lower))
                            {
                                break;
                            }

                            if (attempts > samples * maxChecks)
                            {
                                break;
                            }
                        }while ((b_upper <= b_lower - 2.0 * tolerance));

                        numChanged = 0;
                    }
                }

                if (examineAll)
                {
                    examineAll = false;
                }

                else if (numChanged == 0)
                {
                    examineAll = true;
                }

                if (wholeSetChecks > maxChecks)
                {
                    shouldStop = diverged = true;
                }

                if (token.IsCancellationRequested)
                {
                    shouldStop = true;
                }
            }


            // Store information about bounded examples
            if (balanced)
            {
                // Assume equal weights for classes
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] == c)
                    {
                        atBoundsExamples.Add(i);
                    }
                }
            }
            else
            {
                // Use different weights for classes
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (outputs[i] == +1)
                    {
                        if (alpha[i] == positiveCost)
                        {
                            atBoundsExamples.Add(i);
                        }
                    }
                    else // outputs[i] == -1
                    {
                        if (alpha[i] == negativeCost)
                        {
                            atBoundsExamples.Add(i);
                        }
                    }
                }
            }

            if (isCompact)
            {
                // Store the hyperplane directly
                machine.SupportVectors = null;
                machine.Weights        = weights;
                machine.Threshold      = -(b_lower + b_upper) / 2.0;
            }
            else
            {
                // Store Support Vectors in the SV Machine. Only vectors which have Lagrange multipliers
                // greater than zero will be stored as only those are actually required during evaluation.

                int activeCount = activeExamples.Count;

                int[] idx = new int[activeCount];
                activeExamples.CopyTo(idx);

                machine.SupportVectors = new double[activeCount][];
                machine.Weights        = new double[activeCount];
                for (int i = 0; i < idx.Length; i++)
                {
                    int j = idx[i];
                    machine.SupportVectors[i] = inputs[j];
                    machine.Weights[i]        = alpha[j] * outputs[j];
                }
                machine.Threshold = -(b_lower + b_upper) / 2;
            }

            // Clear function cache
            this.kernelCache.Clear();
            this.kernelCache = null;

            if (diverged)
            {
                throw new ConvergenceException("Convergence could not be attained. " +
                                               "Please reduce the cost of misclassification errors by reducing " +
                                               "the complexity parameter C or try a different kernel function.");
            }

            // Compute error if required.
            return((computeError) ? ComputeError(inputs, outputs) : 0.0);
        }
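A hypothetical caller for the Run(bool, CancellationToken) overload above could look like the sketch below; as before, the machine and teacher type names are assumptions, while the Run signature itself is taken from the snippet.

            // Hypothetical usage sketch (type names assumed, not taken from the snippet).
            // inputs / outputs: the training vectors and their ±1 labels, as in the fields above.
            var machine = new KernelSupportVectorMachine(new Gaussian(0.5), 2);
            var smo     = new SequentialMinimalOptimization(machine, inputs, outputs);

            using (var cts = new CancellationTokenSource(TimeSpan.FromMinutes(5)))
            {
                // Cancelling the token makes the main while-loop above set shouldStop
                // and exit early instead of training to convergence.
                double error = smo.Run(computeError: true, token: cts.Token);
            }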
Example #13
        /// <summary>
        ///   Runs the learning algorithm.
        /// </summary>
        ///
        protected override void InnerRun()
        {
            // Initialize variables
            int samples = Inputs.Length;

            double[] c = C;
            TInput[] x = Inputs;
            int[]    y = Outputs;

            // Lagrange multipliers
            this.alpha = new double[samples];

            // Prepare indices sets
            activeExamples   = new HashSet<int>();
            nonBoundExamples = new HashSet<int>();
            atBoundsExamples = new HashSet<int>();

            // Kernel cache
            if (this.cacheSize == -1)
            {
                this.cacheSize = samples;
            }

            using (this.kernelCache = new KernelFunctionCache<TKernel, TInput>(Kernel, Inputs, cacheSize))
            {
                bool diverged = false;


                if (Strategy == SelectionStrategy.SecondOrder)
                {
                    double[] minusOnes = Vector.Create(Inputs.Length, -1.0);
                    Func<int, int[], int, double[], double[]> Q;

                    if (kernelCache.Enabled)
                    {
                        Q = (int i, int[] indices, int length, double[] row) =>
                        {
                            for (int j = 0; j < length; j++)
                            {
                                row[j] = y[i] * y[indices[j]] * kernelCache.GetOrCompute(i, indices[j]);
                            }
                            return(row);
                        };
                    }
                    else
                    {
                        Q = (int i, int[] indices, int length, double[] row) =>
                        {
                            for (int j = 0; j < length; j++)
                            {
                                row[j] = y[i] * y[indices[j]] * Kernel.Function(x[i], x[indices[j]]);
                            }
                            return(row);
                        };
                    }

                    var s = new FanChenLinQuadraticOptimization(alpha.Length, Q, minusOnes, y)
                    {
                        Tolerance   = tolerance,
                        Shrinking   = this.shrinking,
                        Solution    = alpha,
                        Token       = Token,
                        UpperBounds = c
                    };

                    diverged = !s.Minimize();

                    // Store information about active examples
                    for (int i = 0; i < alpha.Length; i++)
                    {
                        if (alpha[i] > 0)
                        {
                            activeExamples.Add(i);
                        }
                    }

                    b_lower = b_upper = s.Rho;
                }
                else // Strategy is Strategy.WorstPair or Strategy.Sequential
                {
                    if (shrinking)
                    {
                        throw new InvalidOperationException("Shrinking heuristic can only be used if Strategy is set to SelectionStrategy.SecondOrder.");
                    }

                    // The SMO algorithm chooses to solve the smallest possible optimization problem
                    // at every step. At every step, SMO chooses two Lagrange multipliers to jointly
                    // optimize, finds the optimal values for these multipliers, and updates the SVM
                    // to reflect the new optimal values.
                    //
                    // Reference: http://research.microsoft.com/en-us/um/people/jplatt/smoTR.pdf

                    // The algorithm has been updated to implement the improvements suggested
                    // by Keerthi et al. The code has been based on the pseudo-code available
                    // on the author's technical report.
                    //
                    // Reference: http://www.cs.iastate.edu/~honavar/keerthi-svm.pdf


                    // Error cache
                    this.errors = new double[samples];

                    // [Keerthi] Initialize b_up to -1 and
                    //   i_up to any one index of class 1:
                    this.b_upper = -1;
                    this.i_upper = y.First(y_i => y_i > 0);

                    // [Keerthi] Initialize b_low to +1 and
                    //   i_low to any one index of class 2:
                    this.b_lower = +1;
                    this.i_lower = y.First(y_i => y_i < 0);

                    // [Keerthi] Set error cache for i_low and i_up:
                    this.errors[i_lower] = +1;
                    this.errors[i_upper] = -1;


                    // Algorithm:
                    int  numChanged     = 0;
                    int  wholeSetChecks = 0;
                    bool examineAll     = true;
                    bool shouldStop     = false;

                    while ((numChanged > 0 || examineAll) && !shouldStop)
                    {
                        if (Token.IsCancellationRequested)
                        {
                            break;
                        }

                        numChanged = 0;
                        if (examineAll)
                        {
                            // loop I over all training examples
                            for (int i = 0; i < samples; i++)
                            {
                                if (examineExample(i))
                                {
                                    numChanged++;
                                }
                            }

                            wholeSetChecks++;
                        }
                        else
                        {
                            if (strategy == SelectionStrategy.Sequential)
                            {
                                // loop I over examples not at bounds
                                for (int i = 0; i < alpha.Length; i++)
                                {
                                    if (alpha[i] != 0 && alpha[i] != c[i])
                                    {
                                        if (examineExample(i))
                                        {
                                            numChanged++;
                                        }

                                        if (b_upper > b_lower - 2.0 * tolerance)
                                        {
                                            numChanged = 0; break;
                                        }
                                    }
                                }
                            }
                            else if (strategy == SelectionStrategy.WorstPair)
                            {
                                int attempts = 0;
                                do
                                {
                                    attempts++;

                                    if (!takeStep(i_upper, i_lower))
                                    {
                                        break;
                                    }

                                    if (attempts > samples * maxChecks)
                                    {
                                        break;
                                    }
                                }while ((b_upper <= b_lower - 2.0 * tolerance));

                                numChanged = 0;
                            }
                            else
                            {
                                throw new InvalidOperationException("Unknown strategy");
                            }
                        }

                        if (examineAll)
                        {
                            examineAll = false;
                        }

                        else if (numChanged == 0)
                        {
                            examineAll = true;
                        }

                        if (wholeSetChecks > maxChecks)
                        {
                            shouldStop = diverged = true;
                        }

                        if (Token.IsCancellationRequested)
                        {
                            shouldStop = true;
                        }
                    }
                }


                // Store information about bounded examples
                for (int i = 0; i < alpha.Length; i++)
                {
                    if (alpha[i] == c[i])
                    {
                        atBoundsExamples.Add(i);
                    }
                }


                // Store Support Vectors in the SV Machine. Only vectors which have Lagrange multipliers
                // greater than zero will be stored as only those are actually required during evaluation.

                int activeCount = activeExamples.Count;
                Model.SupportVectors = new TInput[activeCount];
                Model.Weights        = new double[activeCount];
                int index = 0;
                foreach (var j in activeExamples)
                {
                    Model.SupportVectors[index] = x[j];
                    Model.Weights[index]        = alpha[j] * y[j];
                    index++;
                }

                Model.Threshold = -(b_lower + b_upper) / 2;

                if (isCompact)
                {
                    Model.Compress();
                }

                if (diverged)
                {
                    throw new ConvergenceException("Convergence could not be attained. " +
                                                   "Please reduce the cost of misclassification errors by reducing " +
                                                   "the complexity parameter C or try a different kernel function.");
                }
            }
        }
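The generic InnerRun above plugs into the newer, strongly-typed learning API; a hypothetical end-to-end usage sketch might look like this (class, property, and method names are assumptions about that API, not taken from the snippet):

            // Hypothetical usage sketch (names assumed): train a Gaussian-kernel SVM
            // with the generic teacher and query it afterwards.
            double[][] inputs =
            {
                new double[] { 0, 0 }, new double[] { 1, 1 },
                new double[] { 0, 1 }, new double[] { 1, 0 },
            };
            bool[] outputs = { false, false, true, true };

            var teacher = new SequentialMinimalOptimization<Gaussian>()
            {
                Kernel     = new Gaussian(0.5),
                Complexity = 100,                          // the C parameter
                Strategy   = SelectionStrategy.SecondOrder // takes the FanChenLin path above
            };

            SupportVectorMachine<Gaussian> svm = teacher.Learn(inputs, outputs);
            bool[] predicted = svm.Decide(inputs);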
Example #14
        //---------------------------------------------


        /// <summary>
        ///   Runs the SMO algorithm.
        /// </summary>
        ///
        /// <param name="computeError">
        ///   True to compute error after the training
        ///   process completes, false otherwise. Default is true.
        /// </param>
        ///
        /// <returns>
        ///   The misclassification error rate of
        ///   the resulting support vector machine.
        /// </returns>
        ///
        public double Run(bool computeError)
        {
            // The SMO algorithm chooses to solve the smallest possible optimization problem
            // at every step. At every step, SMO chooses two Lagrange multipliers to jointly
            // optimize, finds the optimal values for these multipliers, and updates the SVM
            // to reflect the new optimal values.
            //
            // Reference: http://research.microsoft.com/en-us/um/people/jplatt/smoTR.pdf

            // The algorithm has been updated to implement the improvements suggested
            // by Keerthi et al. The code has been based on the pseudo-code available
            // on the author's technical report.
            //
            // Reference: http://www.cs.iastate.edu/~honavar/keerthi-svm.pdf


            // Initialize variables
            int samples   = inputs.Length;
            int dimension = inputs[0].Length;

            if (useComplexityHeuristic)
            {
                c = EstimateComplexity(kernel, inputs);
            }

            // Lagrange multipliers
            Array.Clear(alpha, 0, alpha.Length);

            if (isLinear) // Hyperplane weights
            {
                Array.Clear(weights, 0, weights.Length);
            }

            // Error cache
            Array.Clear(errors, 0, errors.Length);

            // Kernel evaluations cache
            this.kernelCache = new KernelFunctionCache(kernel, inputs, cacheSize);

            // [Keerthi] Initialize b_up to -1 and
            //   i_up to any one index of class 1:
            this.b_upper = -1;
            this.i_upper = outputs.Find(x => x == +1)[0];

            // [Keerthi] Initialize b_low to +1 and
            //   i_low to any one index of class 2:
            this.b_lower = +1;
            this.i_lower = outputs.Find(x => x == -1)[0];

            // [Keerthi] Set error cache for i_low and i_up:
            this.errors[i_lower] = +1;
            this.errors[i_upper] = -1;


            // Prepare indices sets
            activeExamples   = new HashSet<int>();
            nonBoundExamples = new HashSet<int>();
            atBoundsExamples = new HashSet<int>();


            // Algorithm:
            int  numChanged = 0;
            bool examineAll = true;

            while (numChanged > 0 || examineAll)
            {
                numChanged = 0;
                if (examineAll)
                {
                    // loop I over all training examples
                    for (int i = 0; i < samples; i++)
                    {
                        if (examineExample(i))
                        {
                            numChanged++;
                        }
                    }
                }
                else
                {
                    if (strategy == SelectionStrategy.Sequential)
                    {
                        // loop I over examples not at bounds
                        for (int i = 0; i < alpha.Length; i++)
                        {
                            if (alpha[i] != 0 && alpha[i] != c)
                            {
                                if (examineExample(i))
                                {
                                    numChanged++;
                                }

                                if (b_upper > b_lower - 2.0 * tolerance)
                                {
                                    numChanged = 0; break;
                                }
                            }
                        }
                    }
                    else // strategy == Strategy.WorstPair
                    {
                        bool success;
                        do
                        {
                            success = takeStep(i_upper, i_lower);
                        }while ((b_upper <= b_lower - 2.0 * tolerance) && success);

                        numChanged = 0;
                    }
                }

                if (examineAll)
                {
                    examineAll = false;
                }

                else if (numChanged == 0)
                {
                    examineAll = true;
                }
            }


            // Store information about bounded examples
            for (int i = 0; i < alpha.Length; i++)
            {
                if (alpha[i] == c)
                {
                    atBoundsExamples.Add(i);
                }
            }

            if (isCompact)
            {
                // Store the hyperplane directly
                machine.SupportVectors = null;
                machine.Weights        = weights;
                machine.Threshold      = -(b_lower + b_upper) / 2.0;
            }
            else
            {
                // Store Support Vectors in the SV Machine. Only vectors which have Lagrange multipliers
                // greater than zero will be stored as only those are actually required during evaluation.

                int activeCount = activeExamples.Count;

                int[] idx = new int[activeCount];
                activeExamples.CopyTo(idx);

                machine.SupportVectors = new double[activeCount][];
                machine.Weights        = new double[activeCount];
                for (int i = 0; i < idx.Length; i++)
                {
                    int j = idx[i];
                    machine.SupportVectors[i] = inputs[j];
                    machine.Weights[i]        = alpha[j] * outputs[j];
                }
                machine.Threshold = -(b_lower + b_upper) / 2;
            }

            // Clear function cache
            this.kernelCache.Clear();

            // Compute error if required.
            return((computeError) ? ComputeError(inputs, outputs) : 0.0);
        }
Example #15
        /// <summary>
        ///   Runs the learning algorithm.
        /// </summary>
        ///
        protected override void InnerRun()
        {
            // The SMO algorithm chooses to solve the smallest possible optimization problem
            // at every step. At every step, SMO chooses two Lagrange multipliers to jointly
            // optimize, finds the optimal values for these multipliers, and updates the SVM
            // to reflect the new optimal values.
            //
            // Reference: http://research.microsoft.com/en-us/um/people/jplatt/smoTR.pdf

            // The algorithm has been updated to implement the improvements suggested
            // by Keerthi et al. The code has been based on the pseudo-code available
            // on the author's technical report.
            //
            // Reference: http://www.cs.iastate.edu/~honavar/keerthi-svm.pdf


            // Initialize variables
            int samples = Inputs.Length;

            double[] c = C;
            TInput[] x = Inputs;
            int[]    y = Outputs;

            // Lagrange multipliers
            this.alpha = new double[samples];

            // Error cache
            this.errors = new double[samples];

            // Kernel cache
            if (this.cacheSize == -1)
            {
                this.cacheSize = samples;
            }


            // Lagrange multipliers
            Array.Clear(alpha, 0, alpha.Length);

            // Error cache
            Array.Clear(errors, 0, errors.Length);

            // Kernel evaluations cache
            this.kernelCache = new KernelFunctionCache<TKernel, TInput>(Kernel, Inputs, cacheSize);

            // [Keerthi] Initialize b_up to -1 and
            //   i_up to any one index of class 1:
            this.b_upper = -1;
            this.i_upper = y.First(y_i => y_i > 0);

            // [Keerthi] Initialize b_low to +1 and
            //   i_low to any one index of class 2:
            this.b_lower = +1;
            this.i_lower = y.First(y_i => y_i < 0);

            // [Keerthi] Set error cache for i_low and i_up:
            this.errors[i_lower] = +1;
            this.errors[i_upper] = -1;


            // Prepare indices sets
            activeExamples   = new HashSet<int>();
            nonBoundExamples = new HashSet<int>();
            atBoundsExamples = new HashSet<int>();

            // Algorithm:
            int  numChanged     = 0;
            int  wholeSetChecks = 0;
            bool examineAll     = true;
            bool diverged       = false;
            bool shouldStop     = false;

            while ((numChanged > 0 || examineAll) && !shouldStop)
            {
                if (Token.IsCancellationRequested)
                {
                    break;
                }

                numChanged = 0;
                if (examineAll)
                {
                    // loop I over all training examples
                    for (int i = 0; i < samples; i++)
                    {
                        if (examineExample(i))
                        {
                            numChanged++;
                        }
                    }

                    wholeSetChecks++;
                }
                else
                {
                    if (strategy == SelectionStrategy.Sequential)
                    {
                        // loop I over examples not at bounds
                        for (int i = 0; i < alpha.Length; i++)
                        {
                            if (alpha[i] != 0 && alpha[i] != c[i])
                            {
                                if (examineExample(i))
                                {
                                    numChanged++;
                                }

                                if (b_upper > b_lower - 2.0 * tolerance)
                                {
                                    numChanged = 0; break;
                                }
                            }
                        }
                    }
                    else // strategy == Strategy.WorstPair
                    {
                        int attempts = 0;
                        do
                        {
                            attempts++;

                            if (!takeStep(i_upper, i_lower))
                            {
                                break;
                            }

                            if (attempts > samples * maxChecks)
                            {
                                break;
                            }
                        }while ((b_upper <= b_lower - 2.0 * tolerance));

                        numChanged = 0;
                    }
                }

                if (examineAll)
                {
                    examineAll = false;
                }

                else if (numChanged == 0)
                {
                    examineAll = true;
                }

                if (wholeSetChecks > maxChecks)
                {
                    shouldStop = diverged = true;
                }

                if (Token.IsCancellationRequested)
                {
                    shouldStop = true;
                }
            }


            // Store information about bounded examples
            for (int i = 0; i < alpha.Length; i++)
            {
                if (alpha[i] == c[i])
                {
                    atBoundsExamples.Add(i);
                }
            }


            // Store Support Vectors in the SV Machine. Only vectors which have Lagrange multipliers
            // greater than zero will be stored as only those are actually required during evaluation.

            int activeCount = activeExamples.Count;

            Model.SupportVectors = new TInput[activeCount];
            Model.Weights        = new double[activeCount];
            int index = 0;

            foreach (var j in activeExamples)
            {
                Model.SupportVectors[index] = x[j];
                Model.Weights[index]        = alpha[j] * y[j];
                index++;
            }
            Model.Threshold = -(b_lower + b_upper) / 2;

            if (isCompact)
            {
                Model.Compress();
            }

            // Clear function cache
            this.kernelCache.Clear();
            this.kernelCache = null;

            if (diverged)
            {
                throw new ConvergenceException("Convergence could not be attained. " +
                                               "Please reduce the cost of misclassification errors by reducing " +
                                               "the complexity parameter C or try a different kernel function.");
            }
        }