Example #1
        private static void solve_epsilon_svr(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
        {
            int l = prob.Lenght;

            double[] alpha2      = new double[2 * l];
            double[] linear_term = new double[2 * l];
            sbyte[]  y           = new sbyte[2 * l];
            int      i;

            for (i = 0; i < l; i++)
            {
                alpha2[i]      = 0;
                linear_term[i] = param.P - prob.Y[i];
                y[i]           = 1;

                alpha2[i + l]      = 0;
                linear_term[i + l] = param.P + prob.Y[i];
                y[i + l]           = -1;
            }

            Solver s = new Solver();

            s.Solve(2 * l, new SvrQ(prob, param), linear_term, y,
                    alpha2, param.C, param.C, param.Eps, si, param.Shrinking);

            double sum_alpha = 0;

            for (i = 0; i < l; i++)
            {
                alpha[i]   = alpha2[i] - alpha2[i + l];
                sum_alpha += Math.Abs(alpha[i]);
            }
            Svm.info("nu = " + sum_alpha / (param.C * l) + "\n");
        }
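The solver works on a doubled problem of size 2*l: alpha2[i] and alpha2[i + l] are the two multipliers of the epsilon-tube, and the coefficient returned in alpha[i] is their difference. Below is a minimal sketch (not part of the library) of how such coefficients, together with rho from SolutionInfo, would enter an SVR prediction; the kernel delegate is an assumption standing in for the library's own kernel evaluation.

        // Hypothetical sketch: combine the SVR coefficients (model.SupportVectorsCoefficients[0])
        // and rho (model.Rho[0]) into a decision value for a new sample x.
        private static double SvrDecisionValueSketch(SvmNode[][] supportVectors, double[] coefficients,
                                                     double rho, SvmNode[] x,
                                                     Func<SvmNode[], SvmNode[], double> kernel)
        {
            double sum = 0;
            for (int i = 0; i < supportVectors.Length; i++)
            {
                // Each coefficient is alpha2[i] - alpha2[i + l] from the doubled problem above.
                sum += coefficients[i] * kernel(supportVectors[i], x);
            }
            return sum - rho;
        }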
Example #2
        protected void reconstruct_gradient()
        {
            // reconstruct inactive elements of G from G_bar and free variables

            if (_activeSize == _length)
            {
                return;
            }

            int i, j;
            int nr_free = 0;

            for (j = _activeSize; j < _length; j++)
            {
                _g[j] = _gBar[j] + _p[j];
            }

            for (j = 0; j < _activeSize; j++)
            {
                if (is_free(j))
                {
                    nr_free++;
                }
            }

            if (2 * nr_free < _activeSize)
            {
                Svm.info("\nWARNING: using -h 0 may be faster\n");
            }

            if (nr_free * _length > 2 * _activeSize * (_length - _activeSize))
            {
                for (i = _activeSize; i < _length; i++)
                {
                    double[] Q_i = _q.GetQ(i, _activeSize);
                    for (j = 0; j < _activeSize; j++)
                    {
                        if (is_free(j))
                        {
                            _g[i] += _alpha[j] * Q_i[j];
                        }
                    }
                }
            }
            else
            {
                for (i = 0; i < _activeSize; i++)
                {
                    if (is_free(i))
                    {
                        double[] Q_i     = _q.GetQ(i, _length);
                        double   alpha_i = _alpha[i];
                        for (j = _activeSize; j < _length; j++)
                        {
                            _g[j] += alpha_i * Q_i[j];
                        }
                    }
                }
            }
        }
Example #3
        //from Svm.svm_predict_probability
        public double PredictProbability(SvmNode[] x, double[] prob_estimates)
        {
            if (!SvmType.IsSVC() || ProbA == null || ProbB == null)
            {
                return(Predict(x));
            }

            int nr_class = NrClass;

            double[] dec_values = new double[nr_class * (nr_class - 1) / 2];
            PredictValues(x, dec_values);

            double min_prob = 1e-7;

            double[][] pairwise_prob = new double[nr_class][];
            for (int i = 0; i < nr_class; i++)
            {
                pairwise_prob[i] = new double[nr_class];
            }

            int k = 0;

            for (int i = 0; i < nr_class; i++)
            {
                for (int j = i + 1; j < nr_class; j++)
                {
                    pairwise_prob[i][j] = Math.Min(Math.Max(Svm.sigmoid_predict(dec_values[k], ProbA[k], ProbB[k]), min_prob), 1 - min_prob);
                    pairwise_prob[j][i] = 1 - pairwise_prob[i][j];
                    k++;
                }
            }
            Svm.multiclass_probability(nr_class, pairwise_prob, prob_estimates);

            int prob_max_idx = 0;

            for (int i = 1; i < nr_class; i++)
            {
                if (prob_estimates[i] > prob_estimates[prob_max_idx])
                {
                    prob_max_idx = i;
                }
            }
            return(Label[prob_max_idx]);
        }
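A hypothetical usage sketch for PredictProbability; "problem", "parameters" and "x" are placeholders, and the method is assumed to live on SvmModel, as its use of NrClass, Label, ProbA and ProbB suggests. Training is done with the static Train method shown in Example #8.

        // Assumes an SVC-type SvmParameter with Probability = true so that ProbA/ProbB get filled.
        SvmModel model = Svm.Train(problem, parameters);
        double[] probabilities = new double[model.NrClass];   // one slot per class, ordered like model.Label
        double predictedLabel  = model.PredictProbability(x, probabilities);
        // Without probability information (ProbA/ProbB == null) the call falls back to
        // Predict(x) and leaves the probabilities array unfilled.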
Example #4
        private static void solve_c_svc(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si, double Cp, double Cn)
        {
            int l = prob.Lenght;

            double[] minus_ones = new double[l];
            sbyte[]  y          = new sbyte[l];

            for (int i = 0; i < l; i++)
            {
                alpha[i]      = 0;
                minus_ones[i] = -1;

                if (prob.Y[i] > 0)
                {
                    y[i] = +1;
                }
                else
                {
                    y[i] = -1;
                }
            }

            Solver s = new Solver();

            s.Solve(l, new SvcQ(prob, param, y), minus_ones, y,
                    alpha, Cp, Cn, param.Eps, si, param.Shrinking);

            double sum_alpha = 0;

            for (int i = 0; i < l; i++)
            {
                sum_alpha += alpha[i];
            }

            if (Cp == Cn)
            {
                Svm.info("nu = " + sum_alpha / (Cp * prob.Lenght) + "\n");
            }

            for (int i = 0; i < l; i++)
            {
                alpha[i] *= y[i];
            }
        }
Example #5
        // Return parameter of a Laplace distribution
        private static double svm_svr_probability(SvmProblem prob, SvmParameter param)
        {
            int i;
            int nr_fold = 5;

            double[] ymv = new double[prob.Lenght];
            double   mae = 0;

            var newparam = (SvmParameter)param.Clone();

            newparam.Probability = false;
            CrossValidation(prob, newparam, nr_fold, ymv);
            for (i = 0; i < prob.Lenght; i++)
            {
                ymv[i] = prob.Y[i] - ymv[i];
                mae   += Math.Abs(ymv[i]);
            }
            mae /= prob.Lenght;
            double std   = Math.Sqrt(2 * mae * mae);
            int    count = 0;

            mae = 0;
            for (i = 0; i < prob.Lenght; i++)
            {
                if (Math.Abs(ymv[i]) > 5 * std)
                {
                    count = count + 1;
                }
                else
                {
                    mae += Math.Abs(ymv[i]);
                }
            }
            mae /= (prob.Lenght - count);
            Svm.info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + mae + "\n");
            return(mae);
        }
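The value returned here (and stored in model.ProbA[0] by Train for the SVR types with Probability = true) is the sigma of the Laplace noise model named in the log message. A hypothetical helper, not part of the library, evaluating that density for a residual z = target - predicted:

        // Laplace density e^(-|z|/sigma) / (2*sigma), with sigma = svm_svr_probability(prob, param).
        private static double LaplaceDensitySketch(double z, double sigma)
        {
            return Math.Exp(-Math.Abs(z) / sigma) / (2 * sigma);
        }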
Example #6
        private static void solve_nu_svr(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
        {
            int    l = prob.Lenght;
            double C = param.C;

            double[] alpha2      = new double[2 * l];
            double[] linear_term = new double[2 * l];
            sbyte[]  y           = new sbyte[2 * l];
            int      i;

            double sum = C * param.Nu * l / 2;

            for (i = 0; i < l; i++)
            {
                alpha2[i] = alpha2[i + l] = Math.Min(sum, C);
                sum      -= alpha2[i];

                linear_term[i] = -prob.Y[i];
                y[i]           = 1;

                linear_term[i + l] = prob.Y[i];
                y[i + l]           = -1;
            }

            var s = new SolverNu();

            s.Solve(2 * l, new SvrQ(prob, param), linear_term, y,
                    alpha2, C, C, param.Eps, si, param.Shrinking);

            Svm.info("epsilon = " + (-si.R) + "\n");

            for (i = 0; i < l; i++)
            {
                alpha[i] = alpha2[i] - alpha2[i + l];
            }
        }
Example #7
        public virtual void Solve(int length, QMatrix Q, double[] p_, sbyte[] y_,
                                  double[] alpha_, double Cp, double Cn, double eps, SolutionInfo si, bool shrinking)
        {
            this._length   = length;
            this._q        = Q;
            _qd            = Q.GetQD();
            _p             = (double[])p_.Clone();
            _y             = (sbyte[])y_.Clone();
            _alpha         = (double[])alpha_.Clone();
            this._cp       = Cp;
            this._cn       = Cn;
            this._eps      = eps;
            this._unshrink = false;

            // initialize alpha_status
            {
                _alphaStatus = new BoundType[length];
                for (int i = 0; i < length; i++)
                {
                    UpdateAlphaStatus(i);
                }
            }

            // initialize active set (for shrinking)
            {
                _activeSet = new int[length];
                for (int i = 0; i < length; i++)
                {
                    _activeSet[i] = i;
                }
                _activeSize = length;
            }

            // initialize gradient
            {
                _g    = new double[length];
                _gBar = new double[length];
                int i;
                for (i = 0; i < length; i++)
                {
                    _g[i]    = _p[i];
                    _gBar[i] = 0;
                }
                for (i = 0; i < length; i++)
                {
                    if (!is_lower_bound(i))
                    {
                        double[] Q_i     = Q.GetQ(i, length);
                        double   alpha_i = _alpha[i];
                        int      j;
                        for (j = 0; j < length; j++)
                        {
                            _g[j] += alpha_i * Q_i[j];
                        }
                        if (is_upper_bound(i))
                        {
                            for (j = 0; j < length; j++)
                            {
                                _gBar[j] += GetC(i) * Q_i[j];
                            }
                        }
                    }
                }
            }

            // optimization step

            int iter     = 0;
            int max_iter = Math.Max(10000000, length > int.MaxValue / 100 ? int.MaxValue : 100 * length);
            int counter  = Math.Min(length, 1000) + 1;

            int[] working_set = new int[2];

            while (iter < max_iter)
            {
                // show progress and do shrinking

                if (--counter == 0)
                {
                    counter = Math.Min(length, 1000);
                    if (shrinking)
                    {
                        do_shrinking();
                    }
                    Svm.info(".");
                }

                if (select_working_set(working_set) != 0)
                {
                    // reconstruct the whole gradient
                    reconstruct_gradient();
                    // reset active set size and check
                    _activeSize = length;
                    Svm.info("*");
                    if (select_working_set(working_set) != 0)
                    {
                        break;
                    }
                    else
                    {
                        counter = 1; // do shrinking next iteration
                    }
                }

                int i = working_set[0];
                int j = working_set[1];

                ++iter;

                // update alpha[i] and alpha[j], handle bounds carefully

                double[] Q_i = Q.GetQ(i, _activeSize);
                double[] Q_j = Q.GetQ(j, _activeSize);

                double C_i = GetC(i);
                double C_j = GetC(j);

                double old_alpha_i = _alpha[i];
                double old_alpha_j = _alpha[j];

                if (_y[i] != _y[j])
                {
                    double quad_coef = _qd[i] + _qd[j] + 2 * Q_i[j];
                    if (quad_coef <= 0)
                    {
                        quad_coef = 1e-12;
                    }
                    double delta = (-_g[i] - _g[j]) / quad_coef;
                    double diff  = _alpha[i] - _alpha[j];
                    _alpha[i] += delta;
                    _alpha[j] += delta;

                    if (diff > 0)
                    {
                        if (_alpha[j] < 0)
                        {
                            _alpha[j] = 0;
                            _alpha[i] = diff;
                        }
                    }
                    else
                    {
                        if (_alpha[i] < 0)
                        {
                            _alpha[i] = 0;
                            _alpha[j] = -diff;
                        }
                    }
                    if (diff > C_i - C_j)
                    {
                        if (_alpha[i] > C_i)
                        {
                            _alpha[i] = C_i;
                            _alpha[j] = C_i - diff;
                        }
                    }
                    else
                    {
                        if (_alpha[j] > C_j)
                        {
                            _alpha[j] = C_j;
                            _alpha[i] = C_j + diff;
                        }
                    }
                }
                else
                {
                    double quad_coef = _qd[i] + _qd[j] - 2 * Q_i[j];
                    if (quad_coef <= 0)
                    {
                        quad_coef = 1e-12;
                    }
                    double delta = (_g[i] - _g[j]) / quad_coef;
                    double sum   = _alpha[i] + _alpha[j];
                    _alpha[i] -= delta;
                    _alpha[j] += delta;

                    if (sum > C_i)
                    {
                        if (_alpha[i] > C_i)
                        {
                            _alpha[i] = C_i;
                            _alpha[j] = sum - C_i;
                        }
                    }
                    else
                    {
                        if (_alpha[j] < 0)
                        {
                            _alpha[j] = 0;
                            _alpha[i] = sum;
                        }
                    }
                    if (sum > C_j)
                    {
                        if (_alpha[j] > C_j)
                        {
                            _alpha[j] = C_j;
                            _alpha[i] = sum - C_j;
                        }
                    }
                    else
                    {
                        if (_alpha[i] < 0)
                        {
                            _alpha[i] = 0;
                            _alpha[j] = sum;
                        }
                    }
                }

                // update G
                double delta_alpha_i = _alpha[i] - old_alpha_i;
                double delta_alpha_j = _alpha[j] - old_alpha_j;

                for (int k = 0; k < _activeSize; k++)
                {
                    _g[k] += Q_i[k] * delta_alpha_i + Q_j[k] * delta_alpha_j;
                }

                // update alpha_status and G_bar
                bool ui = is_upper_bound(i);
                bool uj = is_upper_bound(j);
                UpdateAlphaStatus(i);
                UpdateAlphaStatus(j);
                //int k;
                if (ui != is_upper_bound(i))
                {
                    Q_i = Q.GetQ(i, length);
                    if (ui)
                    {
                        for (int k = 0; k < length; k++)
                        {
                            _gBar[k] -= C_i * Q_i[k];
                        }
                    }
                    else
                    {
                        for (int k = 0; k < length; k++)
                        {
                            _gBar[k] += C_i * Q_i[k];
                        }
                    }
                }

                if (uj != is_upper_bound(j))
                {
                    Q_j = Q.GetQ(j, length);
                    if (uj)
                    {
                        for (int k = 0; k < length; k++)
                        {
                            _gBar[k] -= C_j * Q_j[k];
                        }
                    }
                    else
                    {
                        for (int k = 0; k < length; k++)
                        {
                            _gBar[k] += C_j * Q_j[k];
                        }
                    }
                }
            }

            if (iter >= max_iter)
            {
                if (_activeSize < length)
                {
                    // reconstruct the whole gradient to calculate objective value
                    reconstruct_gradient();
                    _activeSize = length;
                    Svm.info("*");
                }
                Svm.info("\nWARNING: reaching max number of iterations");
            }

            // calculate rho
            si.Rho = calculate_rho();

            // calculate objective value
            double v = 0;

            for (int i = 0; i < length; i++)
            {
                v += _alpha[i] * (_g[i] + _p[i]);
            }
            si.Obj = v / 2;

            // put back the solution
            for (int i = 0; i < length; i++)
            {
                alpha_[_activeSet[i]] = _alpha[i];
            }

            si.UpperBoundP = Cp;
            si.UpperBoundN = Cn;

            Svm.info("\noptimization finished, #iter = " + iter + "\n");
        }
Example #8
        //
        // Interface functions
        //
        public static SvmModel Train(SvmProblem prob, SvmParameter param)
        {
            var model = new SvmModel();

            model.Param = param;

            if (param.SvmType.IsSVROrOneClass())
            {
                // regression or one-class-svm
                model.NrClass = 2;
                model.Label   = null;
                model.SupportVectorsNumbers = null;
                model.ProbA = null; model.ProbB = null;
                model.SupportVectorsCoefficients = new double[1][];

                if (param.Probability && param.SvmType.IsSVR())
                {
                    model.ProbA    = new double[1];
                    model.ProbA[0] = svm_svr_probability(prob, param);
                }

                DecisionFunction f = svm_train_one(prob, param, 0, 0);
                model.Rho    = new double[1];
                model.Rho[0] = f.Rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.Lenght; i++)
                {
                    if (Math.Abs(f.Alpha[i]) > 0)
                    {
                        ++nSV;
                    }
                }
                model.TotalSupportVectorsNumber     = nSV;
                model.SupportVectors                = new SvmNode[nSV][];
                model.SupportVectorsCoefficients[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.Lenght; i++)
                {
                    if (Math.Abs(f.Alpha[i]) > 0)
                    {
                        model.SupportVectors[j] = prob.X[i];
                        model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                        ++j;
                    }
                }
            }
            else
            {
                // classification
                int   l    = prob.Lenght;
                int[] perm = new int[l];

                int   nr_class;
                int[] label;
                int[] start;
                int[] count;

                // group training data of the same class
                svm_group_classes(prob, out nr_class, out label, out start, out count, perm);

                if (nr_class == 1)
                {
                    Svm.info("WARNING: training data in only one class. See README for details.\n");
                }

                SvmNode[][] x = new SvmNode[l][];
                int         i;
                for (i = 0; i < l; i++)
                {
                    x[i] = prob.X[perm[i]];
                }

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    weighted_C[i] = param.C;
                }
                for (i = 0; i < param.WeightsCount; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                    {
                        if (param.WeightLabel[i] == label[j])
                        {
                            break;
                        }
                    }
                    if (j == nr_class)
                    {
                        System.Diagnostics.Debug.WriteLine("WARNING: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
                    }
                    else
                    {
                        weighted_C[j] *= param.Weight[i];
                    }
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                {
                    nonzero[i] = false;
                }
                var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

                double[] probA = null, probB = null;
                if (param.Probability)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                {
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        var subprobLenght = ci + cj;
                        var sub_prob      = new SvmProblem
                        {
                            X = new SvmNode[subprobLenght][],
                            Y = new double[subprobLenght]
                        };

                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.X[k] = x[si + k];
                            sub_prob.Y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.X[ci + k] = x[sj + k];
                            sub_prob.Y[ci + k] = -1;
                        }

                        if (param.Probability)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                        {
                            if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                            {
                                nonzero[si + k] = true;
                            }
                        }
                        for (k = 0; k < cj; k++)
                        {
                            if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                            {
                                nonzero[sj + k] = true;
                            }
                        }
                        ++p;
                    }
                }

                // build output

                model.NrClass = nr_class;

                model.Label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    model.Label[i] = label[i];
                }

                model.Rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                {
                    model.Rho[i] = f[i].Rho;
                }

                if (param.Probability)
                {
                    model.ProbA = new double[nr_class * (nr_class - 1) / 2];
                    model.ProbB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.ProbA[i] = probA[i];
                        model.ProbB[i] = probB[i];
                    }
                }
                else
                {
                    model.ProbA = null;
                    model.ProbB = null;
                }

                int   nnz      = 0;
                int[] nz_count = new int[nr_class];
                model.SupportVectorsNumbers = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                    {
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    }
                    model.SupportVectorsNumbers[i] = nSV;
                    nz_count[i] = nSV;
                }

                Svm.info("Total nSV = " + nnz + "\n");

                model.TotalSupportVectorsNumber = nnz;
                model.SupportVectors            = new SvmNode[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                {
                    if (nonzero[i])
                    {
                        model.SupportVectors[p++] = x[i];
                    }
                }

                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                {
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
                }

                model.SupportVectorsCoefficients = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                {
                    model.SupportVectorsCoefficients[i] = new double[nnz];
                }

                p = 0;
                for (i = 0; i < nr_class; i++)
                {
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            if (nonzero[si + k])
                            {
                                model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                            }
                        }
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                        {
                            if (nonzero[sj + k])
                            {
                                model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                            }
                        }
                        ++p;
                    }
                }
            }
            return(model);
        }
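A hypothetical end-to-end usage sketch for Train. The SvmProblem initializer mirrors the one used for sub_prob above; everything else (the SvmParameter initializer, the BuildNodes helper, and whether Lenght is derived automatically from X/Y) is an assumption, since those details are not shown in this excerpt.

        // Two-sample toy problem; BuildNodes stands in for whatever SvmNode encoding the library expects.
        var problem = new SvmProblem
        {
            X = new SvmNode[][] { BuildNodes(/* features of sample 0 */), BuildNodes(/* features of sample 1 */) },
            Y = new double[] { +1, -1 }          // class labels (regression targets for the SVR types)
        };

        var parameters = new SvmParameter        // assumed to be settable via an object initializer
        {
            SvmType     = SvmType.C_SVC,
            C           = 1.0,
            Eps         = 1e-3,
            Shrinking   = true,
            Probability = false
        };

        SvmModel model = Svm.Train(problem, parameters);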
Example #9
        private static void solve_nu_svc(SvmProblem prob, SvmParameter param, double[] alpha, SolutionInfo si)
        {
            int    i;
            int    l  = prob.Lenght;
            double nu = param.Nu;

            sbyte[] y = new sbyte[l];

            for (i = 0; i < l; i++)
            {
                if (prob.Y[i] > 0)
                {
                    y[i] = +1;
                }
                else
                {
                    y[i] = -1;
                }
            }

            double sum_pos = nu * l / 2;
            double sum_neg = nu * l / 2;

            for (i = 0; i < l; i++)
            {
                if (y[i] == +1)
                {
                    alpha[i] = Math.Min(1.0, sum_pos);
                    sum_pos -= alpha[i];
                }
                else
                {
                    alpha[i] = Math.Min(1.0, sum_neg);
                    sum_neg -= alpha[i];
                }
            }

            double[] zeros = new double[l];

            for (i = 0; i < l; i++)
            {
                zeros[i] = 0;
            }

            var s = new SolverNu();

            s.Solve(l, new SvcQ(prob, param, y), zeros, y,
                    alpha, 1.0, 1.0, param.Eps, si, param.Shrinking);
            double r = si.R;

            Svm.info("C = " + 1 / r + "\n");

            for (i = 0; i < l; i++)
            {
                alpha[i] *= y[i] / r;
            }

            si.Rho        /= r;
            si.Obj        /= (r * r);
            si.UpperBoundP = 1 / r;
            si.UpperBoundN = 1 / r;
        }
Example #10
        // Method 2 from the multiclass_prob paper by Wu, Lin, and Weng
        internal static void multiclass_probability(int k, double[][] r, double[] p)
        {
            int t, j;
            int iter = 0, max_iter = Math.Max(100, k);

            //double[][] Q = new double[k][k];
            double[][] Q = new double[k][];
            for (int i = 0; i < k; i++)
            {
                Q[i] = new double[k];
            }

            double[] Qp = new double[k];
            double   pQp, eps = 0.005 / k;

            for (t = 0; t < k; t++)
            {
                p[t]    = 1.0 / k; // Valid if k = 1
                Q[t][t] = 0;
                for (j = 0; j < t; j++)
                {
                    Q[t][t] += r[j][t] * r[j][t];
                    Q[t][j]  = Q[j][t];
                }
                for (j = t + 1; j < k; j++)
                {
                    Q[t][t] += r[j][t] * r[j][t];
                    Q[t][j]  = -r[j][t] * r[t][j];
                }
            }
            for (iter = 0; iter < max_iter; iter++)
            {
                // stopping condition, recalculate QP,pQP for numerical accuracy
                pQp = 0;
                for (t = 0; t < k; t++)
                {
                    Qp[t] = 0;
                    for (j = 0; j < k; j++)
                    {
                        Qp[t] += Q[t][j] * p[j];
                    }
                    pQp += p[t] * Qp[t];
                }
                double max_error = 0;
                for (t = 0; t < k; t++)
                {
                    double error = Math.Abs(Qp[t] - pQp);
                    if (error > max_error)
                    {
                        max_error = error;
                    }
                }
                if (max_error < eps)
                {
                    break;
                }

                for (t = 0; t < k; t++)
                {
                    double diff = (-Qp[t] + pQp) / Q[t][t];
                    p[t] += diff;
                    pQp   = (pQp + diff * (diff * Q[t][t] + 2 * Qp[t])) / (1 + diff) / (1 + diff);
                    for (j = 0; j < k; j++)
                    {
                        Qp[j] = (Qp[j] + diff * Q[t][j]) / (1 + diff);
                        p[j] /= (1 + diff);
                    }
                }
            }
            if (iter >= max_iter)
            {
                Svm.info("Exceeds max_iter in multiclass_prob\n");
            }
        }
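An illustration of the input and output shapes (multiclass_probability is internal, so a real call would have to come from inside the same assembly). The numbers are made up; r[i][j] is the pairwise estimate P(class i | class i or j) and r[j][i] = 1 - r[i][j], exactly as PredictProbability fills pairwise_prob in Example #3.

        double[][] r = new double[][]
        {
            new double[] { 0.0, 0.7, 0.8 },
            new double[] { 0.3, 0.0, 0.6 },
            new double[] { 0.2, 0.4, 0.0 }
        };
        double[] p = new double[3];
        Svm.multiclass_probability(3, r, p);     // p now holds the coupled class probabilities (summing to 1)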
Example #11
        // Platt's binary SVM Probabilistic Output: an improvement from Lin et al.
        private static void sigmoid_train(int l, double[] dec_values, double[] labels, double[] probAB)
        {
            //double A, B;
            double prior1 = 0, prior0 = 0;

            //int i;

            for (int i = 0; i < l; i++)
            {
                if (labels[i] > 0)
                {
                    prior1 += 1;
                }
                else
                {
                    prior0 += 1;
                }
            }

            const int    max_iter = 100;   // Maximal number of iterations
            const double min_step = 1e-10; // Minimal step taken in line search
            const double sigma    = 1e-12; // For numerically strict PD of Hessian
            const double eps      = 1e-5;

            double hiTarget = (prior1 + 1.0) / (prior1 + 2.0);
            double loTarget = 1 / (prior0 + 2.0);

            double[] t = new double[l];
            double   fApB;
            int      iter;

            // Initial Point and Initial Fun Value
            double A = 0.0;
            double B = Math.Log((prior0 + 1.0) / (prior1 + 1.0));

            double fval = 0.0;

            for (int i = 0; i < l; i++)
            {
                if (labels[i] > 0)
                {
                    t[i] = hiTarget;
                }
                else
                {
                    t[i] = loTarget;
                }
                fApB = dec_values[i] * A + B;
                if (fApB >= 0)
                {
                    fval += t[i] * fApB + Math.Log(1 + Math.Exp(-fApB));
                }
                else
                {
                    fval += (t[i] - 1) * fApB + Math.Log(1 + Math.Exp(fApB));
                }
            }

            for (iter = 0; iter < max_iter; iter++)
            {
                // Update Gradient and Hessian (use H' = H + sigma I)
                double h11 = sigma; // numerically ensures strict PD
                double h22 = sigma;
                double h21 = 0.0;

                double g1 = 0.0;
                double g2 = 0.0;

                for (int i = 0; i < l; i++)
                {
                    double p, q;
                    fApB = dec_values[i] * A + B;
                    if (fApB >= 0)
                    {
                        p = Math.Exp(-fApB) / (1.0 + Math.Exp(-fApB));
                        q = 1.0 / (1.0 + Math.Exp(-fApB));
                    }
                    else
                    {
                        p = 1.0 / (1.0 + Math.Exp(fApB));
                        q = Math.Exp(fApB) / (1.0 + Math.Exp(fApB));
                    }
                    double d2 = p * q;
                    h11 += dec_values[i] * dec_values[i] * d2;
                    h22 += d2;
                    h21 += dec_values[i] * d2;
                    double d1 = t[i] - p;
                    g1 += dec_values[i] * d1;
                    g2 += d1;
                }

                // Stopping Criteria
                if (Math.Abs(g1) < eps && Math.Abs(g2) < eps)
                {
                    break;
                }

                // Finding Newton direction: -inv(H') * g
                double det = h11 * h22 - h21 * h21;
                double dA  = -(h22 * g1 - h21 * g2) / det;
                double dB  = -(-h21 * g1 + h11 * g2) / det;
                double gd  = g1 * dA + g2 * dB;

                double stepsize = 1; // Line Search
                while (stepsize >= min_step)
                {
                    double newA = A + stepsize * dA;
                    double newB = B + stepsize * dB;

                    // New function value
                    double newf = 0.0;
                    for (int i = 0; i < l; i++)
                    {
                        fApB = dec_values[i] * newA + newB;
                        if (fApB >= 0)
                        {
                            newf += t[i] * fApB + Math.Log(1 + Math.Exp(-fApB));
                        }
                        else
                        {
                            newf += (t[i] - 1) * fApB + Math.Log(1 + Math.Exp(fApB));
                        }
                    }
                    // Check sufficient decrease
                    if (newf < fval + 0.0001 * stepsize * gd)
                    {
                        A = newA; B = newB; fval = newf;
                        break;
                    }
                    else
                    {
                        stepsize = stepsize / 2.0;
                    }
                }

                if (stepsize < min_step)
                {
                    Svm.info("Line search fails in two-class probability estimates\n");
                    break;
                }
            }

            if (iter >= max_iter)
            {
                Svm.info("Reaching maximal iterations in two-class probability estimates\n");
            }
            probAB[0] = A; probAB[1] = B;
        }
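The (A, B) pair written into probAB is what PredictProbability later feeds to Svm.sigmoid_predict (Example #3). That method is not shown in this excerpt; below is a sketch of what such an evaluation typically computes, using the same overflow-safe split on the sign of fApB as the training loop above.

        // Sketch only, not the library's actual Svm.sigmoid_predict:
        // evaluate 1 / (1 + exp(A * decisionValue + B)) without overflowing Math.Exp.
        private static double SigmoidPredictSketch(double decisionValue, double A, double B)
        {
            double fApB = decisionValue * A + B;
            if (fApB >= 0)
            {
                return Math.Exp(-fApB) / (1.0 + Math.Exp(-fApB));
            }
            return 1.0 / (1.0 + Math.Exp(fApB));
        }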
Example #12
        private static DecisionFunction svm_train_one(SvmProblem prob, SvmParameter param, double Cp, double Cn)
        {
            double[] alpha = new double[prob.Lenght];
            var      si    = new SolutionInfo();

            switch (param.SvmType)
            {
            case SvmType.C_SVC:
                solve_c_svc(prob, param, alpha, si, Cp, Cn);
                break;

            case SvmType.NU_SVC:
                solve_nu_svc(prob, param, alpha, si);
                break;

            case SvmType.ONE_CLASS:
                solve_one_class(prob, param, alpha, si);
                break;

            case SvmType.EPSILON_SVR:
                solve_epsilon_svr(prob, param, alpha, si);
                break;

            case SvmType.NU_SVR:
                solve_nu_svr(prob, param, alpha, si);
                break;
            }

            Svm.info("obj = " + si.Obj + ", rho = " + si.Rho + "\n");

            // output SVs

            int nSV  = 0;
            int nBSV = 0;

            for (int i = 0; i < prob.Lenght; i++)
            {
                if (Math.Abs(alpha[i]) > 0)
                {
                    ++nSV;
                    if (prob.Y[i] > 0)
                    {
                        if (Math.Abs(alpha[i]) >= si.UpperBoundP)
                        {
                            ++nBSV;
                        }
                    }
                    else
                    {
                        if (Math.Abs(alpha[i]) >= si.UpperBoundN)
                        {
                            ++nBSV;
                        }
                    }
                }
            }

            Svm.info("nSV = " + nSV + ", nBSV = " + nBSV + "\n");

            var f = new DecisionFunction(alpha, si.Rho);

            return(f);
        }