Example #1
        public void Copy(SVMModel rhs)
        {
            _param = rhs._param;
            _numberClass = rhs._numberClass;
            _totalNumberSV = rhs._totalNumberSV;            // total #SV
            _SV = new SVMNode[rhs._SV.Length][];    // SVs (SV[l])

            for (int i = 0; i < rhs._SV.Length; ++i)
            {
                _SV[i] = new SVMNode[rhs._SV[i].Length];
                for (int j = 0; j < rhs._SV[i].Length; ++j)
                {
                    _SV[i][j] = (SVMNode)rhs._SV[i][j].Clone();
                }
            }

            _coefSV = new double[rhs._coefSV.Length][];    // coefficients for SVs in decision functions (sv_coef[k-1][l])
            for (int i = 0; i < rhs._coefSV.Length; ++i)
            {
                _coefSV[i] = (double[])rhs._coefSV[i].Clone();
            }

            _rho = rhs._rho == null ? null : (double[]) rhs._rho.Clone();        // constants in decision functions (rho[k*(k-1)/2])
            ProbA = rhs.ProbA == null ? null : (double[]) rhs.ProbA.Clone();         // pairwise probability information
            ProbB = rhs.ProbB == null ? null : (double[]) rhs.ProbB.Clone();
            _indicesSV = rhs._indicesSV == null ? null : (int[]) rhs._indicesSV.Clone();       // sv_indices[0,...,nSV-1] are values in [1,...,num_training_data] to indicate SVs in the training set

            // for classification only

            Label = rhs.Label == null ? null : (int[]) rhs.Label.Clone();        // label of each class (label[k])
            _numberSV4EachClass = rhs._numberSV4EachClass == null ? null : (int[]) rhs._numberSV4EachClass.Clone();        // number of SVs for each class (nSV[k])
        }
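
Note how Copy deep-copies the jagged _SV and _coefSV arrays: Array.Clone on the outer array alone would only duplicate the row references, so every row (and every SVMNode) is cloned individually. A minimal, self-contained sketch of the same jagged-array deep-copy pattern (the names here are illustrative, not part of the library):

double[][] source =
{
    new[] { 1.0, 2.0 },
    new[] { 3.0, 4.0, 5.0 }
};

// Allocate a new outer array, then clone each row so the copies share no storage.
double[][] copy = new double[source.Length][];
for (int i = 0; i < source.Length; ++i)
{
    copy[i] = (double[])source[i].Clone();
}

copy[0][0] = 42.0;   // source[0][0] is unchanged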
Example #2
        public object Clone()
        {
            SVMParam clone = new SVMParam();

            clone.copy(this);
            return(clone);
        }
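
Clone above allocates a fresh SVMParam and delegates the field-by-field work to copy (shown in Example #6). A hedged, self-contained sketch of the same Clone-delegates-to-copy pattern, using an illustrative Point2 type that is not part of the library:

using System;

public class Point2 : ICloneable
{
    public double X;
    public double Y;

    // Field-by-field copy; extend here when new fields are added.
    public void copy(Point2 rhs)
    {
        X = rhs.X;
        Y = rhs.Y;
    }

    // Clone allocates and delegates, mirroring SVMParam.Clone above.
    public object Clone()
    {
        Point2 clone = new Point2();
        clone.copy(this);
        return clone;
    }
}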
Example #3
        public SVR_Q(SVMProblem prob, SVMParam param)
            : base(prob.ProblemSize, prob.x, param)
        {
            l     = prob.ProblemSize;
            cache = new Cache(l, (long)(param.CacheSizeInMB * (1 << 20)));
            QD    = new double[2 * l];
            sign  = new int[2 * l];
            index = new int[2 * l];
            for (int k = 0; k < l; k++)
            {
                sign[k]      = 1;
                sign[k + l]  = -1;
                index[k]     = k;
                index[k + l] = k;
                QD[k]        = kernel_function(k, k);
                QD[k + l]    = QD[k];
            }
            buffer = new float[2][];
            for (int i = 0; i < buffer.Length; ++i)
            {
                buffer[i] = new float[2 * l];
            }

            next_buffer = 0;
        }
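
SVR_Q doubles the problem size: indices 0..l-1 stand for the alpha variables (sign +1) of the epsilon-SVR dual and indices l..2l-1 for the alpha* variables (sign -1), while index[] maps both halves back to the same training point, which is why QD[k + l] can simply reuse QD[k]. A standalone sketch of that index/sign mapping (illustrative values, not library code):

int l = 3;                        // number of training points (example value)
int[] sign  = new int[2 * l];
int[] index = new int[2 * l];
for (int k = 0; k < l; k++)
{
    sign[k]      = +1;            // alpha variable for point k
    sign[k + l]  = -1;            // alpha* variable for the same point k
    index[k]     = k;
    index[k + l] = k;             // both halves refer to training point k
}
// In the doubled problem, Q(i, j) = sign[i] * sign[j] * K(index[i], index[j]),
// so the kernel diagonal is identical in both halves.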
Example #4
        public ONE_CLASS_Q(SVMProblem prob, SVMParam param)
            : base(prob.ProblemSize, prob.x, param)
        {
            cache = new Cache(prob.ProblemSize, (long)(param.CacheSizeInMB * (1 << 20)));
            QD    = new double[prob.ProblemSize];
            for (int i = 0; i < prob.ProblemSize; i++)
            {
                QD[i] = kernel_function(i, i);
            }
        }
Example #5
        public SVC_Q(SVMProblem prob, SVMParam param, int[] y_)
            : base(prob.ProblemSize, prob.x, param)
        {
            y     = (int[])y_.Clone();
            cache = new Cache(prob.ProblemSize, (long)(param.CacheSizeInMB * (1 << 20)));
            QD    = new double[prob.ProblemSize];
            for (int i = 0; i < prob.ProblemSize; i++)
            {
                QD[i] = kernel_function(i, i);
            }
        }
Example #6

        public void copy(SVMParam rhs)
        {
            SVMType    = rhs.SVMType;
            KernelType = rhs.KernelType;
            Degree     = rhs.Degree; // for poly
            Gamma      = rhs.Gamma;  // for poly/rbf/sigmoid
            Coef0      = rhs.Coef0;  // for poly/sigmoid

            // these are for training only
            CacheSizeInMB         = rhs.CacheSizeInMB;              // in MB
            Epsilon               = rhs.Epsilon;                    // stopping criteria
            C                     = rhs.C;                          // for C_SVC, EPSILON_SVR and NU_SVR
            NumberWeight          = rhs.NumberWeight;               // for C_SVC
            WeightLabel           = (int[])rhs.WeightLabel.Clone(); // for C_SVC
            Weight                = (double[])rhs.Weight.Clone();   // for C_SVC
            nu                    = rhs.nu;                         // for NU_SVC, ONE_CLASS, and NU_SVR
            p                     = rhs.p;                          // for EPSILON_SVR
            UseShrinkingHeuristic = rhs.UseShrinkingHeuristic;      // use the shrinking heuristics
            DoProbabilityEstimate = rhs.DoProbabilityEstimate;      // do probability estimates
        }
Example #7
        public Kernel(int l, SVMNode[][] x_, SVMParam param)
        {
            this.kernel_type = param.KernelType;
            this.degree      = param.Degree;
            this.gamma       = param.Gamma;
            this.coef0       = param.Coef0;

            x = (SVMNode[][])x_.Clone();

            if (kernel_type == SVMParam.KERNEL_TYPE_RBF)
            {
                x_square = new double[l];
                for (int i = 0; i < l; i++)
                {
                    x_square[i] = dot(x[i], x[i]);
                }
            }
            else
            {
                x_square = null;
            }
        }
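
Caching x_square is worthwhile because the RBF kernel only needs squared distances, and ||x_i - x_j||^2 = x_i.x_i - 2*x_i.x_j + x_j.x_j, so the self-dot products computed once in the constructor can be reused for every kernel evaluation. A dense, self-contained sketch of that identity (the names below are illustrative, not the library's kernel_function):

using System;

static class RbfSketch
{
    static double Dot(double[] a, double[] b)
    {
        double s = 0;
        for (int i = 0; i < a.Length; ++i)
        {
            s += a[i] * b[i];
        }
        return s;
    }

    // xSquare = Dot(x, x) and ySquare = Dot(y, y), precomputed once per vector.
    public static double Rbf(double[] x, double[] y, double xSquare, double ySquare, double gamma)
    {
        return Math.Exp(-gamma * (xSquare - 2 * Dot(x, y) + ySquare));
    }
}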
Example #8
        public static double k_function(SVMNode[] x, SVMNode[] y,
                                        SVMParam param)
        {
            switch (param.KernelType)
            {
            case SVMParam.KERNEL_TYPE_LINEAR:
                return(dot(x, y));

            case SVMParam.KERNEL_TYPE_POLY:
                return(powi(param.Gamma * dot(x, y) + param.Coef0, param.Degree));

            case SVMParam.KERNEL_TYPE_RBF:
            {
                double sum  = 0;
                int    xlen = x.Length;
                int    ylen = y.Length;
                int    i    = 0;
                int    j    = 0;
                while (i < xlen && j < ylen)
                {
                    if (x[i].index == y[j].index)
                    {
                        double d = x[i++].value - y[j++].value;
                        sum += d * d;
                    }
                    else if (x[i].index > y[j].index)
                    {
                        sum += y[j].value * y[j].value;
                        ++j;
                    }
                    else
                    {
                        sum += x[i].value * x[i].value;
                        ++i;
                    }
                }

                while (i < xlen)
                {
                    sum += x[i].value * x[i].value;
                    ++i;
                }

                while (j < ylen)
                {
                    sum += y[j].value * y[j].value;
                    ++j;
                }

                return(Math.Exp(-param.Gamma * sum));
            }

            case SVMParam.KERNEL_TYPE_SIGMOID:
                return(Math.Tanh(param.Gamma * dot(x, y) + param.Coef0));

            case SVMParam.KERNEL_TYPE_PRECOMPUTED:
                return(x[(int)(y[0].value)].value);

            default:
            default:
                return(0);      // unreachable for a valid kernel type
            }
        }
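
The RBF case above walks both sparse vectors in ascending index order, much like merging two sorted lists: matching indices contribute (x_i - y_i)^2, and an index present in only one vector contributes the square of that value alone. A standalone sketch of the same merge for a sparse squared Euclidean distance (SparseNode is an illustrative type, not the library's SVMNode):

struct SparseNode
{
    public int Index;
    public double Value;
    public SparseNode(int index, double value) { Index = index; Value = value; }
}

static class SparseDistance
{
    // Squared Euclidean distance between two sparse vectors whose nodes are
    // sorted by ascending Index; absent indices are treated as zero.
    public static double SquaredDistance(SparseNode[] x, SparseNode[] y)
    {
        double sum = 0;
        int i = 0, j = 0;
        while (i < x.Length && j < y.Length)
        {
            if (x[i].Index == y[j].Index)
            {
                double d = x[i++].Value - y[j++].Value;
                sum += d * d;
            }
            else if (x[i].Index > y[j].Index)
            {
                sum += y[j].Value * y[j].Value;   // y has a value x lacks
                ++j;
            }
            else
            {
                sum += x[i].Value * x[i].Value;   // x has a value y lacks
                ++i;
            }
        }
        for (; i < x.Length; ++i) { sum += x[i].Value * x[i].Value; }   // remaining x tail
        for (; j < y.Length; ++j) { sum += y[j].Value * y[j].Value; }   // remaining y tail
        return sum;
    }
}

Math.Exp(-gamma * SquaredDistance(x, y)) then gives the RBF kernel value, matching the corresponding branch of k_function above.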