Example no. 1
 public static double SvmGetSvrProbability(SvmModel model)
 {
     if ((model.param.svmType == SvmType.EpsilonSvr || model.param.svmType == SvmType.NuSvr) && model.probA != null)
     {
         return(model.probA[0]);
     }
     Info("Model doesn't contain information for SVR probability inference\n");
     return(0);
 }
Example no. 2
 public static int SvmCheckProbabilityModel(SvmModel model)
 {
     if (((model.param.svmType == SvmType.CSvc || model.param.svmType == SvmType.NuSvc) && model.probA != null &&
          model.probB != null) ||
         ((model.param.svmType == SvmType.EpsilonSvr || model.param.svmType == SvmType.NuSvr) && model.probA != null))
     {
         return(1);
     }
     return(0);
 }
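A minimal caller-side sketch (an assumption, not taken from the examples above) of how this check is typically combined with the prediction routines shown elsewhere in this listing; it assumes the static methods live on the same SvmMain class used in the other examples:

     // Hypothetical usage: 'model' and 'x' are provided by the host code.
     double[] probs = new double[model.nrClass];
     float label = SvmMain.SvmCheckProbabilityModel(model) == 1
         ? SvmMain.SvmPredictProbability(model, x, probs)
         : SvmMain.SvmPredict(model, x);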
Example no. 3
 public static void SvmGetLabels(SvmModel model, int[] label)
 {
     if (model.label != null)
     {
         for (int i = 0; i < model.nrClass; i++)
         {
             label[i] = model.label[i];
         }
     }
 }
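A minimal usage sketch (an assumption; the caller must allocate the array with model.nrClass entries, since the method only copies into it):

     int[] labels = new int[model.nrClass];
     SvmMain.SvmGetLabels(model, labels);   // labels[i] is the i-th class label stored in the model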
Example no. 4
        public static float SvmPredict(SvmModel model, BaseVector x)
        {
            int nrClass = model.nrClass;

            double[] decValues;
            if (model.param.svmType == SvmType.OneClass || model.param.svmType == SvmType.EpsilonSvr ||
                model.param.svmType == SvmType.NuSvr)
            {
                decValues = new double[1];
            }
            else
            {
                decValues = new double[nrClass * (nrClass - 1) / 2];
            }
            float predResult = SvmPredictValues(model, x, decValues);

            return(predResult);
        }
        public override ClassificationModel Train(BaseVector[] x, int[][] y, int ngroups, Parameters param, int nthreads,
            Action<double> reportProgress)
        {
            string err = CheckInput(x, y, ngroups);
            if (err != null){
                throw new Exception(err);
            }
            ParameterWithSubParams<int> kernelParam = param.GetParamWithSubParams<int>("Kernel");
            SvmParameter sp = new SvmParameter{
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType = SvmType.CSvc,
                c = param.GetParam<double>("C").Value
            };
            bool[] invert;
            SvmProblem[] problems = CreateProblems(x, y, ngroups, out invert);
            SvmModel[] models = new SvmModel[problems.Length];
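            // Train one SvmModel per sub-problem, distributing the trainings over up to
            // 'nthreads' worker threads; the fraction of finished jobs is forwarded to
            // reportProgress.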
            ThreadDistributor td = new ThreadDistributor(nthreads, models.Length,
                i => { models[i] = SvmMain.SvmTrain(problems[i], sp); }, fractionDone => { reportProgress?.Invoke(fractionDone); });
            td.Start();
            return new SvmClassificationModel(models, invert);
        }
Example no. 6
 public static float SvmPredictProbability(SvmModel model, BaseVector x, double[] probEstimates)
 {
     if ((model.param.svmType == SvmType.CSvc || model.param.svmType == SvmType.NuSvc) && model.probA != null &&
         model.probB != null)
     {
         int      nrClass   = model.nrClass;
         double[] decValues = new double[nrClass * (nrClass - 1) / 2];
         SvmPredictValues(model, x, decValues);
         const double minProb      = 1e-7;
         double[][]   pairwiseProb = new double[nrClass][];
         for (int m = 0; m < nrClass; m++)
         {
             pairwiseProb[m] = new double[nrClass];
         }
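          // Pairwise coupling: each decision value is mapped to a probability with
          // Platt's sigmoid (probA/probB were fitted during training) and clipped to
          // [minProb, 1 - minProb]; MulticlassProbability then couples the
          // nrClass*(nrClass-1)/2 pairwise estimates into per-class probabilities.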
         int k = 0;
         for (int i = 0; i < nrClass; i++)
         {
             for (int j = i + 1; j < nrClass; j++)
             {
                 pairwiseProb[i][j] = Math.Min(Math.Max(SigmoidPredict(decValues[k], model.probA[k], model.probB[k]), minProb),
                                               1 - minProb);
                 pairwiseProb[j][i] = 1 - pairwiseProb[i][j];
                 k++;
             }
         }
         MulticlassProbability(nrClass, pairwiseProb, probEstimates);
         int probMaxIdx = 0;
         for (int i = 1; i < nrClass; i++)
         {
             if (probEstimates[i] > probEstimates[probMaxIdx])
             {
                 probMaxIdx = i;
             }
         }
         return(model.label[probMaxIdx]);
     }
     return(SvmPredict(model, x));
 }
Example no. 7
        // Cross-validation decision values for probability estimates
        internal static void SvmBinarySvcProbability(SvmProblem prob, SvmParameter param, double cp, double cn,
                                                     IList<double> probAb)
        {
            int       i;
            const int nrFold = 5;

            int[]    perm      = new int[prob.Count];
            double[] decValues = new double[prob.Count];
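            // Platt scaling: gather out-of-fold decision values with 5-fold cross-validation,
            // then fit the sigmoid parameters A and B on them (returned through probAb).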
            // random shuffle
            for (i = 0; i < prob.Count; i++)
            {
                perm[i] = i;
            }
            for (i = 0; i < prob.Count; i++)
            {
                int j = i + rand.Next(prob.Count - i);
                int tmp = perm[i];
                perm[i] = perm[j];
                perm[j] = tmp;
            }
            for (i = 0; i < nrFold; i++)
            {
                int        begin = i * prob.Count / nrFold;
                int        end   = (i + 1) * prob.Count / nrFold;
                int        j;
                int        count   = prob.Count - (end - begin);
                SvmProblem subprob = new SvmProblem {
                    x = new BaseVector[count], y = new float[count]
                };
                int k = 0;
                for (j = 0; j < begin; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                for (j = end; j < prob.Count; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                int pCount = 0, nCount = 0;
                for (j = 0; j < k; j++)
                {
                    if (subprob.y[j] > 0)
                    {
                        pCount++;
                    }
                    else
                    {
                        nCount++;
                    }
                }
                if (pCount == 0 && nCount == 0)
                {
                    for (j = begin; j < end; j++)
                    {
                        decValues[perm[j]] = 0;
                    }
                }
                else if (pCount > 0 && nCount == 0)
                {
                    for (j = begin; j < end; j++)
                    {
                        decValues[perm[j]] = 1;
                    }
                }
                else if (pCount == 0 && nCount > 0)
                {
                    for (j = begin; j < end; j++)
                    {
                        decValues[perm[j]] = -1;
                    }
                }
                else
                {
                    SvmParameter subparam = (SvmParameter)param.Clone();
                    subparam.probability    = false;
                    subparam.c              = 1.0;
                    subparam.nrWeight       = 2;
                    subparam.weightLabel    = new int[2];
                    subparam.weight         = new double[2];
                    subparam.weightLabel[0] = +1;
                    subparam.weightLabel[1] = -1;
                    subparam.weight[0]      = cp;
                    subparam.weight[1]      = cn;
                    SvmModel submodel = SvmTrain(subprob, subparam);
                    for (j = begin; j < end; j++)
                    {
                        double[] decValue = new double[1];
                        SvmPredictValues(submodel, prob.x[perm[j]], decValue);
                        decValues[perm[j]] = decValue[0];
                        // ensure +1/-1 orientation of the decision value; this is the reason for not reusing the CV subroutine
                        decValues[perm[j]] *= submodel.label[0];
                    }
                }
            }
            SigmoidTrain(prob.Count, decValues, prob.y, probAb);
        }
Example no. 8
        public static float SvmPredictValues(SvmModel model, BaseVector x, double[] decValues)
        {
            if (model.l == 0)
            {
                return(float.NaN);
            }
            if (model.param.svmType == SvmType.OneClass || model.param.svmType == SvmType.EpsilonSvr ||
                model.param.svmType == SvmType.NuSvr)
            {
                double[] svCoef = model.svCoef[0];
                double   sum    = 0;
                for (int i = 0; i < model.l; i++)
                {
                    sum += svCoef[i] * KFunction(x, model.sv[i], model.param);
                }
                sum         -= model.rho[0];
                decValues[0] = sum;
                if (model.param.svmType == SvmType.OneClass)
                {
                    return(sum > 0 ? 1 : -1);
                }
                return((float)sum);
            }
            int nrClass = model.nrClass;
            int l       = model.l;

            double[] kvalue = new double[l];
            for (int i = 0; i < l; i++)
            {
                kvalue[i] = KFunction(x, model.sv[i], model.param);
            }
            int[] start = new int[nrClass];
            start[0] = 0;
            for (int i = 1; i < nrClass; i++)
            {
                start[i] = start[i - 1] + model.nSv[i - 1];
            }
            int[] vote = new int[nrClass];
            for (int i = 0; i < nrClass; i++)
            {
                vote[i] = 0;
            }
            int p = 0;
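            // One-vs-one voting: decValues receives the nrClass*(nrClass-1)/2 pairwise
            // decision values in the order (0,1), (0,2), ..., (1,2), ...; each pairwise
            // classifier casts one vote and the label with the most votes is returned.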

            for (int i = 0; i < nrClass; i++)
            {
                for (int j = i + 1; j < nrClass; j++)
                {
                    double   sum = 0;
                    int      si  = start[i];
                    int      sj  = start[j];
                    int      ci  = model.nSv[i];
                    int      cj  = model.nSv[j];
                    int      k;
                    double[] coef1 = model.svCoef[j - 1];
                    double[] coef2 = model.svCoef[i];
                    for (k = 0; k < ci; k++)
                    {
                        sum += coef1[si + k] * kvalue[si + k];
                    }
                    for (k = 0; k < cj; k++)
                    {
                        sum += coef2[sj + k] * kvalue[sj + k];
                    }
                    sum         -= model.rho[p];
                    decValues[p] = sum;
                    if (decValues[p] > 0)
                    {
                        ++vote[i];
                    }
                    else
                    {
                        ++vote[j];
                    }
                    p++;
                }
            }
            int voteMaxIdx = 0;

            for (int i = 1; i < nrClass; i++)
            {
                if (vote[i] > vote[voteMaxIdx])
                {
                    voteMaxIdx = i;
                }
            }
            return(model.label[voteMaxIdx]);
        }
Example no. 9
 public static void SvmGetLabels(SvmModel model, int[] label)
 {
     if (model.label != null){
         for (int i = 0; i < model.nrClass; i++){
             label[i] = model.label[i];
         }
     }
 }
 public SvmClassificationModel(SvmModel[] models, bool[] invert)
 {
     this.models = models;
     this.invert = invert;
 }
Example no. 11
 //
 // Interface functions
 //
 public static SvmModel SvmTrain(SvmProblem prob, SvmParameter param)
 {
     SvmModel model = new SvmModel{param = param};
     if (param.svmType == SvmType.OneClass || param.svmType == SvmType.EpsilonSvr || param.svmType == SvmType.NuSvr){
         // regression or one-class-svm
         model.nrClass = 2;
         model.label = null;
         model.nSv = null;
         model.probA = null;
         model.probB = null;
         model.svCoef = new double[1][];
         if (param.probability && (param.svmType == SvmType.EpsilonSvr || param.svmType == SvmType.NuSvr)){
             model.probA = new double[1];
             model.probA[0] = SvmSvrProbability(prob, param);
         }
         DecisionFunction f = SvmTrainOne(prob, param, 0, 0);
         model.rho = new double[1];
         model.rho[0] = f.rho;
         int nSv = 0;
         int i;
         for (i = 0; i < prob.Count; i++){
             if (Math.Abs(f.alpha[i]) > 0){
                 ++nSv;
             }
         }
         model.l = nSv;
         model.sv = new BaseVector[nSv];
         model.svCoef[0] = new double[nSv];
         int j = 0;
         for (i = 0; i < prob.Count; i++){
             if (Math.Abs(f.alpha[i]) > 0){
                 model.sv[j] = prob.x[i];
                 model.svCoef[0][j] = f.alpha[i];
                 ++j;
             }
         }
     } else{
         // classification
         int l = prob.Count;
         int[] tmpNrClass = new int[1];
         int[][] tmpLabel = new int[1][];
         int[][] tmpStart = new int[1][];
         int[][] tmpCount = new int[1][];
         int[] perm = new int[l];
         // group training data of the same class
         SvmGroupClasses(prob, tmpNrClass, tmpLabel, tmpStart, tmpCount, perm);
         int nrClass = tmpNrClass[0];
         int[] label = tmpLabel[0];
         int[] start = tmpStart[0];
         int[] count = tmpCount[0];
         if (nrClass == 1){
             Info("WARNING: training data in only one class. See README for details.\n");
         }
         BaseVector[] x = new BaseVector[l];
         int i;
         for (i = 0; i < l; i++){
             x[i] = prob.x[perm[i]];
         }
         // calculate weighted C
         double[] weightedC = new double[nrClass];
         for (i = 0; i < nrClass; i++){
             weightedC[i] = param.c;
         }
         for (i = 0; i < param.nrWeight; i++){
             int j;
             for (j = 0; j < nrClass; j++){
                 if (param.weightLabel[i] == label[j]){
                     break;
                 }
             }
             if (j == nrClass){
                 Info("WARNING: class label " + param.weightLabel[i] + " specified in weight is not found\n");
             } else{
                 weightedC[j] *= param.weight[i];
             }
         }
         // train k*(k-1)/2 models
         bool[] nonzero = new bool[l];
         for (i = 0; i < l; i++){
             nonzero[i] = false;
         }
         DecisionFunction[] f = new DecisionFunction[nrClass*(nrClass - 1)/2];
         double[] probA = null, probB = null;
         if (param.probability){
             probA = new double[nrClass*(nrClass - 1)/2];
             probB = new double[nrClass*(nrClass - 1)/2];
         }
         int p = 0;
         for (i = 0; i < nrClass; i++){
             for (int j = i + 1; j < nrClass; j++){
                 int si = start[i], sj = start[j];
                 int ci = count[i], cj = count[j];
                 int c = ci + cj;
                 SvmProblem subProb = new SvmProblem{x = new BaseVector[c], y = new float[c]};
                 int k;
                 for (k = 0; k < ci; k++){
                     subProb.x[k] = x[si + k];
                     subProb.y[k] = +1;
                 }
                 for (k = 0; k < cj; k++){
                     subProb.x[ci + k] = x[sj + k];
                     subProb.y[ci + k] = -1;
                 }
                 if (param.probability){
                     double[] probAb = new double[2];
                     SvmBinarySvcProbability(subProb, param, weightedC[i], weightedC[j], probAb);
                     probA[p] = probAb[0];
                     probB[p] = probAb[1];
                 }
                 f[p] = SvmTrainOne(subProb, param, weightedC[i], weightedC[j]);
                 for (k = 0; k < ci; k++){
                     if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0){
                         nonzero[si + k] = true;
                     }
                 }
                 for (k = 0; k < cj; k++){
                     if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0){
                         nonzero[sj + k] = true;
                     }
                 }
                 ++p;
             }
         }
         // build output
         model.nrClass = nrClass;
         model.label = new int[nrClass];
         for (i = 0; i < nrClass; i++){
             model.label[i] = label[i];
         }
         model.rho = new double[nrClass*(nrClass - 1)/2];
         for (i = 0; i < nrClass*(nrClass - 1)/2; i++){
             model.rho[i] = f[i].rho;
         }
         if (param.probability){
             model.probA = new double[nrClass*(nrClass - 1)/2];
             model.probB = new double[nrClass*(nrClass - 1)/2];
             for (i = 0; i < nrClass*(nrClass - 1)/2; i++){
                 model.probA[i] = probA[i];
                 model.probB[i] = probB[i];
             }
         } else{
             model.probA = null;
             model.probB = null;
         }
         int nnz = 0;
         int[] nzCount = new int[nrClass];
         model.nSv = new int[nrClass];
         for (i = 0; i < nrClass; i++){
             int nSv = 0;
             for (int j = 0; j < count[i]; j++){
                 if (nonzero[start[i] + j]){
                     ++nSv;
                     ++nnz;
                 }
             }
             model.nSv[i] = nSv;
             nzCount[i] = nSv;
         }
         Info("Total nSV = " + nnz + "\n");
         model.l = nnz;
         model.sv = new BaseVector[nnz];
         p = 0;
         for (i = 0; i < l; i++){
             if (nonzero[i]){
                 model.sv[p++] = x[i];
             }
         }
         int[] nzStart = new int[nrClass];
         nzStart[0] = 0;
         for (i = 1; i < nrClass; i++){
             nzStart[i] = nzStart[i - 1] + nzCount[i - 1];
         }
         model.svCoef = new double[nrClass - 1][];
         for (i = 0; i < nrClass - 1; i++){
             model.svCoef[i] = new double[nnz];
         }
         p = 0;
         for (i = 0; i < nrClass; i++){
             for (int j = i + 1; j < nrClass; j++){
                 // classifier (i,j): coefficients with
                 // i are in sv_coef[j-1][nz_start[i]...],
                 // j are in sv_coef[i][nz_start[j]...]
                 int si = start[i];
                 int sj = start[j];
                 int ci = count[i];
                 int cj = count[j];
                 int q = nzStart[i];
                 int k;
                 for (k = 0; k < ci; k++){
                     if (nonzero[si + k]){
                         model.svCoef[j - 1][q++] = f[p].alpha[k];
                     }
                 }
                 q = nzStart[j];
                 for (k = 0; k < cj; k++){
                     if (nonzero[sj + k]){
                         model.svCoef[i][q++] = f[p].alpha[ci + k];
                     }
                 }
                 ++p;
             }
         }
     }
     return model;
 }
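A minimal end-to-end sketch of the interface (an assumption, not part of the examples): it builds a tiny two-class problem, trains a C-SVC model and predicts a label. The concrete BaseVector instances and the kernel setup are placeholders for whatever the host library provides, and further SvmParameter fields (tolerances, caching, weights) may need to be set in practice:

     SvmProblem prob = new SvmProblem{
         x = new BaseVector[]{ v0, v1, v2, v3 },   // BaseVector instances supplied by the host code
         y = new float[]{ +1, +1, -1, -1 }
     };
     SvmParameter param = new SvmParameter{
         svmType = SvmType.CSvc,
         c = 1.0,
         probability = false
         // kernelFunction = ... (a kernel implementation, e.g. obtained via KernelFunctions)
     };
     SvmModel model = SvmMain.SvmTrain(prob, param);
     float predicted = SvmMain.SvmPredict(model, prob.x[0]);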
Example no. 12
 public static float SvmPredictValues(SvmModel model, BaseVector x, double[] decValues)
 {
     if (model.l == 0){
         return float.NaN;
     }
     if (model.param.svmType == SvmType.OneClass || model.param.svmType == SvmType.EpsilonSvr ||
         model.param.svmType == SvmType.NuSvr){
         double[] svCoef = model.svCoef[0];
         double sum = 0;
         for (int i = 0; i < model.l; i++){
             sum += svCoef[i]*KFunction(x, model.sv[i], model.param);
         }
         sum -= model.rho[0];
         decValues[0] = sum;
         if (model.param.svmType == SvmType.OneClass){
             return sum > 0 ? 1 : -1;
         }
         return (float) sum;
     }
     int nrClass = model.nrClass;
     int l = model.l;
     double[] kvalue = new double[l];
     for (int i = 0; i < l; i++){
         kvalue[i] = KFunction(x, model.sv[i], model.param);
     }
     int[] start = new int[nrClass];
     start[0] = 0;
     for (int i = 1; i < nrClass; i++){
         start[i] = start[i - 1] + model.nSv[i - 1];
     }
     int[] vote = new int[nrClass];
     for (int i = 0; i < nrClass; i++){
         vote[i] = 0;
     }
     int p = 0;
     for (int i = 0; i < nrClass; i++){
         for (int j = i + 1; j < nrClass; j++){
             double sum = 0;
             int si = start[i];
             int sj = start[j];
             int ci = model.nSv[i];
             int cj = model.nSv[j];
             int k;
             double[] coef1 = model.svCoef[j - 1];
             double[] coef2 = model.svCoef[i];
             for (k = 0; k < ci; k++){
                 sum += coef1[si + k]*kvalue[si + k];
             }
             for (k = 0; k < cj; k++){
                 sum += coef2[sj + k]*kvalue[sj + k];
             }
             sum -= model.rho[p];
             decValues[p] = sum;
             if (decValues[p] > 0){
                 ++vote[i];
             } else{
                 ++vote[j];
             }
             p++;
         }
     }
     int voteMaxIdx = 0;
     for (int i = 1; i < nrClass; i++){
         if (vote[i] > vote[voteMaxIdx]){
             voteMaxIdx = i;
         }
     }
     return model.label[voteMaxIdx];
 }
Example no. 13
 public static float SvmPredictProbability(SvmModel model, BaseVector x, double[] probEstimates)
 {
     if ((model.param.svmType == SvmType.CSvc || model.param.svmType == SvmType.NuSvc) && model.probA != null &&
         model.probB != null){
         int nrClass = model.nrClass;
         double[] decValues = new double[nrClass*(nrClass - 1)/2];
         SvmPredictValues(model, x, decValues);
         const double minProb = 1e-7;
         double[][] pairwiseProb = new double[nrClass][];
         for (int m = 0; m < nrClass; m++){
             pairwiseProb[m] = new double[nrClass];
         }
         int k = 0;
         for (int i = 0; i < nrClass; i++){
             for (int j = i + 1; j < nrClass; j++){
                 pairwiseProb[i][j] = Math.Min(Math.Max(SigmoidPredict(decValues[k], model.probA[k], model.probB[k]), minProb),
                     1 - minProb);
                 pairwiseProb[j][i] = 1 - pairwiseProb[i][j];
                 k++;
             }
         }
         MulticlassProbability(nrClass, pairwiseProb, probEstimates);
         int probMaxIdx = 0;
         for (int i = 1; i < nrClass; i++){
             if (probEstimates[i] > probEstimates[probMaxIdx]){
                 probMaxIdx = i;
             }
         }
         return model.label[probMaxIdx];
     }
     return SvmPredict(model, x);
 }
Example no. 14
 public static float SvmPredict(SvmModel model, BaseVector x)
 {
     int nrClass = model.nrClass;
     double[] decValues;
     if (model.param.svmType == SvmType.OneClass || model.param.svmType == SvmType.EpsilonSvr ||
         model.param.svmType == SvmType.NuSvr){
         decValues = new double[1];
     } else{
         decValues = new double[nrClass*(nrClass - 1)/2];
     }
     float predResult = SvmPredictValues(model, x, decValues);
     return predResult;
 }
Example no. 15
 public static double SvmGetSvrProbability(SvmModel model)
 {
     if ((model.param.svmType == SvmType.EpsilonSvr || model.param.svmType == SvmType.NuSvr) && model.probA != null){
         return model.probA[0];
     }
     Info("Model doesn't contain information for SVR probability inference\n");
     return 0;
 }
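For regression models trained with param.probability enabled, the stored value is commonly interpreted as the parameter of a Laplace noise model fitted to cross-validated residuals; a minimal sketch of reading it (class name assumed as in the other examples):

     // Only meaningful for EpsilonSvr/NuSvr models trained with probability estimates enabled.
     double sigma = SvmMain.SvmGetSvrProbability(model);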
Example no. 16
        //
        // Interface functions
        //
        public static SvmModel SvmTrain(SvmProblem prob, SvmParameter param)
        {
            SvmModel model = new SvmModel {
                param = param
            };

            if (param.svmType == SvmType.OneClass || param.svmType == SvmType.EpsilonSvr || param.svmType == SvmType.NuSvr)
            {
                // regression or one-class-svm
                model.nrClass = 2;
                model.label   = null;
                model.nSv     = null;
                model.probA   = null;
                model.probB   = null;
                model.svCoef  = new double[1][];
                if (param.probability && (param.svmType == SvmType.EpsilonSvr || param.svmType == SvmType.NuSvr))
                {
                    model.probA    = new double[1];
                    model.probA[0] = SvmSvrProbability(prob, param);
                }
                DecisionFunction f = SvmTrainOne(prob, param, 0, 0);
                model.rho    = new double[1];
                model.rho[0] = f.rho;
                int nSv = 0;
                int i;
                for (i = 0; i < prob.Count; i++)
                {
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        ++nSv;
                    }
                }
                model.l         = nSv;
                model.sv        = new BaseVector[nSv];
                model.svCoef[0] = new double[nSv];
                int j = 0;
                for (i = 0; i < prob.Count; i++)
                {
                    if (Math.Abs(f.alpha[i]) > 0)
                    {
                        model.sv[j]        = prob.x[i];
                        model.svCoef[0][j] = f.alpha[i];
                        ++j;
                    }
                }
            }
            else
            {
                // classification
                int     l          = prob.Count;
                int[]   tmpNrClass = new int[1];
                int[][] tmpLabel   = new int[1][];
                int[][] tmpStart   = new int[1][];
                int[][] tmpCount   = new int[1][];
                int[]   perm       = new int[l];
                // group training data of the same class
                SvmGroupClasses(prob, tmpNrClass, tmpLabel, tmpStart, tmpCount, perm);
                int   nrClass = tmpNrClass[0];
                int[] label   = tmpLabel[0];
                int[] start   = tmpStart[0];
                int[] count   = tmpCount[0];
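                // After grouping: label[c] is the c-th distinct class label, and the samples
                // of class c occupy the contiguous index range [start[c], start[c] + count[c])
                // of the permutation perm.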
                if (nrClass == 1)
                {
                    Info("WARNING: training data in only one class. See README for details.\n");
                }
                BaseVector[] x = new BaseVector[l];
                int          i;
                for (i = 0; i < l; i++)
                {
                    x[i] = prob.x[perm[i]];
                }
                // calculate weighted C
                double[] weightedC = new double[nrClass];
                for (i = 0; i < nrClass; i++)
                {
                    weightedC[i] = param.c;
                }
                for (i = 0; i < param.nrWeight; i++)
                {
                    int j;
                    for (j = 0; j < nrClass; j++)
                    {
                        if (param.weightLabel[i] == label[j])
                        {
                            break;
                        }
                    }
                    if (j == nrClass)
                    {
                        Info("WARNING: class label " + param.weightLabel[i] + " specified in weight is not found\n");
                    }
                    else
                    {
                        weightedC[j] *= param.weight[i];
                    }
                }
                // train k*(k-1)/2 models
                bool[] nonzero = new bool[l];
                for (i = 0; i < l; i++)
                {
                    nonzero[i] = false;
                }
                DecisionFunction[] f     = new DecisionFunction[nrClass * (nrClass - 1) / 2];
                double[]           probA = null, probB = null;
                if (param.probability)
                {
                    probA = new double[nrClass * (nrClass - 1) / 2];
                    probB = new double[nrClass * (nrClass - 1) / 2];
                }
                int p = 0;
                for (i = 0; i < nrClass; i++)
                {
                    for (int j = i + 1; j < nrClass; j++)
                    {
                        int        si = start[i], sj = start[j];
                        int        ci = count[i], cj = count[j];
                        int        c       = ci + cj;
                        SvmProblem subProb = new SvmProblem {
                            x = new BaseVector[c], y = new float[c]
                        };
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            subProb.x[k] = x[si + k];
                            subProb.y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            subProb.x[ci + k] = x[sj + k];
                            subProb.y[ci + k] = -1;
                        }
                        if (param.probability)
                        {
                            double[] probAb = new double[2];
                            SvmBinarySvcProbability(subProb, param, weightedC[i], weightedC[j], probAb);
                            probA[p] = probAb[0];
                            probB[p] = probAb[1];
                        }
                        f[p] = SvmTrainOne(subProb, param, weightedC[i], weightedC[j]);
                        for (k = 0; k < ci; k++)
                        {
                            if (!nonzero[si + k] && Math.Abs(f[p].alpha[k]) > 0)
                            {
                                nonzero[si + k] = true;
                            }
                        }
                        for (k = 0; k < cj; k++)
                        {
                            if (!nonzero[sj + k] && Math.Abs(f[p].alpha[ci + k]) > 0)
                            {
                                nonzero[sj + k] = true;
                            }
                        }
                        ++p;
                    }
                }
                // build output
                model.nrClass = nrClass;
                model.label   = new int[nrClass];
                for (i = 0; i < nrClass; i++)
                {
                    model.label[i] = label[i];
                }
                model.rho = new double[nrClass * (nrClass - 1) / 2];
                for (i = 0; i < nrClass * (nrClass - 1) / 2; i++)
                {
                    model.rho[i] = f[i].rho;
                }
                if (param.probability)
                {
                    model.probA = new double[nrClass * (nrClass - 1) / 2];
                    model.probB = new double[nrClass * (nrClass - 1) / 2];
                    for (i = 0; i < nrClass * (nrClass - 1) / 2; i++)
                    {
                        model.probA[i] = probA[i];
                        model.probB[i] = probB[i];
                    }
                }
                else
                {
                    model.probA = null;
                    model.probB = null;
                }
                int   nnz     = 0;
                int[] nzCount = new int[nrClass];
                model.nSv = new int[nrClass];
                for (i = 0; i < nrClass; i++)
                {
                    int nSv = 0;
                    for (int j = 0; j < count[i]; j++)
                    {
                        if (nonzero[start[i] + j])
                        {
                            ++nSv;
                            ++nnz;
                        }
                    }
                    model.nSv[i] = nSv;
                    nzCount[i]   = nSv;
                }
                Info("Total nSV = " + nnz + "\n");
                model.l  = nnz;
                model.sv = new BaseVector[nnz];
                p        = 0;
                for (i = 0; i < l; i++)
                {
                    if (nonzero[i])
                    {
                        model.sv[p++] = x[i];
                    }
                }
                int[] nzStart = new int[nrClass];
                nzStart[0] = 0;
                for (i = 1; i < nrClass; i++)
                {
                    nzStart[i] = nzStart[i - 1] + nzCount[i - 1];
                }
                model.svCoef = new double[nrClass - 1][];
                for (i = 0; i < nrClass - 1; i++)
                {
                    model.svCoef[i] = new double[nnz];
                }
                p = 0;
                for (i = 0; i < nrClass; i++)
                {
                    for (int j = i + 1; j < nrClass; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]
                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];
                        int q  = nzStart[i];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            if (nonzero[si + k])
                            {
                                model.svCoef[j - 1][q++] = f[p].alpha[k];
                            }
                        }
                        q = nzStart[j];
                        for (k = 0; k < cj; k++)
                        {
                            if (nonzero[sj + k])
                            {
                                model.svCoef[i][q++] = f[p].alpha[ci + k];
                            }
                        }
                        ++p;
                    }
                }
            }
            return(model);
        }
Example no. 17
        // Stratified cross validation
        public static void SvmCrossValidation(SvmProblem prob, SvmParameter param, int nrFold, double[] target)
        {
            int i;

            int[] foldStart = new int[nrFold + 1];
            int   l         = prob.Count;

            int[] perm = new int[l];
            // stratified cv may not give leave-one-out rate
            // Each class to l folds -> some folds may have zero elements
            if ((param.svmType == SvmType.CSvc || param.svmType == SvmType.NuSvc) && nrFold < l)
            {
                int[]   tmpNrClass = new int[1];
                int[][] tmpLabel   = new int[1][];
                int[][] tmpStart   = new int[1][];
                int[][] tmpCount   = new int[1][];
                SvmGroupClasses(prob, tmpNrClass, tmpLabel, tmpStart, tmpCount, perm);
                int   nrClass = tmpNrClass[0];
                int[] start   = tmpStart[0];
                int[] count   = tmpCount[0];
                // random shuffle and then data grouped by fold using the array perm
                int[] foldCount = new int[nrFold];
                int[] index     = new int[l];
                for (i = 0; i < l; i++)
                {
                    index[i] = perm[i];
                }
                for (int c = 0; c < nrClass; c++)
                {
                    for (i = 0; i < count[c]; i++)
                    {
                        int j = i + rand.Next(count[c] - i);
                        int tmp = index[start[c] + j];
                        index[start[c] + j] = index[start[c] + i];
                        index[start[c] + i] = tmp;
                    }
                }
                for (i = 0; i < nrFold; i++)
                {
                    foldCount[i] = 0;
                    for (int c = 0; c < nrClass; c++)
                    {
                        foldCount[i] += (i + 1) * count[c] / nrFold - i * count[c] / nrFold;
                    }
                }
                foldStart[0] = 0;
                for (i = 1; i <= nrFold; i++)
                {
                    foldStart[i] = foldStart[i - 1] + foldCount[i - 1];
                }
                for (int c = 0; c < nrClass; c++)
                {
                    for (i = 0; i < nrFold; i++)
                    {
                        int begin = start[c] + i * count[c] / nrFold;
                        int end   = start[c] + (i + 1) * count[c] / nrFold;
                        for (int j = begin; j < end; j++)
                        {
                            perm[foldStart[i]] = index[j];
                            foldStart[i]++;
                        }
                    }
                }
                foldStart[0] = 0;
                for (i = 1; i <= nrFold; i++)
                {
                    foldStart[i] = foldStart[i - 1] + foldCount[i - 1];
                }
            }
            else
            {
                for (i = 0; i < l; i++)
                {
                    perm[i] = i;
                }
                for (i = 0; i < l; i++)
                {
                    int j = i + rand.Next(l - i);
                    int tmp = perm[i];
                    perm[i] = perm[j];
                    perm[j] = tmp;
                }
                for (i = 0; i <= nrFold; i++)
                {
                    foldStart[i] = i * l / nrFold;
                }
            }
            for (i = 0; i < nrFold; i++)
            {
                int        begin = foldStart[i];
                int        end   = foldStart[i + 1];
                int        j;
                int        count   = l - (end - begin);
                SvmProblem subprob = new SvmProblem {
                    x = new BaseVector[count], y = new float[count]
                };
                int k = 0;
                for (j = 0; j < begin; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                for (j = end; j < l; j++)
                {
                    subprob.x[k] = prob.x[perm[j]];
                    subprob.y[k] = prob.y[perm[j]];
                    ++k;
                }
                SvmModel submodel = SvmTrain(subprob, param);
                if (param.probability && (param.svmType == SvmType.CSvc || param.svmType == SvmType.NuSvc))
                {
                    double[] probEstimates = new double[submodel.nrClass];
                    for (j = begin; j < end; j++)
                    {
                        target[perm[j]] = SvmPredictProbability(submodel, prob.x[perm[j]], probEstimates);
                    }
                }
                else
                {
                    for (j = begin; j < end; j++)
                    {
                        target[perm[j]] = SvmPredict(submodel, prob.x[perm[j]]);
                    }
                }
            }
        }
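A minimal sketch of driving the cross-validation routine (an assumption, not from the listing; prob and param are set up as in the other examples): target receives one out-of-fold prediction per sample, so classification accuracy can be computed directly:

     double[] target = new double[prob.Count];
     SvmMain.SvmCrossValidation(prob, param, 5, target);
     int correct = 0;
     for (int i = 0; i < prob.Count; i++){
         if ((int)target[i] == (int)prob.y[i]){
             correct++;
         }
     }
     double accuracy = correct / (double)prob.Count;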
Example no. 18
 public static int SvmCheckProbabilityModel(SvmModel model)
 {
     if (((model.param.svmType == SvmType.CSvc || model.param.svmType == SvmType.NuSvc) && model.probA != null &&
         model.probB != null) ||
         ((model.param.svmType == SvmType.EpsilonSvr || model.param.svmType == SvmType.NuSvr) && model.probA != null)){
         return 1;
     }
     return 0;
 }