Example #1
        public override ClassificationModel Train(BaseVector[] x, int[][] y, int ngroups, Parameters param, int nthreads,
                                                  Action <double> reportProgress)
        {
            string err = CheckInput(x, y, ngroups);

            if (err != null)
            {
                throw new Exception(err);
            }
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.CSvc,
                c = param.GetParam <double>("C").Value
            };

            bool[]            invert;
            SvmProblem[]      problems = CreateProblems(x, y, ngroups, out invert);
            SvmModel[]        models   = new SvmModel[problems.Length];
            ThreadDistributor td       = new ThreadDistributor(nthreads, models.Length,
                                                               i => { models[i] = SvmMain.SvmTrain(problems[i], sp); }, fractionDone => { reportProgress?.Invoke(fractionDone); });

            td.Start();
            return(new SvmClassificationModel(models, invert));
        }
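
The example above fans per-problem training out over nthreads workers with ThreadDistributor and reports progress through the optional callback. As a rough sketch of the same fan-out using only standard .NET primitives (SvmProblem, SvmParameter, sp and SvmMain.SvmTrain are taken from the surrounding examples; the progress callback is omitted, and this is an illustration rather than the library's own code):

        // requires System.Threading.Tasks; trains each binary sub-problem on its own worker
        SvmModel[] models = new SvmModel[problems.Length];
        Parallel.For(0, problems.Length,
                     new ParallelOptions { MaxDegreeOfParallelism = nthreads },
                     i => { models[i] = SvmMain.SvmTrain(problems[i], sp); });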
Example #2
        public RegressionModel Train(BaseVector[] x, float[] y, Parameters param, int nthreads)
        {
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.EpsilonSvr,
                c = param.GetParam <double>("C").Value
            };
            SvmModel model = SvmMain.SvmTrain(new SvmProblem(x, y), sp);

            return(new SvmRegressionModel(model));
        }
Example #3
        public override RegressionModel Train(BaseVector[] x, int[] nominal, double[] y, Parameters param, int nthreads, Action <double> reportProgress)
        {
            x = ClassificationMethod.ToOneHotEncoding(x, nominal);
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.EpsilonSvr,
                c = param.GetParam <double>("C").Value
            };
            SvmModel model = SvmMain.SvmTrain(new SvmProblem(x, y), sp);

            return(new SvmRegressionModel(model));
        }
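
Before training, Example #3 expands every nominal column through ClassificationMethod.ToOneHotEncoding so that the epsilon-SVR kernel only sees numeric features. Conceptually, a nominal value drawn from n levels becomes an n-dimensional indicator vector; the helper below is only an illustration of that idea, not the library method itself:

        // illustration only: one-hot encode a single nominal value with nLevels possible levels
        static double[] OneHot(int value, int nLevels)
        {
            double[] encoded = new double[nLevels]; // all zeros
            encoded[value] = 1.0;                   // 1 at the position of the observed level
            return encoded;
        }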
Example #4
        private SvmModel LoadTrainingDataMultiple(Dictionary <double[], int> data)
        {
            var model = new SvmModel();

            model.Inputs  = new double[data.Count][];
            model.Outputs = new int[data.Count];

            var i = 0;

            foreach (var item in data)
            {
                model.Inputs[i]  = item.Key;
                model.Outputs[i] = item.Value;
                i++;
            }

            return(model);
        }
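
A hypothetical call to the helper above; the feature vectors and labels are made up, and only the Inputs/Outputs properties shown in this snippet are assumed to exist on this SvmModel:

        var data = new Dictionary<double[], int>
        {
            { new[] { 0.0, 1.0 }, -1 },
            { new[] { 1.0, 0.0 }, +1 }
        };
        SvmModel model = LoadTrainingDataMultiple(data);
        // model.Inputs holds the two feature vectors and model.Outputs the labels, in enumeration order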
Example #5
 public SvmRegressionModel(SvmModel model)
 {
     this.model = model;
 }
Example #6
    public OneClassModel(SvmModel model)
      : base(model)
    {

    }
Example #7
 public ClassificationModel(SvmModel model)
     : base(model)
 {
 }
Example #8
        //
        // Interface functions
        //
        public static SvmModel Train(SvmProblem prob, SvmParameter param)
        {
            var model = new SvmModel();
            model.Param = param;

            if (param.SvmType.IsSVROrOneClass())
            {
                // regression or one-class-svm
                model.NrClass = 2;
                model.Label = null;
                model.SupportVectorsNumbers = null;
                model.ProbA = null;
                model.ProbB = null;
                model.SupportVectorsCoefficients = new double[1][];

                if (param.Probability && param.SvmType.IsSVR())
                {
                    model.ProbA = new double[1];
                    model.ProbA[0] = svm_svr_probability(prob, param);
                }

                DecisionFunction f = svm_train_one(prob, param, 0, 0);
                model.Rho = new double[1];
                model.Rho[0] = f.Rho;

                int nSV = 0;
                int i;
                for (i = 0; i < prob.Lenght; i++)
                    if (Math.Abs(f.Alpha[i]) > 0) ++nSV;
                model.TotalSupportVectorsNumber = nSV;
                model.SupportVectors = new SvmNode[nSV][];
                model.SupportVectorsCoefficients[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.Lenght; i++)
                    if (Math.Abs(f.Alpha[i]) > 0)
                    {
                        model.SupportVectors[j] = prob.X[i];
                        model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                        ++j;
                    }
            }
            else
            {
                // classification
                int l = prob.Lenght;
                int[] perm = new int[l];

                int nr_class;
                int[] label;
                int[] start;
                int[] count;

                // group training data of the same class
                svm_group_classes(prob, out nr_class, out label, out start, out count, perm);

                SvmNode[][] x = new SvmNode[l][];
                int i;
                for (i = 0; i < l; i++)
                    x[i] = prob.X[perm[i]];

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                    weighted_C[i] = param.C;
                for (i = 0; i < param.WeightsCount; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                        if (param.WeightLabel[i] == label[j])
                            break;
                    if (j == nr_class)
                        Console.Error.WriteLine("warning: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
                    else
                        weighted_C[j] *= param.Weight[i];
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                    nonzero[i] = false;
                var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

                double[] probA = null, probB = null;
                if (param.Probability)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        var subprobLenght = ci + cj;
                        var sub_prob = new SvmProblem
                        {
                            X = new SvmNode[subprobLenght][],
                            Y = new double[subprobLenght]
                        };

                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.X[k] = x[si + k];
                            sub_prob.Y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.X[ci + k] = x[sj + k];
                            sub_prob.Y[ci + k] = -1;
                        }

                        if (param.Probability)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        for (k = 0; k < ci; k++)
                            if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                                nonzero[si + k] = true;
                        for (k = 0; k < cj; k++)
                            if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                                nonzero[sj + k] = true;
                        ++p;
                    }

                // build output

                model.NrClass = nr_class;

                model.Label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                    model.Label[i] = label[i];

                model.Rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    model.Rho[i] = f[i].Rho;

                if (param.Probability)
                {
                    model.ProbA = new double[nr_class * (nr_class - 1) / 2];
                    model.ProbB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.ProbA[i] = probA[i];
                        model.ProbB[i] = probB[i];
                    }
                }
                else
                {
                    model.ProbA = null;
                    model.ProbB = null;
                }

                int nnz = 0;
                int[] nz_count = new int[nr_class];
                model.SupportVectorsNumbers = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    model.SupportVectorsNumbers[i] = nSV;
                    nz_count[i] = nSV;
                }

                Svm.info("Total nSV = " + nnz + "\n");

                model.TotalSupportVectorsNumber = nnz;
                model.SupportVectors = new SvmNode[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                    if (nonzero[i]) model.SupportVectors[p++] = x[i];

                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

                model.SupportVectorsCoefficients = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                    model.SupportVectorsCoefficients[i] = new double[nnz];

                p = 0;
                for (i = 0; i < nr_class; i++)
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                            if (nonzero[si + k])
                                model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                            if (nonzero[sj + k])
                                model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                        ++p;
                    }
            }
            return model;
        }
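
For k classes the classification branch above trains k*(k-1)/2 one-vs-one binary classifiers and stores their Rho, ProbA/ProbB and coefficient rows under a single flat index p, enumerated as (0,1), (0,2), ..., (0,k-1), (1,2), and so on. A small standalone sketch (not part of the library) of how a class pair (i, j) maps to that flat index:

        // flat index of the one-vs-one classifier for class pair (i, j), with 0 <= i < j < nrClass
        static int PairIndex(int nrClass, int i, int j)
        {
            int p = 0;
            for (int a = 0; a < i; a++)
                p += nrClass - 1 - a;   // classifiers whose first class precedes i
            return p + (j - i - 1);     // offset of j within the (i, *) block
        }

For instance, PairIndex(3, 1, 2) returns 2, matching the order in which p is incremented by the nested loops above.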
Example #9
        public static void SaveModel(String model_file_name, SvmModel model)
        {
            using (var fp = new StreamWriter(model_file_name, false))
            {
            var param = model.Param;

            fp.Write("svm_type " + svm_type_table[(int)param.SvmType] + "\n");
            fp.Write("kernel_type " + kernel_type_table[(int)param.KernelType] + "\n");

            if (param.KernelType == KernelType.Poly)
                fp.Write("degree " + param.Degree + "\n");

            if (param.KernelType == KernelType.Poly ||
                param.KernelType == KernelType.Rbf ||
                param.KernelType == KernelType.Sigmoid)
                fp.Write("gamma " + param.Gamma + "\n");

            if (param.KernelType == KernelType.Poly ||
                param.KernelType == KernelType.Sigmoid)
                fp.Write("coef0 " + param.Coef0 + "\n");

            int nr_class = model.NrClass;
            int l = model.TotalSupportVectorsNumber;
            fp.Write("nr_class " + nr_class + "\n");
            fp.Write("total_sv " + l + "\n");

            {
                fp.Write("rho");
                for (int i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    fp.Write(" " + model.Rho[i]);
                fp.Write("\n");
            }

            if (model.Label != null)
            {
                fp.Write("label");
                for (int i = 0; i < nr_class; i++)
                    fp.Write(" " + model.Label[i]);
                fp.Write("\n");
            }

            if (model.ProbA != null) // regression has probA only
            {
                fp.Write("probA");
                for (int i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    fp.Write(" " + model.ProbA[i]);
                fp.Write("\n");
            }
            if (model.ProbB != null)
            {
                fp.Write("probB");
                for (int i = 0; i < nr_class*(nr_class - 1)/2; i++)
                    fp.Write(" " + model.ProbB[i]);
                fp.Write("\n");
            }

            if (model.SupportVectorsNumbers != null)
            {
                fp.Write("nr_sv");
                for (int i = 0; i < nr_class; i++)
                    fp.Write(" " + model.SupportVectorsNumbers[i]);
                fp.Write("\n");
            }

            fp.Write("SV\n");
            double[][] sv_coef = model.SupportVectorsCoefficients;
            SvmNode[][] SV = model.SupportVectors;

            for (int i = 0; i < l; i++)
            {
                for (int j = 0; j < nr_class - 1; j++)
                    fp.Write(sv_coef[j][i] + " ");

                SvmNode[] p = SV[i];
                if (param.KernelType == KernelType.Precomputed)
                    fp.Write("0:" + (int) (p[0].Value));
                else
                    for (int j = 0; j < p.Length; j++)
                        fp.Write(p[j].Index + ":" + p[j].Value + " ");
                fp.Write("\n");
            }

            }
        }
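
SaveModel writes the usual LIBSVM text layout: header lines, then "SV", then one line per support vector holding its nr_class - 1 coefficients followed by index:value pairs. As an illustration only (the numbers are invented, and the "c_svc"/"rbf" names assume the standard LIBSVM entries in svm_type_table and kernel_type_table), a two-class RBF model with three support vectors might be written as:

        svm_type c_svc
        kernel_type rbf
        gamma 0.5
        nr_class 2
        total_sv 3
        rho 0.4321
        label 1 -1
        nr_sv 2 1
        SV
        0.25 1:0.3 2:1.7
        0.75 1:2.1 2:0.4
        -1 1:1.2 2:1.0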
Example #10
        //implement later
        //public static svm_model svm_load_model(String model_file_name)
        //{
        //  return svm_load_model(new BufferedReader(new FileReader(model_file_name)));
        //}
        public static SvmModel LoadModel(StreamReader fp)
        {
            // read parameters

            SvmModel model = new SvmModel();
            SvmParameter param = new SvmParameter();
            model.Param = param;
            model.Rho = null;
            model.ProbA = null;
            model.ProbB = null;
            model.Label = null;
            model.SupportVectors = null;

            while (true)
            {
            String cmd = fp.ReadLine();
            String arg = cmd.Substring(cmd.IndexOf(' ') + 1);

            if (cmd.StartsWith("svm_type"))
            {
                param.SvmType = (SvmType) Enum.Parse(typeof (SvmType), arg, ignoreCase:true);
            }
            else if (cmd.StartsWith("kernel_type"))
            {
                param.KernelType = (KernelType)Enum.Parse(typeof(KernelType), arg, ignoreCase: true);
            }
            else if (cmd.StartsWith("degree"))
                param.Degree = atoi(arg);
            else if (cmd.StartsWith("gamma"))
                param.Gamma = atof(arg);
            else if (cmd.StartsWith("coef0"))
                param.Coef0 = atof(arg);
            else if (cmd.StartsWith("nr_class"))
                model.NrClass = atoi(arg);
            else if (cmd.StartsWith("total_sv"))
                model.TotalSupportVectorsNumber = atoi(arg);
            else if (cmd.StartsWith("rho"))
            {
                int n = model.NrClass*(model.NrClass - 1)/2;
                model.Rho = new double[n];
                StringTokenizer st = new StringTokenizer(arg);
                for (int i = 0; i < n; i++)
                    model.Rho[i] = atof(st.NextToken());
            }
            else if (cmd.StartsWith("label"))
            {
                int n = model.NrClass;
                model.Label = new int[n];
                StringTokenizer st = new StringTokenizer(arg);
                for (int i = 0; i < n; i++)
                    model.Label[i] = atoi(st.NextToken());
            }
            else if (cmd.StartsWith("probA"))
            {
                int n = model.NrClass*(model.NrClass - 1)/2;
                model.ProbA = new double[n];
                StringTokenizer st = new StringTokenizer(arg);
                for (int i = 0; i < n; i++)
                    model.ProbA[i] = atof(st.NextToken());
            }
            else if (cmd.StartsWith("probB"))
            {
                int n = model.NrClass*(model.NrClass - 1)/2;
                model.ProbB = new double[n];
                StringTokenizer st = new StringTokenizer(arg);
                for (int i = 0; i < n; i++)
                    model.ProbB[i] = atof(st.NextToken());
            }
            else if (cmd.StartsWith("nr_sv"))
            {
                int n = model.NrClass;
                model.SupportVectorsNumbers = new int[n];
                StringTokenizer st = new StringTokenizer(arg);
                for (int i = 0; i < n; i++)
                    model.SupportVectorsNumbers[i] = atoi(st.NextToken());
            }
            else if (cmd.StartsWith("SV"))
            {
                break;
            }
            else
            {
                Debug.WriteLine("unknown text in model file: [" + cmd + "]\n");
                return null;
            }
            }

            // read sv_coef and SV

            int m = model.NrClass - 1;
            int l = model.TotalSupportVectorsNumber;
            model.SupportVectorsCoefficients = new double[m][];
            for (int i = 0; i < m; i++)
            {
            model.SupportVectorsCoefficients[i] = new double[l];
            }
            model.SupportVectors = new SvmNode[l][];

            for (int i = 0; i < l; i++)
            {
            String line = fp.ReadLine();
            var st = new StringTokenizer(line, new[] {' ', '\t', '\n', '\r', '\f', ':'});

            for (int k = 0; k < m; k++)
                model.SupportVectorsCoefficients[k][i] = atof(st.NextToken());
            int n = st.CountTokens()/2;
            model.SupportVectors[i] = new SvmNode[n];
            for (int j = 0; j < n; j++)
            {
                model.SupportVectors[i][j] = new SvmNode(atoi(st.NextToken()), atof(st.NextToken()));
            }
            }

            fp.Close();
            return model;
        }
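
A hedged round trip through the three static methods of Examples #8 to #10. The unqualified calls assume all three live on the same static class, and problem, parameters and the file name are placeholders for values built elsewhere; none of this is confirmed by the source:

        // problem and parameters stand in for an SvmProblem / SvmParameter constructed beforehand
        SvmModel trained = Train(problem, parameters);       // Example #8
        SaveModel("svm.model", trained);                     // Example #9
        SvmModel restored;
        using (var reader = new StreamReader("svm.model"))   // requires System.IO
        {
            restored = LoadModel(reader);                    // Example #10; it closes the reader itself
        }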
Example #11
 public RegressionModel(SvmModel model)
     : base(model)
 {
 }
Example #12
 public void Refresh()
 {
     _svmService = null;
     SvmModel    = new SvmModel();
 }
Example #13
 protected ModelBase(SvmModel model)
 {
     _model = model;
 }