Example #1
 public static bool UseNuParameter(this SvmType svm_type)
 {
     // public double nu;	          // for NU_SVC, ONE_CLASS, and NU_SVR
     return(svm_type == SvmType.NU_SVC ||
            svm_type == SvmType.ONE_CLASS ||
            svm_type == SvmType.NU_SVR);
 }
Example #2
 public static bool UseCParameter(this SvmType svm_type)
 {
     // public double C;	          // for C_SVC, EPSILON_SVR and NU_SVR
     return(svm_type == SvmType.C_SVC ||
            svm_type == SvmType.EPSILON_SVR ||
            svm_type == SvmType.NU_SVR);
 }
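A minimal sketch (not taken from the examples above; the method name is illustrative) of how these predicate extensions can gate parameter validation, so only the parameters the chosen SvmType actually uses get checked:
 public static string CheckParameterSketch(SvmType svm_type, double nu, double C)
 {
     // Hypothetical helper: validate nu only for nu-based types, C only for C-based types.
     if (svm_type.UseNuParameter() && (nu <= 0 || nu > 1))
     {
         return("nu <= 0 or nu > 1");
     }
     if (svm_type.UseCParameter() && C <= 0)
     {
         return("C <= 0");
     }
     return(null);
 }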
Example #3
        private double testMulticlassModel(int numberOfClasses, int count, SvmType svm, KernelType kernel, bool probability = false, string outputFile = null)
        {
            Problem        train     = SVMUtilities.CreateMulticlassProblem(numberOfClasses, count);
            Parameter      param     = new Parameter();
            RangeTransform transform = RangeTransform.Compute(train);
            Problem        scaled    = transform.Scale(train);

            param.Gamma       = 1.0 / 3;
            param.SvmType     = svm;
            param.KernelType  = kernel;
            param.Probability = probability;
            if (svm == SvmType.C_SVC)
            {
                for (int i = 0; i < numberOfClasses; i++)
                {
                    param.Weights[i] = 1;
                }
            }

            Model model = Training.Train(scaled, param);

            Problem test = SVMUtilities.CreateMulticlassProblem(numberOfClasses, count, false);

            scaled = transform.Scale(test);
            return(Prediction.Predict(scaled, outputFile, model, false));
        }
Example #4
        public static TrainingHeader Create(KernelType type, SvmType svmType)
        {
            TrainingHeader header = new TrainingHeader();

            header.GridSelection = true;
            header.Normalization = NormalizationType.None;
            header.Kernel        = type;
            header.SvmType       = svmType;
            return(header);
        }
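A possible call site for the factory above (illustrative only); per the method body, the returned header has grid selection enabled and no normalization:
            TrainingHeader header = TrainingHeader.Create(KernelType.RBF, SvmType.C_SVC);
            // header.GridSelection is true and header.Normalization is NormalizationType.None.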
Example #5
        public static double[] PredictProbability(Model model, Node[] x)
        {
            SvmType svmType = Procedures.svm_get_svm_type(model);

            if (svmType != SvmType.C_SVC && svmType != SvmType.NU_SVC)
            {
                throw new Exception("Model type " + svmType + " unable to predict probabilities.");
            }
            int numberOfClasses = Procedures.svm_get_nr_class(model);

            double[] probEstimates = new double[numberOfClasses];
            Procedures.svm_predict_probability(model, x, probEstimates);
            return(probEstimates);
        }
Example #6
        /// <summary>
        /// Predicts a class distribution for the single input vector.
        /// </summary>
        /// <param name="model">Model to use for prediction</param>
        /// <param name="x">The vector for which to predict the class distribution</param>
        /// <returns>A probability distribution over classes</returns>
        public static double[] PredictProbability(Model model, Node[] x)
        {
            SvmType svm_type = Procedures.svm_get_svm_type(model);

            if (svm_type != SvmType.C_SVC && svm_type != SvmType.NU_SVC)
            {
                throw new Exception("Model type " + svm_type + " unable to predict probabilities.");
            }
            int nr_class = Procedures.svm_get_nr_class(model);

            double[] probEstimates = new double[nr_class];
            Procedures.svm_predict_probability(model, x, probEstimates);
            return(probEstimates);
        }
Example #7
        public void TestMulticlassProbability()
        {
            SvmType[]    svmTypes    = new SvmType[] { SvmType.C_SVC, SvmType.NU_SVC };
            KernelType[] kernelTypes = new KernelType[] { KernelType.LINEAR, KernelType.POLY, KernelType.RBF, KernelType.SIGMOID };

            foreach (SvmType svm in svmTypes)
            {
                foreach (KernelType kernel in kernelTypes)
                {
                    double score = testMulticlassModel(8, 100, svm, kernel, true);

                    Assert.AreEqual(1, score, .1, string.Format("SVM {0} with Kernel {1} did not train correctly", svm, kernel));
                }
            }
        }
Example #8
 public Parameter()
 {
     this._svmType     = SvmType.C_SVC;
     this._kernelType  = KernelType.RBF;
     this._degree      = 3;
     this._gamma       = 0.0;
     this._coef0       = 0.0;
     this._nu          = 0.5;
     this._cacheSize   = 40.0;
     this._C           = 1.0;
     this._eps         = 0.001;
     this._p           = 0.1;
     this._shrinking   = true;
     this._probability = false;
     this._weights     = new Dictionary <int, double>();
 }
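Because the constructor only sets defaults, callers typically override a few properties afterwards. A hedged sketch, assuming the property names used elsewhere on this page (SvmType, Nu, Gamma):
 Parameter param = new Parameter();   // defaults to C_SVC with an RBF kernel
 param.SvmType = SvmType.NU_SVC;      // switch to the nu-parameterised classifier
 param.Nu      = 0.3;                 // only consulted because NU_SVC uses the nu parameter
 param.Gamma   = 1.0 / 3;             // kernel width, as in the multiclass tests above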
Example #9
 public override int GetHashCode()
 {
     return(C.GetHashCode() +
            CacheSize.GetHashCode() +
            Coefficient0.GetHashCode() +
            Degree.GetHashCode() +
            EPS.GetHashCode() +
            Gamma.GetHashCode() +
            KernelType.GetHashCode() +
            Nu.GetHashCode() +
            P.GetHashCode() +
            Probability.GetHashCode() +
            Shrinking.GetHashCode() +
            SvmType.GetHashCode() +
            Weights.ToArray().ComputeHashcode());
 }
Example #10
 /// <summary>
 /// Default Constructor.  Gives good default values to all parameters.
 /// </summary>
 public Parameter()
 {
     _svmType     = SvmType.NU_SVR;
     _kernelType  = KernelType.LINEAR;
     _degree      = 3;
     _gamma       = 0; // 1/k
     _coef0       = 0;
     _nu          = 0.5;
     _cacheSize   = 100;
     _C           = 2;
     _eps         = 1e-3;
     _p           = 0.1;
     _shrinking   = true;
     _probability = false;
     _weights     = new Dictionary <int, double>();
 }
Example #11
        public void TestRegression()
        {
            SvmType[] svmTypes = new SvmType[] { SvmType.NU_SVR, SvmType.EPSILON_SVR };
            // LINEAR kernel is pretty horrible for regression
            KernelType[] kernelTypes = new KernelType[] { KernelType.LINEAR, KernelType.RBF, KernelType.SIGMOID };

            foreach (SvmType svm in svmTypes)
            {
                foreach (KernelType kernel in kernelTypes)
                {
                    double error = testRegressionModel(100, svm, kernel);

                    Assert.AreEqual(0, error, 2, string.Format("SVM {0} with Kernel {1} did not train correctly", svm, kernel));
                }
            }
        }
Example #12
        private double testRegressionModel(int count, SvmType svm, KernelType kernel, string outputFile = null)
        {
            Problem        train     = SVMUtilities.CreateRegressionProblem(count);
            Parameter      param     = new Parameter();
            RangeTransform transform = RangeTransform.Compute(train);
            Problem        scaled    = transform.Scale(train);

            param.Gamma      = 1.0 / 2;
            param.SvmType    = svm;
            param.KernelType = kernel;
            param.Degree     = 2;

            Model model = Training.Train(scaled, param);

            Problem test = SVMUtilities.CreateRegressionProblem(count, false);

            scaled = transform.Scale(test);
            return(Prediction.Predict(scaled, outputFile, model, false));
        }
Example #13
        private double testTwoClassModel(int count, SvmType svm, KernelType kernel, bool probability = false, string outputFile = null)
        {
            Problem        train     = SVMUtilities.CreateTwoClassProblem(count);
            Parameter      param     = new Parameter();
            RangeTransform transform = RangeTransform.Compute(train);
            Problem        scaled    = transform.Scale(train);

            param.Gamma       = .5;
            param.SvmType     = svm;
            param.KernelType  = kernel;
            param.Probability = probability;
            if (svm == SvmType.C_SVC)
            {
                param.Weights[-1] = 1;
                param.Weights[1]  = 1;
            }

            Model model = Training.Train(scaled, param);

            Problem test = SVMUtilities.CreateTwoClassProblem(count, false);

            scaled = transform.Scale(test);
            return(Prediction.Predict(scaled, outputFile, model, false));
        }
Example #14
 public static bool IsOneClass(this SvmType svm_type)
 {
     return(svm_type == SvmType.ONE_CLASS);
 }
Example #15
        public void SVMClassifierTrain(List <Sentence> sentences, ClassifyOptions options, SvmType svm = SvmType.C_SVC, KernelType kernel = KernelType.RBF, bool probability = true, string outputFile = null)
        {
            var tfidf = new TfIdfFeatureExtractor();

            tfidf.Dimension = options.Dimension;
            tfidf.Sentences = sentences;
            tfidf.CalBasedOnCategory();
            featuresInTfIdf = tfidf.Keywords();

            // copy test multiclass Model
            Problem train = new Problem();

            train.X        = GetData(sentences, options).ToArray();
            train.Y        = GetLabels(sentences).ToArray();
            train.Count    = train.X.Count();
            train.MaxIndex = train.X[0].Count();//int.MaxValue;

            Parameter param = new Parameter();

            transform = RangeTransform.Compute(train);
            Problem scaled = transform.Scale(train);

            param.Gamma       = 1.0 / 3;
            param.SvmType     = svm;
            param.KernelType  = kernel;
            param.Probability = probability;

            int numberOfClasses = train.Y.OrderBy(x => x).Distinct().Count();

            if (numberOfClasses == 1)
            {
                Console.Write("Number of classes must greater than one!");
            }

            if (svm == SvmType.C_SVC)
            {
                for (int i = 0; i < numberOfClasses; i++)
                {
                    param.Weights[i] = 1;
                }
            }

            model = Training.Train(scaled, param);

            Console.Write("Training finished!");
        }
Example #16
        /// <summary>
        /// Predicts the class memberships of all the vectors in the problem.
        /// </summary>
        /// <param name="problem">The SVM Problem to solve</param>
        /// <param name="model">The Model to use</param>
        /// <param name="predictProbability">Whether to output a distribution over the classes</param>
        /// <returns>Percentage correctly labelled</returns>
        public static PredictionResult Predict(
            Problem problem,
            Model model,
            bool predictProbability)
        {
            int correct              = 0;
            PredictionResult result  = new PredictionResult(model.Parameter);
            SvmType          svmType = Procedures.SvmGetSvmType(model);
            int numberOfClasses      = Procedures.SvmGetNrClass(model);

            int[]    labels        = new int[numberOfClasses];
            double[] probEstimates = null;

            if (predictProbability)
            {
                if (svmType == SvmType.EPSILON_SVR || svmType == SvmType.NU_SVR)
                {
                    log.Info("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + Procedures.SvmGetSvrProbability(model));
                }
                else
                {
                    probEstimates = new double[numberOfClasses];
                }
            }

            Procedures.SvmGetLabels(model, labels);
            for (int j = 0; j < numberOfClasses; j++)
            {
                result.AddLabel(labels[j]);
            }

            for (int i = 0; i < problem.Count; i++)
            {
                ClassificationClass item = new ClassificationClass();
                item.Target = problem.Y[i];
                Node[] xValues = problem.X[i];

                if (predictProbability &&
                    (svmType == SvmType.C_SVC || svmType == SvmType.NU_SVC))
                {
                    Procedures.SvmPredictProbability(model, xValues, probEstimates);
                    double max = 0;
                    for (int j = 0; j < numberOfClasses; j++)
                    {
                        if (probEstimates[j] > max)
                        {
                            max         = probEstimates[j];
                            item.Actual = labels[j];
                        }

                        item.Add(probEstimates[j]);
                    }
                }
                else
                {
                    item.Actual = (int)Procedures.SvmPredict(model, xValues);
                }

                if (item.Actual == item.Target)
                {
                    correct++;
                }

                result.Set(item);
            }

            result.CorrectProbability = (double)correct / problem.Count;
            return(result);
        }
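A usage sketch for this overload, assuming the call is made from the declaring class, that scaled is a test Problem passed through the same RangeTransform as the training data, and that model is the trained Model:
            PredictionResult result = Predict(scaled, model, true);
            Console.WriteLine("Fraction correct: " + result.CorrectProbability);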
Example #17
        public static string SvmCheckParameter(SvmProblem prob, SvmParameter param)
        {
            SvmType svmType = param.svmType;

            // cache_size,eps,C,nu,p,shrinking
            if (param.cacheSize <= 0)
            {
                return("cache_size <= 0");
            }
            if (param.eps <= 0)
            {
                return("eps <= 0");
            }
            if (svmType == SvmType.CSvc || svmType == SvmType.EpsilonSvr || svmType == SvmType.NuSvr)
            {
                if (param.c <= 0)
                {
                    return("C <= 0");
                }
            }
            if (svmType == SvmType.NuSvc || svmType == SvmType.OneClass || svmType == SvmType.NuSvr)
            {
                if (param.nu <= 0 || param.nu > 1)
                {
                    return("nu <= 0 or nu > 1");
                }
            }
            if (svmType == SvmType.EpsilonSvr)
            {
                if (param.p < 0)
                {
                    return("p < 0");
                }
            }
            if (param.probability && svmType == SvmType.OneClass)
            {
                return("one-class SVM probability output not supported yet");
            }
            // check whether nu-svc is feasible
            if (svmType == SvmType.NuSvc)
            {
                int   l          = prob.Count;
                int   maxNrClass = 16;
                int   nrClass    = 0;
                int[] label      = new int[maxNrClass];
                int[] count      = new int[maxNrClass];
                int   i;
                for (i = 0; i < l; i++)
                {
                    int thisLabel = (int)prob.y[i];
                    int j;
                    for (j = 0; j < nrClass; j++)
                    {
                        if (thisLabel == label[j])
                        {
                            ++count[j];
                            break;
                        }
                    }
                    if (j == nrClass)
                    {
                        if (nrClass == maxNrClass)
                        {
                            maxNrClass *= 2;
                            int[] newData = new int[maxNrClass];
                            Array.Copy(label, 0, newData, 0, label.Length);
                            label   = newData;
                            newData = new int[maxNrClass];
                            Array.Copy(count, 0, newData, 0, count.Length);
                            count = newData;
                        }
                        label[nrClass] = thisLabel;
                        count[nrClass] = 1;
                        ++nrClass;
                    }
                }
                for (i = 0; i < nrClass; i++)
                {
                    int n1 = count[i];
                    for (int j = i + 1; j < nrClass; j++)
                    {
                        int n2 = count[j];
                        if (param.nu * (n1 + n2) / 2 > Math.Min(n1, n2))
                        {
                            return("specified nu is infeasible");
                        }
                    }
                }
            }
            return(null);
        }
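A hedged sketch of calling the check above before training; prob and param mirror the method's parameters, and the choice of exception is an assumption:
            string error = SvmCheckParameter(prob, param);
            if (error != null)
            {
                // Fail fast instead of starting training with an invalid configuration.
                throw new ArgumentException("Invalid SVM parameter: " + error);
            }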
Example #18
        /// <summary>
        /// Predicts the class memberships of all the vectors in the problem.
        /// </summary>
        /// <param name="problem">The SVM Problem to solve</param>
        /// <param name="outputFile">File for result output</param>
        /// <param name="model">The Model to use</param>
        /// <param name="predict_probability">Whether to output a distribution over the classes</param>
        /// <returns>Percentage correctly labelled</returns>
        public static double Predict(
            Problem problem,
            string outputFile,
            Model model,
            bool predict_probability)
        {
            int          correct = 0;
            int          total = 0;
            double       error = 0;
            double       sumv = 0, sumy = 0, sumvv = 0, sumyy = 0, sumvy = 0;
            StreamWriter output = outputFile != null ? new StreamWriter(outputFile) : null;

            SvmType svm_type = Procedures.svm_get_svm_type(model);
            int     nr_class = Procedures.svm_get_nr_class(model);

            int[]    labels         = new int[nr_class];
            double[] prob_estimates = null;

            if (predict_probability)
            {
                if (svm_type == SvmType.EPSILON_SVR || svm_type == SvmType.NU_SVR)
                {
                    Console.WriteLine("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + Procedures.svm_get_svr_probability(model));
                }
                else
                {
                    Procedures.svm_get_labels(model, labels);
                    prob_estimates = new double[nr_class];
                    if (output != null)
                    {
                        output.Write("labels");
                        for (int j = 0; j < nr_class; j++)
                        {
                            output.Write(" " + labels[j]);
                        }
                        output.Write("\n");
                    }
                }
            }
            for (int i = 0; i < problem.Count; i++)
            {
                double target = problem.Y[i];
                Node[] x      = problem.X[i];

                double v;
                if (predict_probability && (svm_type == SvmType.C_SVC || svm_type == SvmType.NU_SVC))
                {
                    v = Procedures.svm_predict_probability(model, x, prob_estimates);
                    if (output != null)
                    {
                        output.Write(v + " ");
                        for (int j = 0; j < nr_class; j++)
                        {
                            output.Write(prob_estimates[j] + " ");
                        }
                        output.Write("\n");
                    }
                }
                else
                {
                    v = Procedures.svm_predict(model, x);
                    if (output != null)
                    {
                        output.Write(v + "\n");
                    }
                }

                if (v == target)
                {
                    ++correct;
                }
                error += (v - target) * (v - target);
                sumv  += v;
                sumy  += target;
                sumvv += v * v;
                sumyy += target * target;
                sumvy += v * target;
                ++total;
            }
            if (output != null)
            {
                output.Close();
            }
            return((double)correct / total);
        }
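A brief usage sketch; the test helpers earlier on this page call an overload with this signature as Prediction.Predict, and the scaledTest variable and file name here are placeholders:
            double accuracy = Prediction.Predict(scaledTest, "predictions.txt", model, false);
            Console.WriteLine("Accuracy: " + accuracy);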
Example #19
 public static bool IsNuSVC(this SvmType svm_type)
 {
     return(svm_type == SvmType.NU_SVC);
 }
Example #20
 /// <summary>
 /// Default Constructor.  Gives good default values to all parameters.
 /// </summary>
 public Parameter()
 {
     _svmType = SvmType.C_SVC;
     _kernelType = KernelType.RBF;
     _degree = 3;
     _gamma = 0; // 1/k
     _coef0 = 0;
     _nu = 0.5;
     _cacheSize = 40;
     _C = 1;
     _eps = 1e-3;
     _p = 0.1;
     _shrinking = true;
     _probability = false;
     _weights = new Dictionary<int, double>();
 }
Example #21
 public static bool UsePParameter(this SvmType svm_type)
 {
     // public double p;	          // for EPSILON_SVR
     return(svm_type == SvmType.EPSILON_SVR);
 }
Example #22
 public SVC(SvmType svm_type, svm_problem prob, Kernel kernel, double C, double cache_size = 100, int probability = 0)
     : base(prob, (int)svm_type, kernel, C, 0.0, cache_size, 1e-3, 0.1, 1, probability, 0, new int[0], new double[0])
 {
 }
Example #23
 public SVR(SvmType svm_type, svm_problem prob, Kernel kernel, double C, double eps, bool probability, double cache_size)
     : base(prob, (int)svm_type, kernel, C, 0.0, cache_size, 1e-3, 0.1, 1, probability ? 1 : 0, 0, new int[0], new double[0])
 {
 }
Example #24
        public void SVMClassifierTrain(List <FeaturesWithLabel> featureSets, ClassifyOptions options, SvmType svm = SvmType.C_SVC, KernelType kernel = KernelType.RBF, bool probability = true, string outputFile = null)
        {
            // copy test multiclass Model
            Problem train = new Problem();

            train.X        = GetData(featureSets).ToArray();
            train.Y        = GetLabels(featureSets).ToArray();
            train.Count    = train.X.Count();
            train.MaxIndex = 300;//int.MaxValue;

            Parameter      param     = new Parameter();
            RangeTransform transform = RangeTransform.Compute(train);
            Problem        scaled    = transform.Scale(train);

            param.Gamma       = 1.0 / 3;
            param.SvmType     = svm;
            param.KernelType  = kernel;
            param.Probability = probability;

            int numberOfClasses = train.Y.Distinct().Count();

            if (numberOfClasses == 1)
            {
                throw new ArgumentException("Number of classes can't be one!");
            }
            if (svm == SvmType.C_SVC)
            {
                for (int i = 0; i < numberOfClasses; i++)
                {
                    param.Weights[i] = 1;
                }
            }
            var model = Training.Train(scaled, param);

            RangeTransform.Write(options.TransformFilePath, transform);
            SVM.BotSharp.MachineLearning.Model.Write(options.ModelFilePath, model);
            Console.Write("Training finished!");
        }
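A possible follow-up when classifying later (a sketch; it assumes the library exposes RangeTransform.Read and Model.Read counterparts to the Write calls above):
            var transform = RangeTransform.Read(options.TransformFilePath);
            var model     = SVM.BotSharp.MachineLearning.Model.Read(options.ModelFilePath);
            // Scale incoming vectors with the saved transform before calling Prediction.Predict.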
Example #25
        public SVR(SvmType svm_type, svm_problem prob, Kernel kernel, double C, double eps, bool probability, double cache_size)
            : base(prob, (int)svm_type, kernel, C, 0.0, cache_size, 1e-3, 0.1, 1, probability ? 1 : 0, 0, new int[0], new double[0])
        {
        }
Example #26
 public static bool IsSVR(this SvmType svm_type)
 {
     return(svm_type == SvmType.EPSILON_SVR || svm_type == SvmType.NU_SVR);
 }
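A hypothetical companion predicate (not one of the examples; built only from the IsSVR and IsOneClass extensions shown above) expressing which types can return per-class probability estimates:
 public static bool SupportsClassProbabilities(this SvmType svm_type)
 {
     // C_SVC and NU_SVC can produce per-class probability estimates;
     // the SVR types and ONE_CLASS cannot.
     return(!(svm_type.IsSVR() || svm_type.IsOneClass()));
 }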
Example #27
        public static double Predict(Problem problem, string outputFile, Model model, bool predict_probability, int MaxClassCount = 1)
        {
            int          num          = 0;      // correctly predicted vectors
            int          num2         = 0;      // total vectors seen
            double       num3         = 0.0;    // sum of squared errors
            double       num4         = 0.0;    // sum of predicted values
            double       num5         = 0.0;    // sum of target values
            double       num6         = 0.0;    // sum of squared predicted values
            double       num7         = 0.0;    // sum of squared target values
            double       num8         = 0.0;    // sum of predicted * target
            StreamWriter streamWriter = (outputFile != null) ? new StreamWriter(outputFile) : null;
            SvmType      svmType      = Procedures.svm_get_svm_type(model);
            int          num9         = Procedures.svm_get_nr_class(model);    // number of classes

            int[]    array  = new int[num9];    // class labels
            double[] array2 = null;             // per-class probability estimates
            if (predict_probability)
            {
                if (svmType == SvmType.EPSILON_SVR || svmType == SvmType.NU_SVR)
                {
                    Console.WriteLine("Prob. model for test data: target value = predicted value + z,\nz: Laplace distribution e^(-|z|/sigma)/(2sigma),sigma=" + Procedures.svm_get_svr_probability(model));
                }
                else
                {
                    Procedures.svm_get_labels(model, array);
                    array2 = new double[num9];
                    if (streamWriter != null)
                    {
                        streamWriter.Write("labels");
                        for (int i = 0; i < num9; i++)
                        {
                            streamWriter.Write(" " + array[i]);
                        }
                        streamWriter.Write("\n");
                    }
                }
            }
            for (int j = 0; j < problem.Count; j++)
            {
                double num10 = problem.Y[j];
                Node[] x     = problem.X[j];
                double num11;
                if (predict_probability && (svmType == SvmType.C_SVC || svmType == SvmType.NU_SVC))
                {
                    num11 = Procedures.svm_predict_probability(model, x, array2);
                    if (streamWriter != null)
                    {
                        streamWriter.Write(num11 + " ");
                        for (int k = 0; k < num9; k++)
                        {
                            streamWriter.Write(array2[k] + " ");
                        }
                        streamWriter.Write("\n");
                    }
                }
                else
                {
                    num11 = Procedures.svm_predict(model, x);
                    if (MaxClassCount == 1)
                    {
                        if (streamWriter != null)
                        {
                            streamWriter.Write(num11 + "\n");
                        }
                    }
                    else
                    {
                        int[] array3 = default(int[]);
                        Procedures.svm_predict_multi(model, x, out array3);
                        List <KeyValuePair <int, int> > list = new List <KeyValuePair <int, int> >(array3.Length);
                        for (int l = 0; l < array3.Length; l++)
                        {
                            list.Add(new KeyValuePair <int, int>(l, array3[l]));
                        }
                        // Sort class indices by vote count, descending.
                        list.Sort((KeyValuePair <int, int> first, KeyValuePair <int, int> second) => -first.Value.CompareTo(second.Value));
                        if (streamWriter != null)
                        {
                            for (int m = 0; m < Math.Min(MaxClassCount, list.Count); m++)
                            {
                                if (m > 0)
                                {
                                    streamWriter.Write('\t');
                                }
                                streamWriter.Write(list[m].Key);
                            }
                            streamWriter.Write("\n");
                        }
                    }
                }
                if (num11 == num10)
                {
                    num++;
                }
                num3 += (num11 - num10) * (num11 - num10);
                num4 += num11;
                num5 += num10;
                num6 += num11 * num11;
                num7 += num10 * num10;
                num8 += num11 * num10;
                num2++;
            }
            if (streamWriter != null)
            {
                streamWriter.Close();
            }
            if (svmType != SvmType.EPSILON_SVR && svmType != SvmType.NU_SVR)
            {
                // Classification: fraction of correctly labelled vectors.
                return((double)num / (double)num2);
            }
            // Regression: correlation between predicted and target values.
            return(((double)problem.Count * num8 - num4 * num5) / (Math.Sqrt((double)problem.Count * num6 - num4 * num4) * Math.Sqrt((double)problem.Count * num7 - num5 * num5)));
        }
Example #29
        public void SVMClassifierTrain(List <Sentence> sentences, ClassifyOptions options, SvmType svm = SvmType.C_SVC, KernelType kernel = KernelType.RBF, bool probability = true, string outputFile = null)
        {
            // copy test multiclass Model
            Problem train = new Problem();

            train.X        = GetData(sentences).ToArray();
            train.Y        = GetLabels(sentences).ToArray();
            train.Count    = train.X.Count();
            train.MaxIndex = train.X[0].Count();//int.MaxValue;

            Parameter param = new Parameter();

            transform = RangeTransform.Compute(train);
            Problem scaled = transform.Scale(train);

            param.Gamma       = 1.0 / 3;
            param.SvmType     = svm;
            param.KernelType  = kernel;
            param.Probability = probability;

            int numberOfClasses = train.Y.OrderBy(x => x).Distinct().Count();

            if (numberOfClasses == 1)
            {
                throw new ArgumentException("Number of classes can't be one!");
            }
            if (svm == SvmType.C_SVC)
            {
                for (int i = 0; i < numberOfClasses; i++)
                {
                    param.Weights[i] = 1;
                }
            }

            model = Training.Train(scaled, param);

            Console.Write("Training finished!");
        }
Example #30
 public static bool IsSVROrOneClass(this SvmType svm_type)
 {
     return(svm_type == SvmType.ONE_CLASS || svm_type == SvmType.EPSILON_SVR || svm_type == SvmType.NU_SVR);
 }