public override ClassificationModel Train(BaseVector[] x, int[][] y, int ngroups, Parameters param, int nthreads,
                                                  Action <double> reportProgress)
        {
            string err = CheckInput(x, y, ngroups);

            if (err != null)
            {
                throw new Exception(err);
            }
            // Build the SVM parameters from the user-selected kernel and the penalty constant C.
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.CSvc,
                c = param.GetParam <double>("C").Value
            };

            // Split the multi-group task into binary sub-problems and train one model per
            // sub-problem, distributing the work over the requested number of threads.
            bool[]            invert;
            SvmProblem[]      problems = CreateProblems(x, y, ngroups, out invert);
            SvmModel[]        models   = new SvmModel[problems.Length];
            ThreadDistributor td       = new ThreadDistributor(nthreads, models.Length,
                                                               i => { models[i] = SvmMain.SvmTrain(problems[i], sp); }, fractionDone => { reportProgress?.Invoke(fractionDone); });

            td.Start();
            return(new SvmClassificationModel(models, invert));
        }
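The ThreadDistributor above trains one binary SvmModel per SvmProblem in parallel and forwards the completed fraction to reportProgress. As a minimal sketch, the same loop could be written with the BCL's Parallel.For (System.Threading / System.Threading.Tasks); the progress bookkeeping below is illustrative, not the library's implementation.

            // Sketch only: same per-problem training loop without ThreadDistributor.
            // problems, sp, models, nthreads and reportProgress are the variables from the method above.
            int done = 0;
            var options = new ParallelOptions { MaxDegreeOfParallelism = nthreads };
            Parallel.For(0, problems.Length, options, i =>
            {
                models[i] = SvmMain.SvmTrain(problems[i], sp);
                int finished = Interlocked.Increment(ref done);
                reportProgress?.Invoke(finished / (double)problems.Length);
            });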
        private static int[] RankBinary(SvmProblem prob, SvmParameter param, double redfactor)
        {
            int nfeatures = prob.x[0].Length;

            int[]      result            = new int[nfeatures];
            int[]      survivingFeatures = ArrayUtils.ConsecutiveInts(nfeatures);
            SvmProblem problem           = prob.Copy();
            int        p = nfeatures - 1;

            int[] indices = ArrayUtils.ConsecutiveInts(nfeatures);
            // Recursive feature elimination: repeatedly retrain on the surviving features,
            // drop the fraction 1/redfactor with the lowest ranking criteria, and fill the
            // result array from the back, so the most important feature ends up first.
            while (survivingFeatures.Length > 0)
            {
                problem = problem.ExtractFeatures(survivingFeatures);
                indices = ArrayUtils.SubArray(indices, survivingFeatures);
                int      nfeatures2 = survivingFeatures.Length;
                double[] criteria   = ComputeRankingCriteria(SvmMain.SvmTrain(problem, param)
                                                             .ComputeBinaryClassifierWeights(nfeatures2));
                int[] order = ArrayUtils.Order(criteria);
                int   numFeaturesToRemove = Math.Max((int)Math.Round(nfeatures2 / redfactor), 1);
                for (int i = 0; i < numFeaturesToRemove; ++i)
                {
                    result[p--] = indices[order[i]];
                }
                survivingFeatures = ArrayUtils.SubArray(order, numFeaturesToRemove, nfeatures2);
            }
            return(result);
        }
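In SVM-RFE the ranking criterion for feature i is usually the squared component of the linear classifier's weight vector, w_i^2. The listing does not show ComputeRankingCriteria; the sketch below assumes it applies that standard criterion to the weights returned by ComputeBinaryClassifierWeights.

        // Hypothetical implementation of the ranking criterion used above, assuming the
        // standard SVM-RFE criterion c_i = w_i^2. Not taken from the source.
        private static double[] ComputeRankingCriteria(double[] weights)
        {
            double[] criteria = new double[weights.Length];
            for (int i = 0; i < weights.Length; i++)
            {
                criteria[i] = weights[i] * weights[i]; // larger weight magnitude => more important feature
            }
            return criteria;
        }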
Example #3
        public static ITrainer <TIn, TOut> Create <TIn, TOut>(ISvm <TIn, TOut> svm, IKernel kernel,
                                                              double cacheSize = 128, double tolerance = 0.001, bool shrinking = true, bool probability = false)
            where TOut : struct
        {
            var svmBase    = svm as SvmBase <TIn, TOut>;
            var kernelBase = kernel as KernelBase;

            if (svmBase == null || kernelBase == null)
            {
                throw new ApplicationException("Bad svm or/and kernel parameters");
            }

            var parameters = new SvmParameter
            {
                CacheSize   = cacheSize,
                Eps         = tolerance,
                Shrinking   = shrinking,
                Probability = probability
            };

            svmBase.FillParameters(parameters);
            kernelBase.FillParameters(parameters);

            return(svmBase.GetTrainer(parameters));
        }
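A hedged sketch of how this factory might be called. The concrete class names (CSvc, RbfKernel) and the containing static class (here called Trainers) are illustrative assumptions; only the Create signature above and the C/Gamma properties read by the FillParameters overrides further down are taken from the source.

     // Hypothetical usage; CSvc, RbfKernel and Trainers are illustrative names, not the library's.
     var svm     = new CSvc { C = 1.0 };         // an ISvm implementation whose FillParameters sets SvmType.C_SVC and C
     var kernel  = new RbfKernel { Gamma = 0.5 }; // an IKernel implementation whose FillParameters sets KernelType.Rbf and Gamma
     var trainer = Trainers.Create(svm, kernel, cacheSize: 128, tolerance: 1e-3);
     // trainer is an ITrainer<TIn, TOut> configured with the merged SvmParameter.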
Example #4
 internal override void FillParameters(SvmParameter param)
 {
     param.KernelType = KernelType.Poly;
     param.Gamma      = Gamma;
     param.Degree     = Degree;
     param.Coef0      = R;
 }
Example #5
 internal OneClassQ(SvmProblem prob, SvmParameter param) : base(prob.Count, prob.x, param)
 {
     // cacheSize is given in MB; (1 << 20) converts it to bytes for the kernel cache.
     cache = new SvmCache(prob.Count, (long)(param.cacheSize * (1 << 20)));
     // Precompute the diagonal of the Q matrix, i.e. K(x_i, x_i) for every training point.
     qd = new double[prob.Count];
     for (int i = 0; i < prob.Count; i++)
     {
         qd[i] = KernelFunctionEval(i, i);
     }
 }
Example #7
        private static int[] RankBinary(SvmProblem prob, SvmParameter param)
        {
            int nfeatures = prob.x[0].Length;

            // Single-pass ranking: train once on all features and order them by decreasing criterion.
            double[] criteria = ComputeRankingCriteria(SvmMain.SvmTrain(prob, param).ComputeBinaryClassifierWeights(nfeatures));
            int[]    order    = ArrayUtils.Order(criteria);
            Array.Reverse(order);
            return(order);
        }
Example #8
 internal SvcQ(SvmProblem prob, SvmParameter param, short[] y1) : base(prob.Count, prob.x, param)
 {
     // Keep a private copy of the labels and allocate the kernel cache (cacheSize is in MB).
     y     = (short[])y1.Clone();
     cache = new SvmCache(prob.Count, (long)(param.cacheSize * (1 << 20)));
     // Precompute the diagonal entries K(x_i, x_i) of the Q matrix.
     qd    = new double[prob.Count];
     for (int i = 0; i < prob.Count; i++)
     {
         qd[i] = KernelFunctionEval(i, i);
     }
 }
Example #10
        public RegressionModel Train(BaseVector[] x, float[] y, Parameters param, int nthreads)
        {
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.EpsilonSvr,
                c = param.GetParam <double>("C").Value
            };
            SvmModel model = SvmMain.SvmTrain(new SvmProblem(x, y), sp);

            return(new SvmRegressionModel(model));
        }
Example #11
        public override RegressionModel Train(BaseVector[] x, int[] nominal, double[] y, Parameters param, int nthreads, Action <double> reportProgress)
        {
            x = ClassificationMethod.ToOneHotEncoding(x, nominal);
            ParameterWithSubParams <int> kernelParam = param.GetParamWithSubParams <int>("Kernel");
            SvmParameter sp = new SvmParameter {
                kernelFunction = KernelFunctions.GetKernelFunction(kernelParam.Value, kernelParam.GetSubParameters()),
                svmType        = SvmType.EpsilonSvr,
                c = param.GetParam <double>("C").Value
            };
            SvmModel model = SvmMain.SvmTrain(new SvmProblem(x, y), sp);

            return(new SvmRegressionModel(model));
        }
Example #12
        static void Run(string trainingSet, string testSet)
        {
            // step 1: dataset
            var container = new MovieTweetingsDataContainer();

            var reader = new MovieTweetingsReader(trainingSet, testSet);

            reader.LoadData(container);

            Console.WriteLine("Data container statistics:\n {0}", container.ToString());

            var dataset = new ItemRatingDataset(container);

            var featureBuilder = new MovieTweetingLibSvmFeatureBuilder(container);


            // svm parameters
            var svmParameters = new SvmParameter
            {
                SvmType     = SvmType.C_SVC,
                KernelType  = KernelType.Linear,
                CacheSize   = 128,
                C           = 1,
                Eps         = 1e-3,
                Shrinking   = true,
                Probability = false
            };

            // step 2: recommender

            var labelSelector = new Func <ItemRating, double>(ir =>
            {
                var t = container.Tweets[ir];
                return(((t.RetweetCount + t.FavoriteCount) > 0) ? 1.0 : 0.0);
            });

            var recommender = new LibSvmClassifier(svmParameters, featureBuilder, labelSelector);

            // step 3: evaluation
            var ep = new EvaluationPipeline <ItemRating>(new EvalutationContext <ItemRating>(recommender, dataset));

            ep.Evaluators.Add(new WriteChallengeOutput(container, "test_output.dat"));

            ep.Run();
        }
Example #13
        public override int[] Rank(BaseVector[] x, int[][] y, int ngroups, Parameters param, IGroupDataProvider data,
                                   int nthreads, Action <double> reportProgress)
        {
            // Feature ranking with a linear C-SVC: rank the features separately for each binary
            // sub-problem, then merge the per-problem rankings into a single ordering.
            SvmParameter sp = new SvmParameter {
                kernelFunction = new LinearKernelFunction(),
                svmType        = SvmType.CSvc,
                c = param.GetParam <double>("C").Value
            };

            bool[]       invert;
            SvmProblem[] problems   = CreateProblems(x, y, ngroups, out invert);
            int[][]      rankedSets = new int[problems.Length][];
            for (int i = 0; i < problems.Length; ++i)
            {
                rankedSets[i] = RankBinary(problems[i], sp);
            }
            return(CombineRankedFeaturesLists(rankedSets));
        }
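The listing does not show CombineRankedFeaturesLists. One common way to merge per-classifier rankings is to average each feature's rank position across the binary problems and sort by that average; the sketch below illustrates that approach only and is not necessarily what the library does.

        // Illustrative merge of several ranked feature lists by average rank position.
        // Not the library's CombineRankedFeaturesLists; shown only to make the idea concrete.
        private static int[] CombineByAverageRank(int[][] rankedSets)
        {
            int nfeatures = rankedSets[0].Length;
            double[] avgRank = new double[nfeatures];
            foreach (int[] ranking in rankedSets)
            {
                for (int pos = 0; pos < ranking.Length; pos++)
                {
                    avgRank[ranking[pos]] += pos / (double)rankedSets.Length; // position 0 = best
                }
            }
            int[] features = new int[nfeatures];
            for (int i = 0; i < nfeatures; i++)
            {
                features[i] = i;
            }
            Array.Sort(avgRank, features); // sort feature indices by ascending average rank
            return features;
        }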
Example #14
        public static void Run()
        {
            Console.WriteLine("OneClassDemo");
            var trainData = DemoHelper.GenerateClass(0, 0.5, 0.5, 100);

            var parameters = new SvmParameter
            {
                SvmType     = SvmType.ONE_CLASS,
                KernelType  = KernelType.Rbf,
                Gamma       = 0.5,
                Nu          = 0.5,
                CacheSize   = 128,
                Eps         = 1e-3,
                Shrinking   = true,
                Probability = false
            };

            var problem = new SvmProblem
            {
                Y = trainData.Select(p => 1.0).ToArray(),
                X = trainData.Select(p => p.ToSvmNodes()).ToArray()
            };

            parameters.Check(problem);

            var model = Svm.Train(problem, parameters);

            var x    = new Point(0.9, 0.9).ToSvmNodes();
            var resx = model.Predict(x);

            Console.WriteLine(resx);

            var y    = new Point(0.5, 0.5).ToSvmNodes();
            var resy = model.Predict(y);

            Console.WriteLine(resy);

            var z    = new Point(0.45, 0.45).ToSvmNodes();
            var resz = model.Predict(z);

            Console.WriteLine(resz);
        }
Example #15
        public static void Run()
        {
            Console.WriteLine("EpsSVRDemo");
            var rnd = new Random();

            var trainData = DemoHelper.Range(-10.0, 10.01, 0.1).Select(val => new { X = val, Y = DemoHelper.Sinc(val) + (rnd.NextDouble() - 0.5) / 4 });

            var parameters = new SvmParameter
            {
                SvmType     = SvmType.EPSILON_SVR,
                KernelType  = KernelType.Rbf,
                Gamma       = 0.5,
                CacheSize   = 128,
                C           = 1,
                Eps         = 1e-3,
                P           = 0.1,
                Shrinking   = true,
                Probability = false
            };

            var problem = new SvmProblem
            {
                Y = trainData.Select(p => p.Y).ToArray(),
                X = trainData.Select(p => p.X.ToSvmNodes()).ToArray()
            };

            parameters.Check(problem);

            var model = Svm.Train(problem, parameters);

            foreach (var item in DemoHelper.Range(-1.0, 1.01, 0.1))
            {
                var x     = item.ToSvmNodes();
                var yPred = model.Predict(x);
                var yReal = DemoHelper.Sinc(item);
                Console.WriteLine("x: {0}", item);
                Console.WriteLine("y_real: {0}", yReal);
                Console.WriteLine("y_pred: {0}", yPred);
                Console.WriteLine();
            }
        }
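DemoHelper.Range and DemoHelper.Sinc are helpers from the demo project that this listing does not include. A minimal sketch of what they presumably compute (evenly spaced samples and the unnormalized sinc function), not taken from the source:

 // Assumed shape of the demo helpers (requires System and System.Collections.Generic).
 internal static class DemoHelper
 {
     // Yields from, from + step, ... while the value stays below to.
     public static IEnumerable<double> Range(double from, double to, double step)
     {
         for (double v = from; v < to; v += step)
         {
             yield return v;
         }
     }

     // Unnormalized sinc: sin(x)/x, with the removable singularity at x = 0 set to 1.
     public static double Sinc(double x)
     {
         return x == 0.0 ? 1.0 : Math.Sin(x) / x;
     }
 }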
Example #16
 internal SvrQ(SvmProblem prob, SvmParameter param) : base(prob.Count, prob.x, param)
 {
     l     = prob.Count;
     cache = new SvmCache(l, (long)(param.cacheSize * (1 << 20)));
     // For epsilon-SVR the dual problem is doubled: each training point appears once with
     // sign +1 and once with sign -1, both mapping back to the same original index.
     qd    = new double[2 * l];
     sign  = new short[2 * l];
     index = new int[2 * l];
     for (int k = 0; k < l; k++)
     {
         sign[k]      = 1;
         sign[k + l]  = -1;
         index[k]     = k;
         index[k + l] = k;
         qd[k]        = KernelFunctionEval(k, k);
         qd[k + l]    = qd[k];
     }
     buffer     = new float[2][];
     buffer[0]  = new float[2 * l];
     buffer[1]  = new float[2 * l];
     nextBuffer = 0;
 }
Example #18
        public static void Run()
        {
            Console.WriteLine("CSVMDemo");
            var class1 = DemoHelper.GenerateClass(0, 0.1, 0.1, 50);
            var class2 = DemoHelper.GenerateClass(1, 0.8, 0.8, 50);

            var trainData = class1.Concat(class2);

            var parameters = new SvmParameter
            {
                SvmType     = SvmType.C_SVC,
                KernelType  = KernelType.Rbf,
                Gamma       = 0.5,
                CacheSize   = 128,
                C           = 1,
                Eps         = 1e-3,
                Shrinking   = true,
                Probability = false
            };

            var problem = new SvmProblem
            {
                Y = trainData.Select(p => (double)p.Label).ToArray(),
                X = trainData.Select(p => p.ToSvmNodes()).ToArray()
            };

            parameters.Check(problem);

            var model = Svm.Train(problem, parameters);

            var x    = new Point(0.9, 0.9).ToSvmNodes();
            var resx = model.Predict(x);

            Console.WriteLine(resx);

            var y    = new Point(0.1, 0.1).ToSvmNodes();
            var resy = model.Predict(y);

            Console.WriteLine(resy);
        }
Example #19
 internal OneClassTrainer(SvmParameter parameters)
     : base(parameters)
 {
 }
Example #20
 internal override ITrainer <Tuple <double[], double>, double> GetTrainer(SvmParameter param)
 {
     return(new SvrTrainer(param));
 }
Example #21
 internal override void FillParameters(SvmParameter param)
 {
     param.SvmType = SvmType.NU_SVR;
     param.C       = C;
     param.Nu      = Nu;
 }
Example #22
 internal abstract ITrainer <TIn, TOut> GetTrainer(SvmParameter param);
Example #23
 internal SvcTrainer(SvmParameter parameters)
     : base(parameters)
 {
 }
Example #24
 internal override void FillParameters(SvmParameter param)
 {
     param.SvmType = SvmType.ONE_CLASS;
     param.Nu      = Nu;
 }
Example #25
 internal override ITrainer <double[], bool> GetTrainer(SvmParameter param)
 {
     return(new OneClassTrainer(param));
 }
Example #26
 internal override void FillParameters(SvmParameter param)
 {
     param.KernelType = KernelType.Linear;
 }
Example #27
 internal abstract void FillParameters(SvmParameter param);
Example #28
 public LibSvmClassifier(SvmParameter parameters, LibSvmFeatureBuilder featureBuilder, Func <ItemRating, double> labelSelector)
 {
     Parameters     = parameters;
     FeatureBuilder = featureBuilder;
     LabelSelector  = labelSelector;
 }
Example #29
 internal override void FillParameters(SvmParameter param)
 {
     param.SvmType = SvmType.C_SVC;
     param.C       = C;
 }
Example #30
 protected TrainerBase(SvmParameter parameters)
 {
     _parameters = parameters;
 }
Example #31
 internal override void FillParameters(SvmParameter param)
 {
     param.KernelType = KernelType.Sigmoid;
     param.Gamma      = Gamma;
     param.Coef0      = R;
 }
Example #32
 internal override void FillParameters(SvmParameter param)
 {
     param.SvmType = SvmType.EPSILON_SVR;
     param.C       = C;
     param.P       = Eps;
 }
Example #33
 internal override void FillParameters(SvmParameter param)
 {
     param.KernelType = KernelType.Rbf;
     param.Gamma      = Gamma;
 }