Example #1
0
File: Kernel.cs  Project: haf/LibSvmSharp
        /// <summary>
        /// Evaluates the kernel configured in <paramref name="param"/> for the two
        /// sparse vectors <paramref name="x"/> and <paramref name="y"/>.
        /// </summary>
        public static double k_function(SvmNode[] x, SvmNode[] y, SvmParameter param)
        {
            switch (param.KernelType)
            {
                case KernelType.Linear:
                    return dot(x, y);

                case KernelType.Poly:
                    return powi(param.Gamma * dot(x, y) + param.Coef0, param.Degree);

                case KernelType.Rbf:
                {
                    // Squared Euclidean distance over the sparse representations:
                    // both arrays are walked in a merge-style scan, so an index
                    // present in only one vector contributes its value squared.
                    double squaredDistance = 0;
                    int ix = 0;
                    int iy = 0;
                    while (ix < x.Length && iy < y.Length)
                    {
                        if (x[ix].Index == y[iy].Index)
                        {
                            double diff = x[ix++].Value - y[iy++].Value;
                            squaredDistance += diff * diff;
                        }
                        else if (x[ix].Index > y[iy].Index)
                        {
                            squaredDistance += y[iy].Value * y[iy].Value;
                            ++iy;
                        }
                        else
                        {
                            squaredDistance += x[ix].Value * x[ix].Value;
                            ++ix;
                        }
                    }

                    // Remaining tail of whichever vector is longer.
                    for (; ix < x.Length; ++ix)
                        squaredDistance += x[ix].Value * x[ix].Value;
                    for (; iy < y.Length; ++iy)
                        squaredDistance += y[iy].Value * y[iy].Value;

                    return Math.Exp(-param.Gamma * squaredDistance);
                }

                case KernelType.Sigmoid:
                    return Math.Tanh(param.Gamma * dot(x, y) + param.Coef0);

                case KernelType.Precomputed:
                    // Looks up the precomputed kernel value indexed by y[0].Value.
                    return x[(int)(y[0].Value)].Value;

                default:
                    throw new ApplicationException("Bad kernel_type");
            }
        }
Example #2
0
        /// <summary>
        /// Converts a dense feature vector into an array of sparse SVM nodes,
        /// using each element's position as its node index.
        /// </summary>
        public static SvmNode[] ToSvmNodes(this double[] vector)
        {
            var nodes = new SvmNode[vector.Length];
            for (var index = 0; index < nodes.Length; index++)
            {
                nodes[index] = new SvmNode(index, vector[index]);
            }
            return nodes;
        }
Example #3
0
File: Kernel.cs  Project: haf/LibSvmSharp
        /// <summary>
        /// Caches the kernel configuration and a copy of the training vectors;
        /// for the RBF kernel, precomputes the self dot product of every vector.
        /// </summary>
        protected Kernel(int l, SvmNode[][] x_, SvmParameter param)
        {
            this.kernel_type = param.KernelType;
            this.degree = param.Degree;
            this.gamma = param.Gamma;
            this.coef0 = param.Coef0;

            // Shallow copy: the outer array is cloned, the inner rows are shared.
            x = (SvmNode[][])x_.Clone();

            if (kernel_type != KernelType.Rbf)
            {
                x_square = null;
            }
            else
            {
                // x·x values are needed repeatedly when evaluating the RBF kernel.
                x_square = new double[l];
                for (int row = 0; row < l; row++)
                {
                    x_square[row] = dot(x[row], x[row]);
                }
            }
        }
        /// <summary>
        /// Builds the sparse SVM feature vector for a single tweet rating: the
        /// numeric rating, two popularity counts scaled into [0, 1], and three
        /// indicator features (value 1) keyed by user, item+domain, and movie URL.
        /// </summary>
        public override LibSvm.SvmNode[] GetSvmNode(ItemRating rating)
        {
            // Hoist the repeated dictionary lookup (was performed three times).
            var tweet = _container.Tweets[rating];

            // Saturating scale: counts of 1000 or more map to 1.0, otherwise count/1000.
            // Math.Min(c, 1000)/1000.0 is exactly equivalent to the previous ternary.
            double followersFeature = Math.Min(tweet.FollowersCount, 1000) / 1000.0;
            double friendsFeature = Math.Min(tweet.FriendsCount, 1000) / 1000.0;

            var svmNode = new SvmNode[6] {
                new SvmNode(Mapper.ToInternalID("Rating"), rating.Rating),
                new SvmNode(Mapper.ToInternalID("FollowersCount"),  followersFeature),
                new SvmNode(Mapper.ToInternalID("FriendsCount"),  friendsFeature),
                new SvmNode(Mapper.ToInternalID(rating.User.Id), 1),
                new SvmNode(Mapper.ToInternalID(rating.Item.Id + rating.Domain.Id), 1),
                new SvmNode(Mapper.ToInternalID(tweet.MovieUrl), 1),
            };

            return svmNode;
        }
Example #5
0
        //
        // Interface functions
        //
        /// <summary>
        /// Trains an SVM model on the given problem. For SVR/one-class a single
        /// decision function is fit; for classification a one-vs-one classifier is
        /// trained for every pair of classes and the results packed into the model.
        /// </summary>
        public static SvmModel Train(SvmProblem prob, SvmParameter param)
        {
            var model = new SvmModel();
              model.Param = param;

              if (param.SvmType.IsSVROrOneClass())
              {
            // regression or one-class-svm
            model.NrClass = 2;
            model.Label = null;
            model.SupportVectorsNumbers = null;
            model.ProbA = null; model.ProbB = null;
            model.SupportVectorsCoefficients = new double[1][];

            if (param.Probability && param.SvmType.IsSVR())
            {
              model.ProbA = new double[1];
              model.ProbA[0] = svm_svr_probability(prob, param);
            }

            DecisionFunction f = svm_train_one(prob, param, 0, 0);
            model.Rho = new double[1];
            model.Rho[0] = f.Rho;

            // collect support vectors: training points with nonzero alpha
            int nSV = 0;
            int i;
            for (i = 0; i < prob.Lenght; i++)
              if (Math.Abs(f.Alpha[i]) > 0) ++nSV;
            model.TotalSupportVectorsNumber = nSV;
            model.SupportVectors = new SvmNode[nSV][];
            model.SupportVectorsCoefficients[0] = new double[nSV];
            int j = 0;
            for (i = 0; i < prob.Lenght; i++)
              if (Math.Abs(f.Alpha[i]) > 0)
              {
            model.SupportVectors[j] = prob.X[i];
            model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
            ++j;
              }
              }
              else
              {
            // classification
            int l = prob.Lenght;
            int[] perm = new int[l];

            int nr_class;
            int[] label;
            int[] start;
            int[] count;

            // group training data of the same class
            svm_group_classes(prob, out nr_class, out label, out start, out count, perm);

            SvmNode[][] x = new SvmNode[l][];
            int i;
            for (i = 0; i < l; i++)
              x[i] = prob.X[perm[i]];

            // calculate weighted C

            double[] weighted_C = new double[nr_class];
            for (i = 0; i < nr_class; i++)
              weighted_C[i] = param.C;
            for (i = 0; i < param.WeightsCount; i++)
            {
              int j;
              for (j = 0; j < nr_class; j++)
            if (param.WeightLabel[i] == label[j])
              break;
              if (j == nr_class)
            Console.Error.WriteLine("warning: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
              else
            weighted_C[j] *= param.Weight[i];
            }

            // train k*(k-1)/2 models

            var nonzero = new bool[l];
            for (i = 0; i < l; i++)
              nonzero[i] = false;
            var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

            // per-pair probability parameters (filled by svm_binary_svc_probability)
            double[] probA = null, probB = null;
            if (param.Probability)
            {
              probA = new double[nr_class * (nr_class - 1) / 2];
              probB = new double[nr_class * (nr_class - 1) / 2];
            }

            int p = 0;
            for (i = 0; i < nr_class; i++)
              for (int j = i + 1; j < nr_class; j++)
              {

            int si = start[i], sj = start[j];
            int ci = count[i], cj = count[j];
            var subprobLenght = ci + cj;
            var sub_prob = new SvmProblem
            {
              X = new SvmNode[subprobLenght][],
              Y = new double[subprobLenght]
            };

            // binary subproblem: class i as +1, class j as -1
            int k;
            for (k = 0; k < ci; k++)
            {
              sub_prob.X[k] = x[si + k];
              sub_prob.Y[k] = +1;
            }
            for (k = 0; k < cj; k++)
            {
              sub_prob.X[ci + k] = x[sj + k];
              sub_prob.Y[ci + k] = -1;
            }

            if (param.Probability)
            {
              double[] probAB = new double[2];
              svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
              probA[p] = probAB[0];
              probB[p] = probAB[1];
            }

            f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
            // mark training points that became support vectors of this classifier
            for (k = 0; k < ci; k++)
              if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                nonzero[si + k] = true;
            for (k = 0; k < cj; k++)
              if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                nonzero[sj + k] = true;
            ++p;
              }

            // build output

            model.NrClass = nr_class;

            model.Label = new int[nr_class];
            for (i = 0; i < nr_class; i++)
              model.Label[i] = label[i];

            model.Rho = new double[nr_class * (nr_class - 1) / 2];
            for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
              model.Rho[i] = f[i].Rho;

            if (param.Probability)
            {
              model.ProbA = new double[nr_class * (nr_class - 1) / 2];
              model.ProbB = new double[nr_class * (nr_class - 1) / 2];
              for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
              {
            model.ProbA[i] = probA[i];
            model.ProbB[i] = probB[i];
              }
            }
            else
            {
              model.ProbA = null;
              model.ProbB = null;
            }

            // count support vectors per class (nonzero in any pairwise classifier)
            int nnz = 0;
            int[] nz_count = new int[nr_class];
            model.SupportVectorsNumbers = new int[nr_class];
            for (i = 0; i < nr_class; i++)
            {
              int nSV = 0;
              for (int j = 0; j < count[i]; j++)
            if (nonzero[start[i] + j])
            {
              ++nSV;
              ++nnz;
            }
              model.SupportVectorsNumbers[i] = nSV;
              nz_count[i] = nSV;
            }

            Svm.info("Total nSV = " + nnz + "\n");

            model.TotalSupportVectorsNumber = nnz;
            model.SupportVectors = new SvmNode[nnz][];
            p = 0;
            for (i = 0; i < l; i++)
              if (nonzero[i]) model.SupportVectors[p++] = x[i];

            // nz_start[i] = offset of class i's support vectors in the packed arrays
            int[] nz_start = new int[nr_class];
            nz_start[0] = 0;
            for (i = 1; i < nr_class; i++)
              nz_start[i] = nz_start[i - 1] + nz_count[i - 1];

            model.SupportVectorsCoefficients = new double[nr_class - 1][];
            for (i = 0; i < nr_class - 1; i++)
              model.SupportVectorsCoefficients[i] = new double[nnz];

            p = 0;
            for (i = 0; i < nr_class; i++)
              for (int j = i + 1; j < nr_class; j++)
              {
            // classifier (i,j): coefficients with
            // i are in sv_coef[j-1][nz_start[i]...],
            // j are in sv_coef[i][nz_start[j]...]

            int si = start[i];
            int sj = start[j];
            int ci = count[i];
            int cj = count[j];

            int q = nz_start[i];
            int k;
            for (k = 0; k < ci; k++)
              if (nonzero[si + k])
                model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
            q = nz_start[j];
            for (k = 0; k < cj; k++)
              if (nonzero[sj + k])
                model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
            ++p;
              }
              }
              return model;
        }
Example #6
0
File: Kernel.cs  Project: haf/LibSvmSharp
 /// <summary>
 /// Dot product of two sparse SVM vectors. Both index lists are walked in a
 /// merge-style scan, so only indices present in both vectors contribute
 /// (the scan assumes indices are sorted ascending — matches libsvm format).
 /// </summary>
 static double dot(SvmNode[] x, SvmNode[] y)
 {
     double result = 0;
     int a = 0;
     int b = 0;
     while (a < x.Length && b < y.Length)
     {
         if (x[a].Index == y[b].Index)
         {
             // Matching index: accumulate the product and advance both cursors.
             result += x[a++].Value * y[b++].Value;
         }
         else if (x[a].Index > y[b].Index)
         {
             ++b;
         }
         else
         {
             ++a;
         }
     }
     return result;
 }
Example #7
0
        //
        // Interface functions
        //
        /// <summary>
        /// Trains an SVM model on the given problem. For SVR/one-class a single
        /// decision function is fit; for classification a one-vs-one classifier is
        /// trained for every pair of classes and the results packed into the model.
        /// </summary>
        public static SvmModel Train(SvmProblem prob, SvmParameter param)
        {
            var model = new SvmModel();

            model.Param = param;

            if (param.SvmType.IsSVROrOneClass())
            {
                // regression or one-class-svm
                model.NrClass = 2;
                model.Label   = null;
                model.SupportVectorsNumbers = null;
                model.ProbA = null; model.ProbB = null;
                model.SupportVectorsCoefficients = new double[1][];

                if (param.Probability && param.SvmType.IsSVR())
                {
                    model.ProbA    = new double[1];
                    model.ProbA[0] = svm_svr_probability(prob, param);
                }

                DecisionFunction f = svm_train_one(prob, param, 0, 0);
                model.Rho    = new double[1];
                model.Rho[0] = f.Rho;

                // collect support vectors: training points with nonzero alpha
                int nSV = 0;
                int i;
                for (i = 0; i < prob.Lenght; i++)
                {
                    if (Math.Abs(f.Alpha[i]) > 0)
                    {
                        ++nSV;
                    }
                }
                model.TotalSupportVectorsNumber     = nSV;
                model.SupportVectors                = new SvmNode[nSV][];
                model.SupportVectorsCoefficients[0] = new double[nSV];
                int j = 0;
                for (i = 0; i < prob.Lenght; i++)
                {
                    if (Math.Abs(f.Alpha[i]) > 0)
                    {
                        model.SupportVectors[j] = prob.X[i];
                        model.SupportVectorsCoefficients[0][j] = f.Alpha[i];
                        ++j;
                    }
                }
            }
            else
            {
                // classification
                int   l    = prob.Lenght;
                int[] perm = new int[l];

                int   nr_class;
                int[] label;
                int[] start;
                int[] count;

                // group training data of the same class
                svm_group_classes(prob, out nr_class, out label, out start, out count, perm);

                if (nr_class == 1)
                {
                    Svm.info("WARNING: training data in only one class. See README for details.\n");
                }

                // reorder the training vectors so each class is contiguous
                SvmNode[][] x = new SvmNode[l][];
                int         i;
                for (i = 0; i < l; i++)
                {
                    x[i] = prob.X[perm[i]];
                }

                // calculate weighted C

                double[] weighted_C = new double[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    weighted_C[i] = param.C;
                }
                for (i = 0; i < param.WeightsCount; i++)
                {
                    int j;
                    for (j = 0; j < nr_class; j++)
                    {
                        if (param.WeightLabel[i] == label[j])
                        {
                            break;
                        }
                    }
                    if (j == nr_class)
                    {
                        System.Diagnostics.Debug.WriteLine("WARNING: class label " + param.WeightLabel[i] + " specified in weight is not found\n");
                    }
                    else
                    {
                        weighted_C[j] *= param.Weight[i];
                    }
                }

                // train k*(k-1)/2 models

                var nonzero = new bool[l];
                for (i = 0; i < l; i++)
                {
                    nonzero[i] = false;
                }
                var f = new DecisionFunction[nr_class * (nr_class - 1) / 2];

                // per-pair probability parameters (filled by svm_binary_svc_probability)
                double[] probA = null, probB = null;
                if (param.Probability)
                {
                    probA = new double[nr_class * (nr_class - 1) / 2];
                    probB = new double[nr_class * (nr_class - 1) / 2];
                }

                int p = 0;
                for (i = 0; i < nr_class; i++)
                {
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // binary subproblem: class i as +1, class j as -1
                        int si = start[i], sj = start[j];
                        int ci = count[i], cj = count[j];
                        var subprobLenght = ci + cj;
                        var sub_prob      = new SvmProblem
                        {
                            X = new SvmNode[subprobLenght][],
                            Y = new double[subprobLenght]
                        };

                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            sub_prob.X[k] = x[si + k];
                            sub_prob.Y[k] = +1;
                        }
                        for (k = 0; k < cj; k++)
                        {
                            sub_prob.X[ci + k] = x[sj + k];
                            sub_prob.Y[ci + k] = -1;
                        }

                        if (param.Probability)
                        {
                            double[] probAB = new double[2];
                            svm_binary_svc_probability(sub_prob, param, weighted_C[i], weighted_C[j], probAB);
                            probA[p] = probAB[0];
                            probB[p] = probAB[1];
                        }

                        f[p] = svm_train_one(sub_prob, param, weighted_C[i], weighted_C[j]);
                        // mark training points that became support vectors of this classifier
                        for (k = 0; k < ci; k++)
                        {
                            if (!nonzero[si + k] && Math.Abs(f[p].Alpha[k]) > 0)
                            {
                                nonzero[si + k] = true;
                            }
                        }
                        for (k = 0; k < cj; k++)
                        {
                            if (!nonzero[sj + k] && Math.Abs(f[p].Alpha[ci + k]) > 0)
                            {
                                nonzero[sj + k] = true;
                            }
                        }
                        ++p;
                    }
                }

                // build output

                model.NrClass = nr_class;

                model.Label = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    model.Label[i] = label[i];
                }

                model.Rho = new double[nr_class * (nr_class - 1) / 2];
                for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                {
                    model.Rho[i] = f[i].Rho;
                }

                if (param.Probability)
                {
                    model.ProbA = new double[nr_class * (nr_class - 1) / 2];
                    model.ProbB = new double[nr_class * (nr_class - 1) / 2];
                    for (i = 0; i < nr_class * (nr_class - 1) / 2; i++)
                    {
                        model.ProbA[i] = probA[i];
                        model.ProbB[i] = probB[i];
                    }
                }
                else
                {
                    model.ProbA = null;
                    model.ProbB = null;
                }

                // count support vectors per class (nonzero in any pairwise classifier)
                int   nnz      = 0;
                int[] nz_count = new int[nr_class];
                model.SupportVectorsNumbers = new int[nr_class];
                for (i = 0; i < nr_class; i++)
                {
                    int nSV = 0;
                    for (int j = 0; j < count[i]; j++)
                    {
                        if (nonzero[start[i] + j])
                        {
                            ++nSV;
                            ++nnz;
                        }
                    }
                    model.SupportVectorsNumbers[i] = nSV;
                    nz_count[i] = nSV;
                }

                Svm.info("Total nSV = " + nnz + "\n");

                model.TotalSupportVectorsNumber = nnz;
                model.SupportVectors            = new SvmNode[nnz][];
                p = 0;
                for (i = 0; i < l; i++)
                {
                    if (nonzero[i])
                    {
                        model.SupportVectors[p++] = x[i];
                    }
                }

                // nz_start[i] = offset of class i's support vectors in the packed arrays
                int[] nz_start = new int[nr_class];
                nz_start[0] = 0;
                for (i = 1; i < nr_class; i++)
                {
                    nz_start[i] = nz_start[i - 1] + nz_count[i - 1];
                }

                model.SupportVectorsCoefficients = new double[nr_class - 1][];
                for (i = 0; i < nr_class - 1; i++)
                {
                    model.SupportVectorsCoefficients[i] = new double[nnz];
                }

                p = 0;
                for (i = 0; i < nr_class; i++)
                {
                    for (int j = i + 1; j < nr_class; j++)
                    {
                        // classifier (i,j): coefficients with
                        // i are in sv_coef[j-1][nz_start[i]...],
                        // j are in sv_coef[i][nz_start[j]...]

                        int si = start[i];
                        int sj = start[j];
                        int ci = count[i];
                        int cj = count[j];

                        int q = nz_start[i];
                        int k;
                        for (k = 0; k < ci; k++)
                        {
                            if (nonzero[si + k])
                            {
                                model.SupportVectorsCoefficients[j - 1][q++] = f[p].Alpha[k];
                            }
                        }
                        q = nz_start[j];
                        for (k = 0; k < cj; k++)
                        {
                            if (nonzero[sj + k])
                            {
                                model.SupportVectorsCoefficients[i][q++] = f[p].Alpha[ci + k];
                            }
                        }
                        ++p;
                    }
                }
            }
            return(model);
        }