Example #1
        public static void Test_CollaborativeFilteringRSCostFunction_Evaluate(double lambda)
        {
            int num_users = 4; int num_movies = 5; int num_features = 3;

            List <List <double> > X     = DblDataTableUtil.LoadDataSet("X.txt");
            List <List <double> > Y     = DblDataTableUtil.LoadDataSet("Y.txt");
            List <List <int> >    R     = IntDataTableUtil.LoadDataSet("R.txt");
            List <List <double> > Theta = DblDataTableUtil.LoadDataSet("Theta.txt");

            X     = DblDataTableUtil.SubMatrix(X, num_movies, num_features);
            Y     = DblDataTableUtil.SubMatrix(Y, num_movies, num_users);
            R     = IntDataTableUtil.SubMatrix(R, num_movies, num_users);
            Theta = DblDataTableUtil.SubMatrix(Theta, num_users, num_features);

            Matrix <double> Y_matrix     = Convert2Matrix(Y);
            Matrix <double> X_matrix     = Convert2Matrix(X);
            Matrix <double> Theta_matrix = Convert2Matrix(Theta);

            int[,] R_matrix = IntDataTableUtil.Convert2DArray(R);

            int dimension = num_movies * num_features + num_users * num_features; //total number of entries in X and Theta

            double[] theta_x = new double[dimension];
            CollaborativeFilteringRSCostFunction.UnrollMatrixIntoVector(Theta_matrix, X_matrix, theta_x);

            CollaborativeFilteringRSCostFunction f = new CollaborativeFilteringRSCostFunction(Y_matrix, R_matrix, num_movies, num_features, dimension);

            f.RegularizationLambda = lambda;
            double J = f.Evaluate(theta_x);

            Console.WriteLine("Cost at loaded parameters: {0} (this value should be about 22.22)", J);
        }
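
For reference, the value this example checks is the standard regularized collaborative filtering objective: half the squared error over the rated entries plus an L2 penalty on the entries of X and Theta. The sketch below is only a minimal illustration of that formula over plain arrays, assuming the usual formulation; it is not the library's actual Evaluate implementation.

        // Minimal sketch of the assumed cost:
        // J = 1/2 * sum over (i,j) with R[i,j]=1 of (x_i . theta_j - Y[i,j])^2
        //   + lambda/2 * (sum of squared Theta entries + sum of squared X entries)
        static double CollabFilteringCostSketch(double[,] X, double[,] Theta, double[,] Y, int[,] R, double lambda)
        {
            int numMovies   = X.GetLength(0);
            int numUsers    = Theta.GetLength(0);
            int numFeatures = X.GetLength(1);

            double cost = 0.0;
            for (int i = 0; i < numMovies; ++i)
            {
                for (int j = 0; j < numUsers; ++j)
                {
                    if (R[i, j] == 0) continue; // only rated entries contribute to the error term

                    double prediction = 0.0;
                    for (int k = 0; k < numFeatures; ++k)
                    {
                        prediction += X[i, k] * Theta[j, k];
                    }
                    double error = prediction - Y[i, j];
                    cost += 0.5 * error * error;
                }
            }

            // L2 regularization over every entry of X and Theta
            double reg = 0.0;
            for (int i = 0; i < numMovies; ++i)
            {
                for (int k = 0; k < numFeatures; ++k)
                {
                    reg += X[i, k] * X[i, k];
                }
            }
            for (int j = 0; j < numUsers; ++j)
            {
                for (int k = 0; k < numFeatures; ++k)
                {
                    reg += Theta[j, k] * Theta[j, k];
                }
            }

            return cost + 0.5 * lambda * reg;
        }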
Example #2
        public static void Test_CollaborativeFilteringRSCostFunction_CalcGradient(double lambda = 0)
        {
            Matrix <double> X_t     = CreateRandomMatrix(4, 3);
            Matrix <double> Theta_t = CreateRandomMatrix(5, 3);

            Matrix <double> Y = X_t.Multiply(Theta_t.Transpose());

            int[,] R = new int[Y.RowCount, Y.ColumnCount];

            for (int i = 0; i < Y.RowCount; ++i)
            {
                for (int j = 0; j < Y.ColumnCount; ++j)
                {
                    // Randomly hide roughly half of the ratings so some entries are "unobserved".
                    if (mRand.NextDouble() > 0.5)
                    {
                        Y[i, j] = 0;
                    }
                    // R is the indicator matrix: 1 where a rating is observed, 0 otherwise.
                    if (Y[i, j] == 0)
                    {
                        R[i, j] = 0;
                    }
                    else
                    {
                        R[i, j] = 1;
                    }
                }
            }


            Matrix <double> X            = CreateRandomMatrix(4, 3);
            Matrix <double> Theta        = CreateRandomMatrix(5, 3);
            int             num_users    = Y.ColumnCount;
            int             num_movies   = Y.RowCount;
            int             num_features = Theta_t.ColumnCount;

            int dimension = num_movies * num_features + num_users * num_features; //total number of entries in X and Theta

            double[] theta_x = new double[dimension];
            CollaborativeFilteringRSCostFunction.UnrollMatrixIntoVector(Theta, X, theta_x);

            CollaborativeFilteringRSCostFunction f = new CollaborativeFilteringRSCostFunction(Y, R, num_movies, num_features, dimension);

            f.RegularizationLambda = lambda;

            double[] numgrad = new double[dimension];
            double[] grad    = new double[dimension];
            GradientEstimation.CalcGradient(theta_x, numgrad, (x_pi, constraints) =>
            {
                return(f.Evaluate(x_pi));
            });
            f.CalcGradient(theta_x, grad);

            Console.WriteLine("The relative difference will be small:");
            for (int i = 0; i < dimension; ++i)
            {
                Console.WriteLine("{0}\t{1}", numgrad[i], grad[i]);
            }
        }
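
Example #2 checks the analytical gradient from CalcGradient against a numerical estimate. The internals of GradientEstimation.CalcGradient are not shown in these examples; the sketch below illustrates one common way such an estimate is produced, assuming a central difference scheme with a small step size.

        // Central difference sketch: numgrad[p] approximates dJ/dtheta[p] by evaluating the
        // cost at theta[p] + eps and theta[p] - eps. This is an assumption about the approach,
        // not the library's actual implementation.
        static void NumericalGradientSketch(double[] theta, double[] numgrad, Func<double[], double> cost)
        {
            const double eps = 1e-4;
            for (int p = 0; p < theta.Length; ++p)
            {
                double original = theta[p];

                theta[p] = original + eps;
                double costPlus = cost(theta);

                theta[p] = original - eps;
                double costMinus = cost(theta);

                theta[p] = original; // restore the component before moving on

                numgrad[p] = (costPlus - costMinus) / (2.0 * eps);
            }
        }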
        public double ComputeCost(List <T> data_set, int num_features)
        {
            int n_m = data_set.Count;

            if (n_m == 0)
            {
                throw new ArgumentException("Data set is empty!");
            }
            int x_dimension = num_features;
            int n_u         = data_set[0].UserRanks.Length;

            Matrix <double> Y_matrix = new SparseMatrix(n_m, n_u, 0);

            int[,] r_matrix = new int[n_m, n_u];

            int theta_dimension   = n_u * x_dimension;
            int content_dimension = n_m * x_dimension;

            int dimension = theta_dimension + content_dimension;

            // Copy the ratings into Y and build the indicator matrix R (1 = rated, 0 = not rated).
            for (int i = 0; i < n_m; ++i)
            {
                T        rating = data_set[i];
                double[] Y      = rating.UserRanks;
                bool[]   r      = rating.IsRated;

                for (int j = 0; j < n_u; ++j)
                {
                    Y_matrix[i, j] = Y[j];
                    r_matrix[i, j] = r[j] ? 1 : 0;
                }
            }

            CollaborativeFilteringRSCostFunction f = new CollaborativeFilteringRSCostFunction(Y_matrix, r_matrix, n_m, x_dimension, dimension);

            f.RegularizationLambda = 0;

            return(f.Evaluate(mThetaX));
        }
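
Both ComputeCost and Compute work on a single unrolled parameter vector. The indexing used in Compute below (j * num_features + k for Theta, theta_dimension + i * num_features + k for X) implies that the Theta entries are packed first, followed by the X entries. The sketch below illustrates that assumed layout over plain arrays; it is not the library's UnrollMatrixIntoVector itself.

        // Assumed packing of the unrolled vector: [ Theta row-by-row | X row-by-row ].
        static void UnrollSketch(double[,] Theta, double[,] X, double[] theta_x)
        {
            int numUsers       = Theta.GetLength(0);
            int numFeatures    = Theta.GetLength(1);
            int numItems       = X.GetLength(0);
            int thetaDimension = numUsers * numFeatures;

            for (int j = 0; j < numUsers; ++j)
            {
                for (int k = 0; k < numFeatures; ++k)
                {
                    theta_x[j * numFeatures + k] = Theta[j, k]; // Theta block comes first
                }
            }
            for (int i = 0; i < numItems; ++i)
            {
                for (int k = 0; k < numFeatures; ++k)
                {
                    theta_x[thetaDimension + i * numFeatures + k] = X[i, k]; // X block follows
                }
            }
        }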
        public Matrix <double> Compute(List <T> data_set, int num_features)
        {
            int n_m = data_set.Count;

            if (n_m == 0)
            {
                throw new ArgumentException("Data set is empty!");
            }
            int n_u = data_set[0].UserRanks.Length;

            Matrix <double> Y_matrix = new SparseMatrix(n_m, n_u, 0);

            int[,] r_matrix = new int[n_m, n_u];

            int theta_dimension   = n_u * num_features;
            int content_dimension = n_m * num_features;

            int dimension = theta_dimension + content_dimension;

            for (int i = 0; i < n_m; ++i)
            {
                T rating = data_set[i];
                rating.ItemIndex = i;
                double[] Y = rating.UserRanks;
                bool[]   r = rating.IsRated;

                for (int j = 0; j < n_u; ++j)
                {
                    Y_matrix[i, j] = Y[j];
                    r_matrix[i, j] = r[j] ? 1 : 0;
                }
            }

            CollaborativeFilteringRSCostFunction f = new CollaborativeFilteringRSCostFunction(Y_matrix, r_matrix, n_m, num_features, dimension);

            f.RegularizationLambda = mRegularizationLambda;
            // Random initialization of the unrolled Theta/X parameter vector.
            double[] theta_x_0 = new double[dimension];
            for (int d = 0; d < dimension; ++d)
            {
                theta_x_0[d] = NextRandomDouble();
            }

            ContinuousSolution solution = mLocalSearcher.Minimize(theta_x_0, f, mMaxSolverIteration);

            mThetaX = solution.Values;

            // Recover Theta (the per-user parameter vectors) from the first n_u * num_features entries of the solution.
            mTheta = new DenseMatrix(n_u, num_features);
            for (int j = 0; j < n_u; ++j)
            {
                for (int k = 0; k < num_features; ++k)
                {
                    int index = j * num_features + k;
                    mTheta[j, k] = mThetaX[index];
                }
            }

            // Recover X (the per-item feature vectors) from the remaining entries and store them on each record.
            Matrix <double> X = new DenseMatrix(n_m, num_features);

            for (int i = 0; i < n_m; ++i)
            {
                T rec = data_set[i];
                if (rec.X == null)
                {
                    rec.X = new double[num_features];
                }
                for (int k = 0; k < num_features; ++k)
                {
                    int index = i * num_features + k + theta_dimension;
                    rec.X[k] = mThetaX[index];
                    X[i, k]  = mThetaX[index];
                }
            }


            Matrix <double> XThetaPrime = X.Multiply(mTheta.Transpose());

            // Fill every entry the user has not rated with the model's predicted rating.
            for (int i = 0; i < n_m; ++i)
            {
                T rec = data_set[i];
                for (int j = 0; j < n_u; ++j)
                {
                    if (r_matrix[i, j] == 0)
                    {
                        rec.UserRanks[j] = XThetaPrime[i, j];
                    }
                }
            }

            return(XThetaPrime);
        }
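
A possible calling pattern for Compute, assuming a hypothetical MovieRating type that exposes the members the method relies on (UserRanks, IsRated, ItemIndex, X) and a hypothetical name for the enclosing recommender class; the real constraint on T and the class name are defined elsewhere in the library.

        // Hypothetical rating record; the actual type used for T is not shown in these examples.
        public class MovieRating
        {
            public double[] UserRanks { get; set; } // observed ratings, later overwritten with predictions where unrated
            public bool[]   IsRated   { get; set; } // true where the user actually rated the item
            public int      ItemIndex { get; set; } // assigned by Compute
            public double[] X         { get; set; } // learned item feature vector, filled in by Compute
        }

        public static void Test_Compute_Usage()
        {
            // Two users, three movies; 0 stands in for "not rated".
            List <MovieRating> data_set = new List <MovieRating>
            {
                new MovieRating { UserRanks = new double[] { 5, 0 }, IsRated = new[] { true,  false } },
                new MovieRating { UserRanks = new double[] { 4, 1 }, IsRated = new[] { true,  true  } },
                new MovieRating { UserRanks = new double[] { 0, 2 }, IsRated = new[] { false, true  } },
            };

            CollaborativeFilteringRS <MovieRating> recommender = new CollaborativeFilteringRS <MovieRating>(); // hypothetical class name
            Matrix <double> predicted = recommender.Compute(data_set, 2);

            // After the call, entries that were unrated now hold the model's predicted ratings.
            Console.WriteLine("Predicted rating of movie 0 by user 1: {0}", data_set[0].UserRanks[1]);
        }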