// Code example #1
        /// <summary>Performs one stochastic gradient descent step for a sampled (user, item, item) triple</summary>
        /// <param name="u">the user ID</param>
        /// <param name="i">the ID of the first item</param>
        /// <param name="j">the ID of the second item</param>
        /// <param name="update_u">if true, update the user latent factors</param>
        /// <param name="update_i">if true, update the latent factors of the first item</param>
        /// <param name="update_j">if true, update the latent factors of the second item</param>
        protected override void UpdateFactors(int u, int i, int j, bool update_u, bool update_i, bool update_j)
        {
            // score difference of the sampled item pair:
            // (b_i - b_j) + <v_u, v_i - v_j>
            double x_uij = item_bias[i] - item_bias[j]
                + MatrixExtensions.RowScalarProductWithRowDifference(user_factors, u, item_factors, i, item_factors, j);

            // correctly ranked pairs trigger no update at all in this variant
            // (note: this also skips the regularization step)
            if (x_uij > 0)
            {
                return;
            }

            // item bias terms: gradient is +1 for i and -1 for j, each with L2 shrinkage
            if (update_i)
            {
                item_bias[i] += (float)(LearnRate * (1 - BiasReg * item_bias[i]));
            }

            if (update_j)
            {
                item_bias[j] += (float)(LearnRate * (-1 - BiasReg * item_bias[j]));
            }

            // latent factors: snapshot the old values first so the three updates
            // in each dimension all read consistent inputs
            for (int f = 0; f < num_factors; f++)
            {
                float user_f = user_factors[u, f];
                float item_if = item_factors[i, f];
                float item_jf = item_factors[j, f];

                if (update_u)
                {
                    user_factors[u, f] = (float)(user_f + LearnRate * (item_if - item_jf - RegU * user_f));
                }

                if (update_i)
                {
                    item_factors[i, f] = (float)(item_if + LearnRate * (user_f - RegI * item_if));
                }

                if (update_j)
                {
                    item_factors[j, f] = (float)(item_jf + LearnRate * (-user_f - RegJ * item_jf));
                }
            }
        }
// Code example #2
        /// <summary>Performs one stochastic gradient descent step for a sampled (user, item, item) triple</summary>
        /// <param name="u">the user ID</param>
        /// <param name="i">the ID of the first item</param>
        /// <param name="j">the ID of the second item</param>
        /// <param name="update_u">if true, update the user latent factors</param>
        /// <param name="update_i">if true, update the latent factors of the first item</param>
        /// <param name="update_j">if true, update the latent factors of the second item</param>
        protected override void UpdateFactors(int u, int i, int j, bool update_u, bool update_i, bool update_j)
        {
            // score difference of the sampled item pair:
            // (b_i - b_j) + <v_u, v_i - v_j>
            double x_uij = item_bias[i] - item_bias[j]
                + MatrixExtensions.RowScalarProductWithRowDifference(user_factors, u, item_factors, i, item_factors, j);

            // loss gradient indicator: 1 for mis-ranked pairs (x_uij < 0), 0 otherwise;
            // unlike an early return, the regularization below is applied either way
            double gradient = x_uij < 0 ? 1 : 0;

            // item bias terms: gradient is +/- the indicator, each with L2 shrinkage
            if (update_i)
            {
                item_bias[i] += (float)(learn_rate * (gradient - BiasReg * item_bias[i]));
            }

            if (update_j)
            {
                item_bias[j] += (float)(learn_rate * (-gradient - BiasReg * item_bias[j]));
            }

            // latent factors: snapshot the old values first so the three updates
            // in each dimension all read consistent inputs
            for (int f = 0; f < num_factors; f++)
            {
                float user_f = user_factors[u, f];
                float item_if = item_factors[i, f];
                float item_jf = item_factors[j, f];

                if (update_u)
                {
                    user_factors[u, f] = (float)(user_f + learn_rate * ((item_if - item_jf) * gradient - reg_u * user_f));
                }

                if (update_i)
                {
                    item_factors[i, f] = (float)(item_if + learn_rate * (user_f * gradient - reg_i * item_if));
                }

                if (update_j)
                {
                    item_factors[j, f] = (float)(item_jf + learn_rate * (-user_f * gradient - reg_j * item_jf));
                }
            }
        }
// Code example #3
        /// <summary>Checks RowScalarProductWithRowDifference on small hand-built matrices</summary>
        [Test()] public void TestScalarProductWithRowDifference()
        {
            // two matrices whose every row is (1, 2, 3, 4, 5)
            float[] values = { 1, 2, 3, 4, 5 };

            var first = new Matrix <float>(5, 5);
            var second = new Matrix <float>(5, 5);
            for (int row = 0; row < 5; row++)
            {
                first.SetRow(row, values);
                second.SetRow(row, values);
            }

            // a third matrix of all ones: start from zeros and increment each entry
            var third = new Matrix <float>(5, 5);
            MatrixExtensions.Inc(third, 1.0f);

            // (1..5) . ((1..5) - (1,1,1,1,1)) = (1..5) . (0,1,2,3,4) = 40
            Assert.AreEqual(40, MatrixExtensions.RowScalarProductWithRowDifference(first, 2, second, 3, third, 1));
        }
// Code example #4
        /// <summary>Performs one SGD step for a (user, item, other item) triple in the group-weighted
        /// factorization model, updating biases, latent factors, feature factors, and group weights</summary>
        /// <param name="user_id">the (internal) user ID</param>
        /// <param name="item_id">the ID of the first item</param>
        /// <param name="other_item_id">the ID of the second item</param>
        /// <param name="update_u">if true, update the user latent factors</param>
        /// <param name="update_i">if true, update the latent factors of the first item</param>
        /// <param name="update_j">if true, update the latent factors of the second item</param>
        protected override void UpdateFactors(int user_id, int item_id, int other_item_id, bool update_u, bool update_i, bool update_j)
        {
            // used by WrapRec-based logic: map internal integer IDs back to original string IDs
            string userIdOrg = UsersMap.ToOriginalID(user_id);
            string itemIdOrg = ItemsMap.ToOriginalID(item_id);

            // feedback attributes of this (user, item) interaction, if configured;
            // NOTE(review): each tuple appears to be (feature ID, feature value) — confirm against Translation
            List <Tuple <int, float> > features = new List <Tuple <int, float> >();

            if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
            {
                features = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg].GetAllAttributes().Select(a => a.Translation).NormalizeSumToOne(Normalize).ToList();
            }

            double item_bias_diff = item_bias[item_id] - item_bias[other_item_id];

            // group weights: group 0 is the user group, group 1 the item group,
            // groups >= 2 are feature groups (hence the "- 2" offsets further down)
            int   g_u     = 0; //FeatureGroups[user_id];
            int   g_i     = 1; //FeatureGroups[item_id];
            float alpha_u = weights[g_u];
            float alpha_i = weights[g_i];

            // user-item part of the score difference: <v_u, v_i - v_j>
            double u_i_term = MatrixExtensions.RowScalarProductWithRowDifference(
                user_factors, user_id, item_factors, item_id, item_factors, other_item_id);

            // weighted score difference so far: (b_i - b_j) + alpha_u * alpha_i * <v_u, v_i - v_j>
            double y_uij = item_bias_diff + alpha_u * alpha_i * u_i_term;

            double items_z_term_sum = 0;

            // per-feature terms, kept individually and also accumulated per feature group
            // for the group-weight (alpha) updates at the end of this method
            double[] items_z_terms = new double[features.Count];
            double[] group_z_terms = new double[NumGroups - 2];
            int      z             = 0;

            foreach (var feat in features)
            {
                int   g_z     = FeatureGroups[feat.Item1];
                float alpha_z = weights[g_z];
                // x_z * <v_z, v_i - v_j>
                items_z_terms[z] = feat.Item2 * MatrixExtensions.RowScalarProductWithRowDifference(
                    feature_factors, feat.Item1, item_factors, item_id, item_factors, other_item_id);
                group_z_terms[g_z - 2] += items_z_terms[z];
                items_z_term_sum       += alpha_z * items_z_terms[z];
                z++;
            }
            y_uij += alpha_i * items_z_term_sum;

            // common BPR gradient multiplier: sigmoid(-y_uij) = 1 / (1 + e^{y_uij})
            double exp     = Math.Exp(y_uij);
            double sigmoid = 1 / (1 + exp);

            // adjust bias terms: gradient is +/- sigmoid, each with L2 shrinkage
            if (update_i)
            {
                double update = sigmoid - BiasReg * item_bias[item_id];
                item_bias[item_id] += (float)(learn_rate * update);
            }

            if (update_j)
            {
                double update = -sigmoid - BiasReg * item_bias[other_item_id];
                item_bias[other_item_id] += (float)(learn_rate * update);
            }

            // adjust factors; old values are snapshotted per dimension so that
            // the user, feature, and item updates all read consistent inputs
            for (int f = 0; f < num_factors; f++)
            {
                float v_uf = user_factors[user_id, f];
                float v_if = item_factors[item_id, f];
                float v_jf = item_factors[other_item_id, f];

                if (update_u)
                {
                    double update = alpha_u * alpha_i * (v_if - v_jf) * sigmoid - reg_u * v_uf;
                    user_factors[user_id, f] = (float)(v_uf + learn_rate * update);
                }

                // update features latent factors and make a sum term to use later for updating item factors
                // sum = Sum_{l=1}{num_features} c_l * v_{c_l,f}
                float sum = 0f;

                foreach (var feat in features)
                {
                    float v_zf    = feature_factors[feat.Item1, f];
                    float x_z     = feat.Item2;
                    int   g_z     = FeatureGroups[feat.Item1];
                    float alpha_z = weights[g_z];

                    // weighted contribution of this feature, reused in the item factor updates below
                    sum += x_z * v_zf * alpha_z;

                    double update = alpha_i * alpha_z * x_z * (v_if - v_jf) * sigmoid - reg_c * v_zf;
                    feature_factors[feat.Item1, f] = (float)(v_zf + learn_rate * update);
                }

                if (update_i)
                {
                    double update = (alpha_u * alpha_i * v_uf + alpha_i * sum) * sigmoid - reg_i * v_if;
                    item_factors[item_id, f] = (float)(v_if + learn_rate * update);
                }

                if (update_j)
                {
                    double update = (alpha_u * alpha_i * -v_uf - alpha_i * sum) * sigmoid - reg_j * v_jf;
                    item_factors[other_item_id, f] = (float)(v_jf + learn_rate * update);
                }
            }

            // update group weights (alphas); note these use the cached pre-update terms
            // computed above, not the factors modified in the loop
            double update_alpha_u = alpha_i * u_i_term * sigmoid - reg_w * alpha_u;

            weights[g_u] = (float)(alpha_u + learn_rate * update_alpha_u);

            //NormalizeWeights();

            double update_alpha_i = (alpha_u * u_i_term + items_z_term_sum) * sigmoid - reg_w * alpha_i;

            weights[g_i] = (float)(alpha_i + learn_rate * update_alpha_i);

            // feature-group weights live at indices >= 2, matching group_z_terms[g_z - 2] above
            for (int g = 0; g < NumGroups - 2; g++)
            {
                double alpha_z_g        = weights[g + 2];
                double update_alpha_z_g = alpha_i * group_z_terms[g] * sigmoid - reg_w * alpha_z_g;
                weights[g + 2] = (float)(alpha_z_g + learn_rate * update_alpha_z_g);
            }

            // normalize weights so the alphas stay on a comparable scale
            NormalizeWeights();
        }
// Code example #5
// File: BPRFM.cs — Project: babakx/WrapRec.Extensions
        /// <summary>Performs one BPR-style SGD step for a (user, item, other item) triple,
        /// including the latent factors of the interaction's feedback features</summary>
        /// <param name="user_id">the (internal) user ID</param>
        /// <param name="item_id">the ID of the first item</param>
        /// <param name="other_item_id">the ID of the second item</param>
        /// <param name="update_u">if true, update the user latent factors</param>
        /// <param name="update_i">if true, update the latent factors of the first item</param>
        /// <param name="update_j">if true, update the latent factors of the second item</param>
        protected override void UpdateFactors(int user_id, int item_id, int other_item_id, bool update_u, bool update_i, bool update_j)
        {
            // map internal IDs back to the original string IDs (used by WrapRec-based logic)
            string originalUserId = UsersMap.ToOriginalID(user_id);
            string originalItemId = ItemsMap.ToOriginalID(item_id);

            // feedback attributes of this (user, item) interaction, if configured;
            // each tuple holds a feature ID and its (optionally normalized) value
            var features = new List <Tuple <int, float> >();

            if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
            {
                features = Split.Container.FeedbacksDic[originalUserId, originalItemId]
                    .GetAllAttributes()
                    .Select(a => a.Translation)
                    .NormalizeSumToOne(Normalize)
                    .ToList();
            }

            // score difference: (b_i - b_j) + <v_u, v_i - v_j> + Sum_z x_z * <v_z, v_i - v_j>
            double y_uij = item_bias[item_id] - item_bias[other_item_id]
                + MatrixExtensions.RowScalarProductWithRowDifference(
                    user_factors, user_id, item_factors, item_id, item_factors, other_item_id);

            foreach (var feature in features)
            {
                y_uij += feature.Item2 * MatrixExtensions.RowScalarProductWithRowDifference(
                    feature_factors, feature.Item1, item_factors, item_id, item_factors, other_item_id);
            }

            // common BPR gradient multiplier: sigmoid(-y_uij) = 1 / (1 + e^{y_uij})
            double exp     = Math.Exp(y_uij);
            double sigmoid = 1 / (1 + exp);

            // item bias terms: gradient is +/- sigmoid, each with L2 shrinkage
            // (NOTE(review): original carried "TODO: check why -Bias" on the i update)
            if (update_i)
            {
                item_bias[item_id] += (float)(learn_rate * (sigmoid - BiasReg * item_bias[item_id]));
            }

            if (update_j)
            {
                item_bias[other_item_id] += (float)(learn_rate * (-sigmoid - BiasReg * item_bias[other_item_id]));
            }

            // latent factors: snapshot the old values per dimension so the user,
            // feature, and item updates all read consistent inputs
            for (int f = 0; f < num_factors; f++)
            {
                float userFactor  = user_factors[user_id, f];
                float itemFactor  = item_factors[item_id, f];
                float otherFactor = item_factors[other_item_id, f];

                if (update_u)
                {
                    user_factors[user_id, f] =
                        (float)(userFactor + learn_rate * ((itemFactor - otherFactor) * sigmoid - reg_u * userFactor));
                }

                // update the feature factors and accumulate
                // featureSum = Sum_{l=1}{num_features} c_l * v_{c_l,f}
                // for the item factor updates below
                float featureSum = 0f;

                foreach (var feature in features)
                {
                    float featureFactor = feature_factors[feature.Item1, f];
                    float featureValue  = feature.Item2;

                    featureSum += featureValue * featureFactor;

                    feature_factors[feature.Item1, f] = (float)(featureFactor
                        + learn_rate * (featureValue * (itemFactor - otherFactor) * sigmoid - reg_c * featureFactor));
                }

                if (update_i)
                {
                    item_factors[item_id, f] =
                        (float)(itemFactor + learn_rate * ((userFactor + featureSum) * sigmoid - reg_i * itemFactor));
                }

                if (update_j)
                {
                    item_factors[other_item_id, f] =
                        (float)(otherFactor + learn_rate * ((-userFactor - featureSum) * sigmoid - reg_j * otherFactor));
                }
            }
        }