        /// <summary>
        /// Perform one iteration of the mapping learning process
        /// </summary>
        public override void IterateMapping()
        {
            _MapToLatentFactorSpace = __MapToLatentFactorSpace;             // make sure we don't memoize during training

            // stochastic gradient descent
            int item_id = SampleItem();

            float[] est_factors = MapToLatentFactorSpace(item_id);

            for (int j = 0; j < num_factors; j++)
            {
                // TODO do we need an absolute term here???
                double diff = est_factors[j] - item_factors[item_id, j];
                if (diff > 0)
                {
                    foreach (int attribute in item_attributes[item_id])
                    {
                        double w     = attribute_to_factor[attribute, j];
                        double deriv = diff * w + reg_mapping * w;
                        MatrixExtensions.Inc(attribute_to_factor, attribute, j, learn_rate_mapping * -deriv);
                    }
                    // bias term
                    double w_bias     = attribute_to_factor[NumItemAttributes, j];
                    double deriv_bias = diff * w_bias + reg_mapping * w_bias;
                    MatrixExtensions.Inc(attribute_to_factor, NumItemAttributes, j, learn_rate_mapping * -deriv_bias);
                }
            }
        }
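
For context, the estimated factors above typically come from a linear mapping over the item's attributes. Below is a minimal sketch of what the underlying non-memoized mapping could look like, assuming the estimate is the sum of the attribute weight rows of attribute_to_factor plus a trailing bias row indexed by NumItemAttributes (matching the bias update above); the method name and signature are assumptions for illustration, not the library's actual code.

        // Hedged sketch (not part of the original listing): map an item to factor space
        // as the sum of its attributes' weight rows plus a trailing bias row.
        protected float[] __MapToLatentFactorSpace(int item_id)
        {
            var est_factors = new float[num_factors];

            // start from the bias row (stored after the last attribute index)
            for (int j = 0; j < num_factors; j++)
                est_factors[j] = attribute_to_factor[NumItemAttributes, j];

            // add the weight row of every attribute the item has
            foreach (int attribute in item_attributes[item_id])
                for (int j = 0; j < num_factors; j++)
                    est_factors[j] += attribute_to_factor[attribute, j];

            return est_factors;
        }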
        /// <summary>
        /// Perform one iteration of the mapping learning process (user-attribute variant)
        /// </summary>
        public override void IterateMapping()
        {
            // stochastic gradient descent
            int user_id = SampleUserWithAttributes();

            double[] est_factors = MapUserToLatentFactorSpace(user_attributes[user_id]);

            for (int j = 0; j < num_factors; j++)
            {
                // TODO do we need an absolute term here???
                double diff = est_factors[j] - user_factors[user_id, j];
                if (diff > 0)
                {
                    foreach (int attribute in user_attributes[user_id])
                    {
                        double w     = attribute_to_factor[attribute, j];
                        double deriv = diff * w + reg_mapping * w;
                        MatrixExtensions.Inc(attribute_to_factor, attribute, j, learn_rate_mapping * -deriv);
                    }
                    // bias term
                    double w_bias     = attribute_to_factor[NumUserAttributes, j];
                    double deriv_bias = diff * w_bias + reg_mapping * w_bias;
                    MatrixExtensions.Inc(attribute_to_factor, NumUserAttributes, j, learn_rate_mapping * -deriv_bias);
                }
            }
        }
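
The sampling step above needs a user that actually has attributes, otherwise the inner foreach has nothing to update. A plausible sketch of such a helper follows, assuming a random generator named random, a MaxUserID bound, and that user_attributes[user_id] exposes a Count; all of these names are assumptions for illustration, not the library's actual implementation.

        // Hedged sketch (assumption, not from the original listing): draw user IDs
        // uniformly at random until one with at least one attribute is found.
        private int SampleUserWithAttributes()
        {
            int user_id;
            do
                user_id = random.Next(MaxUserID + 1);          // uniform draw over all users
            while (user_attributes[user_id].Count == 0);       // retry if the user has no attributes
            return user_id;
        }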
        [Test()] public void TestScalarProductWithRowDifference()
        {
            var matrix1 = new Matrix<float>(5, 5);

            float[] row = { 1, 2, 3, 4, 5 };
            for (int i = 0; i < 5; i++)
            {
                matrix1.SetRow(i, row);
            }
            var matrix2 = new Matrix<float>(5, 5);

            for (int i = 0; i < 5; i++)
            {
                matrix2.SetRow(i, row);
            }
            var matrix3 = new Matrix<float>(5, 5);

            MatrixExtensions.Inc(matrix3, 1.0f);

            Assert.AreEqual(40, MatrixExtensions.RowScalarProductWithRowDifference(matrix1, 2, matrix2, 3, matrix3, 1));
        }
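
The expected value follows directly from the fixture: row 2 of matrix1 and row 3 of matrix2 are both { 1, 2, 3, 4, 5 }, and row 1 of matrix3 is all ones after the Inc call, so the result is 1*(1-1) + 2*(2-1) + 3*(3-1) + 4*(4-1) + 5*(5-1) = 0 + 2 + 6 + 12 + 20 = 40. As a sketch of the semantics being tested (an assumed re-implementation, not the library method; the NumberOfColumns property is also an assumption):

        // Hedged sketch of the assumed semantics of RowScalarProductWithRowDifference:
        // dot product of row i of m1 with the element-wise difference (row j of m2 - row k of m3).
        static float RowScalarProductWithRowDifferenceSketch(
            Matrix<float> m1, int i, Matrix<float> m2, int j, Matrix<float> m3, int k)
        {
            float result = 0;
            for (int c = 0; c < m1.NumberOfColumns; c++)
                result += m1[i, c] * (m2[j, c] - m3[k, c]);
            return result;
        }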