Example #1
0
        // NOTE: this method ignores the base-class properties WithReplacement and UniformUserSampling.
        public override void Iterate()
        {
            int elapsedMs = (int)Wrap.MeasureTime(delegate()
            {
                for (int step = 0; step < Feedback.Count; step++)
                {
                    // the dynamic sampler is rebuilt roughly every |items| * log(|items|) samples
                    if (UnobservedNegSampler == UnobservedNegSampler.Dynamic &&
                        step % (AllItems.Count * Math.Log(AllItems.Count)) == 0)
                    {
                        UpdateDynamicSampler();
                    }

                    var positive = SamplePosFeedback();
                    var negative = SampleNegFeedback(positive);

                    int u = UsersMap.ToInternalID(positive.User.Id);
                    int i = ItemsMap.ToInternalID(positive.Item.Id);
                    int j = ItemsMap.ToInternalID(negative.Item.Id);

                    UpdateFactors(u, i, j, true, true, update_j);
                }

                // adaptive positive-level weights are refreshed once per full iteration
                if (PosSampler == PosSampler.AdaptedWeight)
                {
                    UpdatePosSampler();
                }
            }).TotalMilliseconds;

            Model.OnIterate(this, elapsedMs);
        }
Example #2
0
        // Predicts the score for one feedback as item bias + user/item interaction
        // plus the (optional) feature-factor interaction terms.
        public virtual float Predict(Feedback feedback)
        {
            int userId = UsersMap.ToInternalID(feedback.User.Id);
            int itemId = ItemsMap.ToInternalID(feedback.Item.Id);

            // translate attributes into (featureIndex, value) pairs, normalized to sum to one
            var features = new List<Tuple<int, float>>();
            if (!IgnoreFeaturesOnPrediction)
            {
                features = feedback.GetAllAttributes()
                           .Select(a => FeatureBuilder.TranslateAttribute(a))
                           .NormalizeSumToOne()
                           .ToList();
            }

            // ids beyond the training range belong to cold-start users/items: no learnt factors
            bool coldUser = userId > MaxUserID;
            bool coldItem = itemId > MaxItemID;

            float userFeatTerm = 0f;
            float itemFeatTerm = 0f;

            foreach (var feature in features)
            {
                // features first seen in the test set (index >= NumTrainFeaturs) have no learnt factors
                if (feature.Item1 >= NumTrainFeaturs)
                    continue;

                float value = feature.Item2;

                if (!coldUser)
                    userFeatTerm += value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, user_factors, userId);

                if (!coldItem)
                    itemFeatTerm += value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, item_factors, itemId);
            }

            float bias        = coldItem ? 0f : item_bias[itemId];
            float interaction = (coldUser || coldItem) ? 0f : MatrixExtensions.RowScalarProduct(user_factors, userId, item_factors, itemId);

            return bias + interaction + userFeatTerm + itemFeatTerm;
        }
        internal override void Reload(RecyclerViewObservableAdapterBase adapter)
        {
            var groupedAdapter = (RecyclerViewObservableGroupedAdapterBase)adapter;

            // rebuild the flat item map: header, then per-section (header, rows, footer), then footer
            ItemsMap.Clear();
            ItemsMap.Add(ItemMap.CreateForHeader());

            var sections = groupedAdapter.GetSectionsCount();
            for (var s = 0; s < sections; s++)
            {
                ItemsMap.Add(ItemMap.CreateForSectionHeader(groupedAdapter.GetItemsGroup(s)?.Key));

                var rows = groupedAdapter.GetSectionItemsCount(s);
                for (var r = 0; r < rows; r++)
                {
                    ItemsMap.Add(ItemMap.CreateForItem(groupedAdapter.GetItem(s, r)));
                }

                ItemsMap.Add(ItemMap.CreateForSectionFooter(groupedAdapter.GetItemsGroup(s)?.Key));
            }

            ItemsMap.Add(ItemMap.CreateForFooter());

            groupedAdapter.NotifyDataSetChanged();
        }
Example #4
0
        internal override void Reload(RecyclerViewObservableAdapter adapter)
        {
            // rebuild the flat item map: header, then per-section (header, rows, footer), then footer
            ItemsMap.Clear();
            ItemsMap.Add(ItemMap.CreateForHeader());

            var sections = adapter.GetSectionsCount();
            for (var s = 0; s < sections; s++)
            {
                // plain (non-grouped) adapter: sections carry no key
                ItemsMap.Add(ItemMap.CreateForSectionHeader(null));

                var rows = adapter.GetSectionItemsCount(s);
                for (var r = 0; r < rows; r++)
                {
                    ItemsMap.Add(ItemMap.CreateForItem(adapter.GetItem(s, r)));
                }

                ItemsMap.Add(ItemMap.CreateForSectionFooter(null));
            }

            ItemsMap.Add(ItemMap.CreateForFooter());

            adapter.NotifyDataSetChanged();
        }
Example #5
0
        // Predicts a score for an internal (user, item) pair, adding feature terms
        // when the corresponding feedback with attributes exists in the container.
        public override float Predict(int user_id, int item_id)
        {
            string userIdOrg = UsersMap.ToOriginalID(user_id);
            string itemIdOrg = ItemsMap.ToOriginalID(item_id);

            // resolve the pair's feedback attributes (if any) into normalized (featureIndex, value) pairs
            var features = new List<Tuple<int, float>>();
            if (!IgnoreFeaturesOnPrediction && Split.Container.FeedbacksDic.ContainsKey(userIdOrg, itemIdOrg))
            {
                var feedback = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg];
                features = feedback.GetAllAttributes()
                           .Select(a => FeatureBuilder.TranslateAttribute(a))
                           .NormalizeSumToOne(Normalize)
                           .ToList();
            }

            // ids beyond the training range belong to cold-start users/items: no learnt factors
            bool coldUser = user_id > MaxUserID;
            bool coldItem = item_id > MaxItemID;

            float userFeatTerm = 0f, itemFeatTerm = 0f;

            foreach (var feature in features)
            {
                // features first seen in the test set have no learnt factors; skip them
                if (feature.Item1 >= NumTrainFeaturs)
                    continue;

                float value = feature.Item2;

                if (!coldUser)
                    userFeatTerm += value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, user_factors, user_id);

                if (!coldItem)
                    itemFeatTerm += value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, item_factors, item_id);
            }

            float bias        = coldItem ? 0f : item_bias[item_id];
            float interaction = (coldUser || coldItem) ? 0f : MatrixExtensions.RowScalarProduct(user_factors, user_id, item_factors, item_id);

            return bias + interaction + userFeatTerm + itemFeatTerm;
        }
Example #6
0
        // Samples one negative (unobserved) feedback for the given positive feedback,
        // according to the configured UnobservedNegSampler strategy. Every strategy uses
        // rejection sampling: it loops until the candidate does not clash with the
        // positive feedback's user (or with a user/item pair already observed).
        // Returns null for an unrecognized strategy.
        public virtual Feedback SampleUnobservedNegFeedback(Feedback posFeedback)
        {
            Feedback neg = null;

            switch (UnobservedNegSampler)
            {
            case UnobservedNegSampler.UniformFeedback:
                // uniform over all training feedback, rejecting candidates of the same user
                do
                {
                    neg = TrainFeedback[random.Next(TrainFeedback.Count)];
                } while (neg.User == posFeedback.User);
                break;

            case UnobservedNegSampler.DynamicLevel:
                // first sample a positive level from the categorical sampler,
                // then pick a feedback uniformly within that level
                do
                {
                    int l = PosLevels[_posLevelSampler.Sample()];
                    int i = random.Next(LevelPosFeedback[l].Count);
                    neg = LevelPosFeedback[l][i];
                } while (neg.User == posFeedback.User);
                break;

            case UnobservedNegSampler.UniformItem:
            {
                // uniform over all items, rejecting items this user has already interacted with
                string itemId;
                int    user_id, item_id;
                do
                {
                    itemId  = AllItems[random.Next(AllItems.Count)];
                    item_id = ItemsMap.ToInternalID(itemId);
                    user_id = UsersMap.ToInternalID(posFeedback.User.Id);
                    //} while (UserFeedback[posFeedback.User.Id].Select(f => f.Item.Id).Contains(itemId));
                } while (Feedback.UserMatrix[user_id, item_id] == true);
                neg = new Feedback(posFeedback.User, Split.Container.Items[itemId]);
            }
            break;

            case UnobservedNegSampler.Dynamic:
            {
                // candidate item drawn from the dynamic (factor-based) sampler; same rejection rule
                string negItemId;
                int    user_id, item_id;
                do
                {
                    negItemId = SampleNegItemDynamic(posFeedback);
                    item_id   = ItemsMap.ToInternalID(negItemId);
                    user_id   = UsersMap.ToInternalID(posFeedback.User.Id);
                    //} while (UserFeedback[posFeedback.User.Id].Select(f => f.Item.Id).Contains(negItemId));
                } while (Feedback.UserMatrix[user_id, item_id] == true);
                neg = new Feedback(posFeedback.User, Split.Container.Items[negItemId]);
            }
            break;

            default:
                break;
            }

            // counts every sampling attempt's result, including the null default case
            NumUnobservedNeg++;
            return(neg);
        }
Example #7
0
 // Refreshes the per-factor statistics used by the dynamic negative sampler:
 // for each latent factor, ranks all items by that factor value (descending)
 // and records the standard deviation of the factor across all items.
 protected virtual void UpdateDynamicSampler()
 {
     for (int factor = 0; factor < NumFactors; factor++)
     {
         _factorBasedRank[factor] = AllItems
             .OrderByDescending(itemId => item_factors[ItemsMap.ToInternalID(itemId), factor])
             .ToList();

         _itemFactorsStdev[factor] = AllItems
             .Select(itemId => item_factors[ItemsMap.ToInternalID(itemId), factor])
             .Stdev();
     }
 }
        internal void RemoveGroups([NotNull] NotifyCollectionChangedEventArgs args, [NotNull] RecyclerView.Adapter adapter)
        {
            // translate the section index into a flat offset and drop the whole span
            var start  = GetSectionPlainOffset(args.OldStartingIndex);
            var length = GetSectionsLength(args.OldItems.NotNull());

            ItemsMap.RemoveRange(start, length);

            adapter.NotifyItemRangeRemoved(start, length);
        }
        // Looks up an item by id, lazily populating the map on first use.
        // Returns null when the id is unknown.
        public async Task<Item> GetItemAsync(int itemId)
        {
            bool mapNotReady = ItemsMap == null || ItemsMap.Count == 0;
            if (mapNotReady)
            {
                await SetUpItemsMapAsync();
            }

            ItemsMap.TryGetValue(itemId, out var result);
            return result;
        }
        /// <summary>
        /// Converts the WrapRec split into MyMediaLite's rating format, translates the
        /// feedback attributes of both train and test sets (so their internal feature ids
        /// exist before training/evaluation), wires the FM instance and trains it.
        /// </summary>
        /// <param name="split">Train/test split to train on.</param>
        public override void Train(Split split)
        {
            var mmlInstance = (FM)MmlRecommenderInstance;
            var featBuilder = new FmFeatureBuilder();

            // only a WeightedBPRFM instance needs the per-feature group mapping
            var wFm = MmlRecommenderInstance as WeightedBPRFM;

            if (DataType == WrapRec.IO.DataType.Ratings)
            {
                var mmlFeedback = new Ratings();
                foreach (var feedback in split.Train)
                {
                    var rating = (Rating)feedback;
                    mmlFeedback.Add(UsersMap.ToInternalID(rating.User.Id), ItemsMap.ToInternalID(rating.Item.Id), rating.Value);

                    TranslateFeedbackAttributes(feedback, featBuilder, wFm);
                }
                ((IRatingPredictor)MmlRecommenderInstance).Ratings = mmlFeedback;
            }

            // test-set attributes are translated too so they resolve at prediction time
            foreach (var feedback in split.Test)
            {
                TranslateFeedbackAttributes(feedback, featBuilder, wFm);
            }

            mmlInstance.Split          = split;
            mmlInstance.Model          = this;
            mmlInstance.UsersMap       = UsersMap;
            mmlInstance.ItemsMap       = ItemsMap;
            mmlInstance.FeatureBuilder = featBuilder;

            Logger.Current.Trace("Training with MmlFmRecommender recommender...");
            PureTrainTime = (int)Wrap.MeasureTime(delegate() { mmlInstance.Train(); }).TotalMilliseconds;
        }

        // Translates all attributes of one feedback into internal feature ids and assigns
        // them to the "other" attribute group (user = 0, item = 1, others = 2). When a
        // WeightedBPRFM is used, the feature-to-group mapping is registered on it as well.
        // Extracted because this loop was duplicated verbatim for train and test sets.
        private static void TranslateFeedbackAttributes(Feedback feedback, FmFeatureBuilder featBuilder, WeightedBPRFM wFm)
        {
            foreach (var attr in feedback.GetAllAttributes())
            {
                attr.Translation = featBuilder.TranslateAttribute(attr);
                // hard code attribute group. User is 0, item is 1, others is 2
                attr.Group = 2;
                if (wFm != null && !wFm.FeatureGroups.ContainsKey(attr.Translation.Item1))
                {
                    wFm.FeatureGroups.Add(attr.Translation.Item1, 2);
                }
            }
        }
Example #11
0
        // This method makes sure that all item and user ids already have an internal id
        // when they are used in evaluation. This prevents cross-thread access to the
        // id maps ("already existing key in dictionary" error) when evaluation is
        // performed in parallel for each user.
        private void ExhaustInternalIds(Split split)
        {
            foreach (var item in split.Container.Items.Values)
            {
                ItemsMap.ToInternalID(item.Id);
            }

            // BUG FIX: previously this loop iterated Container.Items.Values, so user ids
            // were never pre-registered and the race this method guards against remained
            // for users. (Assumes Container exposes Users analogous to Items — confirm.)
            foreach (var user in split.Container.Users.Values)
            {
                UsersMap.ToInternalID(user.Id);
            }
        }
Example #12
0
        /// <summary>
        /// Converts the WrapRec split into MyMediaLite positive-only feedback, translates
        /// the feedback attributes of both train and test sets (so their internal feature
        /// ids exist before training/evaluation), wires the BPRFM instance and trains it.
        /// </summary>
        /// <param name="split">Train/test split to train on.</param>
        public override void Train(Split split)
        {
            var mmlInstance = (BPRFM)MmlRecommenderInstance;
            var featBuilder = new FmFeatureBuilder();
            var mmlFeedback = new PosOnlyFeedback <SparseBooleanMatrix>();

            // only a WeightedBPRFM instance needs the per-feature group mapping
            var wBprFm = MmlRecommenderInstance as WeightedBPRFM;

            foreach (var feedback in split.Train)
            {
                mmlFeedback.Add(UsersMap.ToInternalID(feedback.User.Id), ItemsMap.ToInternalID(feedback.Item.Id));

                // the attributes are translated so that they can be used later for training
                foreach (var attr in feedback.GetAllAttributes())
                {
                    attr.Translation = featBuilder.TranslateAttribute(attr);
                    // hard code attribute group. User is 0, item is 1, others is 2
                    attr.Group = 2;
                    if (wBprFm != null && !wBprFm.FeatureGroups.ContainsKey(attr.Translation.Item1))
                    {
                        wBprFm.FeatureGroups.Add(attr.Translation.Item1, 2);
                    }
                }
            }

            foreach (var feedback in split.Test)
            {
                // the attributes are translated so that they can be used later for evaluation
                foreach (var attr in feedback.GetAllAttributes())
                {
                    attr.Translation = featBuilder.TranslateAttribute(attr);
                    // hard code attribute group. User is 0, item is 1, others is 2
                    attr.Group = 2;
                    if (wBprFm != null && !wBprFm.FeatureGroups.ContainsKey(attr.Translation.Item1))
                    {
                        // consistency fix: the previous null-conditional access here ("wBprFm?.")
                        // was redundant — wBprFm is already null-checked in the condition above
                        wBprFm.FeatureGroups.Add(attr.Translation.Item1, 2);
                    }
                }
            }

            mmlInstance.Feedback       = mmlFeedback;
            mmlInstance.Split          = split;
            mmlInstance.Model          = this;
            mmlInstance.UsersMap       = UsersMap;
            mmlInstance.ItemsMap       = ItemsMap;
            mmlInstance.FeatureBuilder = featBuilder;

            Logger.Current.Trace("Training with MmlBprfmRecommender recommender...");
            PureTrainTime = (int)Wrap.MeasureTime(delegate() { mmlInstance.Train(); }).TotalMilliseconds;
        }
Example #13
0
        // Converts the WrapRec train set into the corresponding MyMediaLite feedback
        // structure (ratings, timed ratings, or positive-only feedback, depending on
        // DataType) and trains the wrapped recommender.
        public override void Train(Split split)
        {
            switch (DataType)
            {
            case IO.DataType.Ratings:
            {
                var ratings = new Ratings();
                foreach (var feedback in split.Train)
                {
                    var rating = (Rating)feedback;
                    ratings.Add(UsersMap.ToInternalID(rating.User.Id), ItemsMap.ToInternalID(rating.Item.Id), rating.Value);
                }
                ((IRatingPredictor)MmlRecommenderInstance).Ratings = ratings;
                break;
            }

            case IO.DataType.TimeAwareRating:
            {
                var timedRatings = new TimedRatings();
                // timestamps are stored as day offsets from the first ML-10M rating
                var epoch = new DateTime(1998, 11, 1);

                foreach (var feedback in split.Train)
                {
                    var rating = (Rating)feedback;
                    var time   = epoch.AddDays(double.Parse(feedback.Attributes["timestamp"].Value));
                    timedRatings.Add(UsersMap.ToInternalID(rating.User.Id), ItemsMap.ToInternalID(rating.Item.Id),
                                     rating.Value, time);
                }
                ((ITimeAwareRatingPredictor)MmlRecommenderInstance).Ratings = timedRatings;
                break;
            }

            default:
            {
                var posFeedback = new PosOnlyFeedback <SparseBooleanMatrix>();
                foreach (var feedback in split.Train)
                {
                    posFeedback.Add(UsersMap.ToInternalID(feedback.User.Id), ItemsMap.ToInternalID(feedback.Item.Id));
                }
                ((ItemRecommender)MmlRecommenderInstance).Feedback = posFeedback;

                if (MmlRecommenderInstance is IModelAwareRecommender modelAware)
                {
                    modelAware.Model = this;
                }
                break;
            }
            }

            Logger.Current.Trace("Training with MyMediaLite recommender...");
            PureTrainTime = (int)Wrap.MeasureTime(delegate() { MmlRecommenderInstance.Train(); }).TotalMilliseconds;
        }
Example #14
0
        // Recomputes the sampling distribution over positive feedback levels.
        // Each level's weight is its normalized average predicted score; the final
        // probabilities are additionally proportional to the level's feedback count.
        protected virtual void UpdatePosSampler()
        {
            int numLevels = PosLevels.Count;

            // average predicted score (user·item dot product) per level
            double[] avgScore = new double[numLevels];
            for (int l = 0; l < numLevels; l++)
            {
                foreach (Feedback f in LevelPosFeedback[PosLevels[l]])
                {
                    int u = UsersMap.ToInternalID(f.User.Id);
                    int i = ItemsMap.ToInternalID(f.Item.Id);

                    avgScore[l] += MatrixExtensions.RowScalarProduct(user_factors, u, item_factors, i);
                }
                avgScore[l] /= LevelPosFeedback[PosLevels[l]].Count;
            }

            double totalAvg = avgScore.Sum();

            // normalize the averages into per-level weights
            double[] levelWeights = new double[numLevels];
            for (int l = 0; l < numLevels; l++)
            {
                levelWeights[l] = avgScore[l] / totalAvg;
            }

            // normalizing constant for the size-weighted probabilities
            double norm = 0;
            for (int l = 0; l < numLevels; l++)
            {
                norm += levelWeights[l] * LevelPosFeedback[PosLevels[l]].Count;
            }

            double[] levelProbs = new double[numLevels];
            for (int l = 0; l < numLevels; l++)
            {
                levelProbs[l] = levelWeights[l] * LevelPosFeedback[PosLevels[l]].Count / norm;
            }

            string formatted = levelWeights.Select(w => string.Format("{0:0.00}", w)).Aggregate((a, b) => a + " " + b);
            Logger.Current.Info(formatted);

            _posLevelSampler = new Categorical(levelProbs);
        }
Example #15
0
        public override void Insert(Int32 index, Object item)
        {
            ListViewItem entry;

            if (item is ListViewItem existing)
            {
                entry = existing;
            }
            else
            {
                // fall back to a cached (or freshly created) ListViewItem for this value
                entry = ItemsMap.TryGetValue(item, new ListViewItem(item.ToString()));

                if (item is FSWatcher watcher)
                {
                    entry.Tag = watcher;
                }
            }

            // guarantee that the tag always carries a watcher instance
            if (!(entry.Tag is FSWatcher))
            {
                entry.Tag = new FSWatcher(entry.Text);
            }

            base.Insert(index, entry);
        }
        /// <summary>
        /// Synchronizes the in-memory ItemsMap with the items stored in the database:
        /// removes cached entries that no longer exist in the store, then adds or
        /// updates an entry for every store item. Guarded by a semaphore so concurrent
        /// callers cannot rebuild the map at the same time.
        /// </summary>
        public async Task SetUpItemsMapAsync()
        {
            await _itemMapSetUpSemaphore.WaitAsync();

            try
            {
                using var factory = _dbContextHelper.GetFactory();
                var dbContext = factory.GetDbContext();

                var storeItems = await dbContext.Items.ToListAsync();

                // build the id set once instead of scanning the list per cached key (was O(n*m))
                var storeIds = new HashSet<int>(storeItems.Select(i => i.Id));

                // drop cached items that were deleted from the store
                foreach (var itemId in ItemsMap.Keys)
                {
                    if (!storeIds.Contains(itemId))
                    {
                        ItemsMap.TryRemove(itemId, out _);
                    }
                }

                // upsert every store item; the indexer covers both the add and update cases
                foreach (var item in storeItems)
                {
                    ItemsMap[item.Id] = item;
                }
            }
            catch (Exception ex)
            {
                // best effort: log and keep whatever state the map is in
                _logger.LogError($"Error setting up Items Map: {ex}");
            }
            finally
            {
                _itemMapSetUpSemaphore.Release();
            }
        }
Example #17
0
        internal void AddGroups([NotNull] NotifyCollectionChangedEventArgs args, [NotNull] RecyclerView.Adapter adapter)
        {
            var insertAt = GetSectionPlainOffset(args.NewStartingIndex);
            var inserted = 0;

            foreach (IGrouping <object, object> group in args.NewItems.NotNull())
            {
                // each group contributes a section header, its items, and a section footer
                ItemsMap.Insert(insertAt + inserted++, ItemMap.CreateForSectionHeader(group?.Key));

                if (group != null)
                {
                    foreach (var element in group)
                    {
                        ItemsMap.Insert(insertAt + inserted++, ItemMap.CreateForItem(element));
                    }
                }

                ItemsMap.Insert(insertAt + inserted++, ItemMap.CreateForSectionFooter(group?.Key));
            }

            adapter.NotifyItemRangeInserted(insertAt, inserted);
        }
        // Weighted variant of Predict: every term is scaled by its attribute group's
        // learnt weight (group 0 = user, group 1 = item, feature groups from FeatureGroups).
        public override float Predict(Feedback feedback)
        {
            int userId   = UsersMap.ToInternalID(feedback.User.Id);
            int itemId   = ItemsMap.ToInternalID(feedback.Item.Id);
            var features = feedback.GetAllAttributes().Select(a => FeatureBuilder.TranslateAttribute(a));

            // ids beyond the training range belong to cold-start users/items: no learnt factors
            bool coldUser = userId > MaxUserID;
            bool coldItem = itemId > MaxItemID;

            float userFeatTerm = 0f, itemFeatTerm = 0f;

            foreach (var feature in features)
            {
                // features first seen in the test set have no learnt factors; skip them
                if (feature.Item1 >= NumTrainFeaturs)
                    continue;

                float value = feature.Item2;
                float alpha = weights[FeatureGroups[feature.Item1]];

                if (!coldUser)
                    userFeatTerm += alpha * value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, user_factors, userId);

                if (!coldItem)
                    itemFeatTerm += alpha * value * MatrixExtensions.RowScalarProduct(feature_factors, feature.Item1, item_factors, itemId);
            }

            // group 0 holds the user weight, group 1 the item weight
            float alphaU = weights[0];
            float alphaI = weights[1];

            float bias        = coldItem ? 0f : item_bias[itemId];
            float interaction = (coldUser || coldItem) ? 0f : alphaU * alphaI * MatrixExtensions.RowScalarProduct(user_factors, userId, item_factors, itemId);

            return bias + interaction + alphaU * userFeatTerm + alphaI * itemFeatTerm;
        }
Example #19
0
        // One BPR-style SGD step for a (user, positive item, negative item) triple,
        // extended with feedback-attribute (feature) factors. Updates item biases,
        // user/item latent factors and feature latent factors with the common
        // sigmoid-weighted gradient. update_u / update_i / update_j control which
        // parts of the triple are adjusted. Statement order matters: feature factors
        // are updated inside the same loop that accumulates their contribution.
        protected override void UpdateFactors(int user_id, int item_id, int other_item_id, bool update_u, bool update_i, bool update_j)
        {
            // used by WrapRec-based logic
            string userIdOrg = UsersMap.ToOriginalID(user_id);
            string itemIdOrg = ItemsMap.ToOriginalID(item_id);

            List <Tuple <int, float> > features = new List <Tuple <int, float> >();

            // the positive feedback's attributes contribute extra (normalized) feature terms
            if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
            {
                features = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg].GetAllAttributes().Select(a => a.Translation).NormalizeSumToOne(Normalize).ToList();
            }

            double item_bias_diff = item_bias[item_id] - item_bias[other_item_id];

            // y_uij = predicted score difference between positive and negative item for this user
            double y_uij = item_bias_diff + MatrixExtensions.RowScalarProductWithRowDifference(
                user_factors, user_id, item_factors, item_id, item_factors, other_item_id);

            foreach (var feat in features)
            {
                y_uij += feat.Item2 * MatrixExtensions.RowScalarProductWithRowDifference(
                    feature_factors, feat.Item1, item_factors, item_id, item_factors, other_item_id);
            }

            // 1 / (1 + e^{y_uij}) = sigmoid(-y_uij): the common multiplier of all gradients below
            double exp     = Math.Exp(y_uij);
            double sigmoid = 1 / (1 + exp);

            // adjust bias terms
            if (update_i)
            {
                // TODO: check why -Bias
                double update = sigmoid - BiasReg * item_bias[item_id];
                item_bias[item_id] += (float)(learn_rate * update);
            }

            if (update_j)
            {
                // negative item's bias moves in the opposite direction
                double update = -sigmoid - BiasReg * item_bias[other_item_id];
                item_bias[other_item_id] += (float)(learn_rate * update);
            }

            // adjust factors, one latent dimension at a time
            for (int f = 0; f < num_factors; f++)
            {
                // snapshot the pre-update values so all gradients use consistent inputs
                float v_uf = user_factors[user_id, f];
                float v_if = item_factors[item_id, f];
                float v_jf = item_factors[other_item_id, f];

                if (update_u)
                {
                    double update = (v_if - v_jf) * sigmoid - reg_u * v_uf;
                    user_factors[user_id, f] = (float)(v_uf + learn_rate * update);
                }

                // update features latent factors and make a sum term to use later for updating item factors
                // sum = Sum_{l=1}{num_features} c_l * v_{c_l,f}
                float sum = 0f;

                foreach (var feat in features)
                {
                    float v_zf = feature_factors[feat.Item1, f];
                    float x_z  = feat.Item2;

                    // accumulate the pre-update value before overwriting the factor
                    sum += x_z * v_zf;

                    double update = x_z * (v_if - v_jf) * sigmoid - reg_c * v_zf;
                    feature_factors[feat.Item1, f] = (float)(v_zf + learn_rate * update);
                }

                if (update_i)
                {
                    double update = (v_uf + sum) * sigmoid - reg_i * v_if;
                    item_factors[item_id, f] = (float)(v_if + learn_rate * update);
                }

                if (update_j)
                {
                    double update = (-v_uf - sum) * sigmoid - reg_j * v_jf;
                    item_factors[other_item_id, f] = (float)(v_jf + learn_rate * update);
                }
            }
        }
Example #20
0
        protected void _Iterate(IList <int> rating_indices, bool update_user, bool update_item)
        {
            foreach (int index in rating_indices)
            {
                int u = ratings.Users[index];
                int i = ratings.Items[index];

                int   g_u     = 0; //FeatureGroups[user_id];
                int   g_i     = 1; //FeatureGroups[item_id];
                float alpha_u = weights[g_u];
                float alpha_i = weights[g_i];

                // used by WrapRec-based logic
                string userIdOrg = UsersMap.ToOriginalID(u);
                string itemIdOrg = ItemsMap.ToOriginalID(i);

                List <Tuple <int, float> > features = new List <Tuple <int, float> >();
                if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
                {
                    features = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg].GetAllAttributes().Select(a => a.Translation)
                               .NormalizeSumToOne(Normalize).ToList();
                }

                var p = Predict(u, i);

                float err = (p - ratings[index]) * 2;

                float sum_u = 0, sum_i = 0;
                foreach (var feature in features)
                {
                    int   j       = feature.Item1;
                    float x_j     = feature.Item2;
                    int   g_z     = 2; // FeatureGroups[feature.Item1];
                    float alpha_z = weights[g_z];

                    sum_u += x_j * alpha_z * MatrixExtensions.RowScalarProduct(user_factors, u, feature_factors, j);
                    sum_i += x_j * alpha_z * MatrixExtensions.RowScalarProduct(item_factors, i, feature_factors, j);
                }
                float ui = MatrixExtensions.RowScalarProduct(item_factors, i, user_factors, u);
                sum_u += alpha_i * ui;
                sum_i += alpha_u * ui;

                float sum_z      = 0;
                float sum_z_bias = 0;
                for (int z = 0; z < features.Count; z++)
                {
                    int   z_ix    = features[z].Item1;
                    float x_z     = features[z].Item2;
                    float sum_j   = 0;
                    int   g_z     = 2; // FeatureGroups[z_ix];
                    float alpha_z = weights[g_z];
                    for (int j = z + 1; j < features.Count; j++)
                    {
                        int   j_ix = features[j].Item1;
                        float x_j  = features[j].Item2;
                        sum_j += x_j * MatrixExtensions.RowScalarProduct(feature_factors, z_ix, feature_factors, j_ix);
                    }
                    sum_z      += 2 * alpha_z * x_z * sum_j;
                    sum_z      += x_z * alpha_u * MatrixExtensions.RowScalarProduct(feature_factors, z_ix, user_factors, u);
                    sum_z      += x_z * alpha_i * MatrixExtensions.RowScalarProduct(feature_factors, z_ix, item_factors, i);
                    sum_z_bias += x_z * feature_biases[z_ix];
                }


                float[] sum = new float[NumFactors];
                foreach (var feature in features)
                {
                    int   j       = feature.Item1;
                    float x_j     = feature.Item2;
                    int   g_z     = 2; //FeatureGroups[feature.Item1];
                    float alpha_z = weights[g_z];

                    for (int f = 0; f < NumFactors; f++)
                    {
                        sum[f] += feature_factors[j, f] * x_j * alpha_z;
                    }
                }

                for (int f = 0; f < NumFactors; f++)
                {
                    sum[f] += user_factors[u, f] * alpha_u + item_factors[i, f] * alpha_i;
                }

                // adjust biases
                global_bias -= current_learnrate * (err + RegB * global_bias);

                if (update_user)
                {
                    user_bias[u] -= current_learnrate * (err * alpha_u + RegU * user_bias[u]);
                }
                if (update_item)
                {
                    item_bias[i] -= current_learnrate * (err * alpha_i + RegI * item_bias[i]);
                }

                foreach (var feature in features)
                {
                    int   j       = feature.Item1;
                    float x_j     = feature.Item2;
                    float w_j     = feature_biases[j];
                    int   g_z     = 2; // FeatureGroups[feature.Item1];
                    float alpha_z = weights[g_z];

                    feature_biases[j] -= current_learnrate * (x_j * alpha_z * err + RegC * w_j);
                }

                // adjust latent factors
                for (int f = 0; f < NumFactors; f++)
                {
                    double v_uf = user_factors[u, f];
                    double v_if = item_factors[i, f];

                    if (update_user)
                    {
                        double delta_u = alpha_u * (sum[f] - v_uf * alpha_u) * err + RegU * v_uf;
                        user_factors.Inc(u, f, -current_learnrate * delta_u);
                    }
                    if (update_item)
                    {
                        double delta_i = alpha_i * (sum[f] - v_if * alpha_i) * err + RegI * v_if;
                        item_factors.Inc(i, f, -current_learnrate * delta_i);
                    }

                    foreach (var feature in features)
                    {
                        int   j       = feature.Item1;
                        float x_j     = feature.Item2;
                        float v_jf    = feature_factors[j, f];
                        int   g_z     = 2; // FeatureGroups[feature.Item1];
                        float alpha_z = weights[g_z];

                        double delta_j = x_j * alpha_z * (sum[f] - v_jf * x_j * alpha_z) * err + RegC * v_jf;
                        feature_factors.Inc(j, f, -current_learnrate * delta_j);
                    }
                }

                // update alphas
                float update_alpha_u = (user_bias[u] + sum_u) * err + reg_w * weights[g_u];
                weights[g_u] -= current_learnrate * update_alpha_u;

                float update_alpha_i = (item_bias[i] + sum_i) * err + reg_w * weights[g_i];
                weights[g_i] -= current_learnrate * update_alpha_i;

                for (int g = 0; g < NumGroups - 2; g++)
                {
                    float alpha_z_g      = weights[g + 2];
                    float update_alpha_z = (sum_z + sum_z_bias) * err + reg_w * alpha_z_g;
                    weights[g + 2] -= current_learnrate * update_alpha_z;
                }


                NormalizeWeights();
            }

            Console.WriteLine($"alpha_u: {weights[0]}, alpha_i: {weights[1]}" + (weights.Length > 2 ? $", alpha_z: {weights[2]}" : ""));
            //_alphaWriter.WriteLine(++_iter + "," + weights[0] + "," + weights[1] + (weights.Length > 2 ? "," + weights[2] : ""));
        }
Example #21
0
        /// <summary>
        /// Scores a (user, item) pair with the group-weighted factorization model:
        /// global bias, weighted user/item/feature bias terms, and the pairwise
        /// interaction term, clamped to [MinRating, MaxRating].
        /// </summary>
        /// <param name="user_id">internal user id; ids beyond the trained range contribute no bias/factor terms</param>
        /// <param name="item_id">internal item id; ids beyond the trained range contribute no bias/factor terms</param>
        /// <returns>the predicted rating</returns>
        public override float Predict(int user_id, int item_id)
        {
            // resolve original (string) ids so the WrapRec feedback container can be queried
            string origUserId = UsersMap.ToOriginalID(user_id);
            string origItemId = ItemsMap.ToOriginalID(item_id);

            var feats = new List<Tuple<int, float>>();
            if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
            {
                feats = Split.Container.FeedbacksDic[origUserId, origItemId]
                        .GetAllAttributes()
                        .Select(a => a.Translation)
                        .NormalizeSumToOne(Normalize)
                        .ToList();
            }

            float wUser = weights[0];
            float wItem = weights[1];
            // no third group learned -> feature terms contribute nothing
            float wFeat = weights.Length > 2 ? weights[2] : 0;

            float score = global_bias;

            // weighted bias terms; out-of-range ids are treated as cold-start (0 contribution)
            if (user_id < user_bias.Length)
            {
                score += user_bias[user_id] * wUser;
            }
            if (item_id < item_bias.Length)
            {
                score += item_bias[item_id] * wItem;
            }
            foreach (var feat in feats)
            {
                score += feature_biases[feat.Item1] * feat.Item2 * wFeat;
            }

            // pairwise interactions via the 0.5 * ((sum v)^2 - sum v^2) identity, per factor
            for (int f = 0; f < NumFactors; f++)
            {
                float vU = 0, vI = 0;
                if (user_id < user_bias.Length)
                {
                    vU = user_factors[user_id, f];
                }
                if (item_id < item_bias.Length)
                {
                    vI = item_factors[item_id, f];
                }

                float linear  = vU * wUser + vI * wItem;
                float squares = vU * vU * wUser * wUser + vI * vI * wItem * wItem;

                foreach (var feat in feats)
                {
                    float x = feat.Item2;
                    float v = feature_factors[feat.Item1, f];

                    linear  += x * v * wFeat;
                    squares += x * x * v * v * wFeat * wFeat;
                }

                score += 0.5f * (linear * linear - squares);
            }

            // clamp to the valid rating range
            if (score > MaxRating)
            {
                return MaxRating;
            }
            if (score < MinRating)
            {
                return MinRating;
            }
            return score;
        }
 /// <summary>Delegates sorting to <paramref name="items"/>, passing this instance to its Sort method.</summary>
 public void Sort(ItemsMap items) => items.Sort(this);
Example #23
0
        /// <summary>
        /// Predicts a score for every feedback in the test split, hands the results
        /// to the registered evaluators, and records the elapsed time (ms) in
        /// PureEvaluationTime.
        /// </summary>
        /// <param name="split">the train/test split whose Test set is scored</param>
        /// <param name="context">receives predicted scores and hosts the evaluators</param>
        public override void Evaluate(Split split, EvaluationContext context)
        {
            ExhaustInternalIds(split);

            PureEvaluationTime = (int)Wrap.MeasureTime(delegate()
            {
                if (DataType == DataType.Ratings)
                {
                    foreach (var feedback in split.Test)
                    {
                        context.PredictedScores.Add(feedback, Predict(feedback));
                    }
                }
                else if (DataType == DataType.TimeAwareRating)
                {
                    var predictor = (ITimeAwareRatingPredictor)MmlRecommenderInstance;
                    // base date the "timestamp" attribute's day offsets are measured from
                    // (named after the first rating date of the MovieLens-10M data set)
                    var firstRatingMl10M = new DateTime(1998, 11, 01);

                    foreach (var feedback in split.Test)
                    {
                        // FIX: parse with the invariant culture — the timestamp offset is
                        // machine-generated data, and culture-sensitive double.Parse breaks
                        // on locales that use ',' as the decimal separator (CA1305).
                        double offsetDays = double.Parse(
                            feedback.Attributes["timestamp"].Value,
                            System.Globalization.CultureInfo.InvariantCulture);
                        var time = firstRatingMl10M.AddDays(offsetDays);
                        context.PredictedScores.Add(feedback,
                                                    predictor.Predict(UsersMap.ToInternalID(feedback.User.Id), ItemsMap.ToInternalID(feedback.Item.Id), time));
                    }
                }
                context.Evaluators.ForEach(e => e.Evaluate(context, this, split));
            }).TotalMilliseconds;
        }
        /// <summary>
        /// One BPR-style pairwise gradient step for the triple
        /// (user_id, item_id, other_item_id), where item_id is the observed
        /// (positive) item and other_item_id the sampled (negative) one.
        /// Updates item biases, user/item/feature latent factors and the learned
        /// per-group "alpha" weights (group 0 = user, group 1 = item, groups >= 2 =
        /// feature groups), then re-normalizes the weights.
        /// All parameter updates below are gradient ASCENT ("+= learn_rate * ...").
        /// </summary>
        protected override void UpdateFactors(int user_id, int item_id, int other_item_id, bool update_u, bool update_i, bool update_j)
        {
            // used by WrapRec-based logic
            string userIdOrg = UsersMap.ToOriginalID(user_id);
            string itemIdOrg = ItemsMap.ToOriginalID(item_id);

            // contextual features of this (user, item) feedback, normalized via
            // NormalizeSumToOne when enabled through the setup parameters
            List <Tuple <int, float> > features = new List <Tuple <int, float> >();

            if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
            {
                features = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg].GetAllAttributes().Select(a => a.Translation).NormalizeSumToOne(Normalize).ToList();
            }

            double item_bias_diff = item_bias[item_id] - item_bias[other_item_id];

            // group indices are currently hard-wired: user -> group 0, item -> group 1
            int   g_u     = 0; //FeatureGroups[user_id];
            int   g_i     = 1; //FeatureGroups[item_id];
            float alpha_u = weights[g_u];
            float alpha_i = weights[g_i];

            // <v_u, v_i - v_j>: user factors dotted with the item-factor difference
            double u_i_term = MatrixExtensions.RowScalarProductWithRowDifference(
                user_factors, user_id, item_factors, item_id, item_factors, other_item_id);

            double y_uij = item_bias_diff + alpha_u * alpha_i * u_i_term;

            double items_z_term_sum = 0;

            // per-feature terms x_z * <v_z, v_i - v_j>, also aggregated per feature group
            double[] items_z_terms = new double[features.Count];
            double[] group_z_terms = new double[NumGroups - 2];
            int      z             = 0;

            foreach (var feat in features)
            {
                int   g_z     = FeatureGroups[feat.Item1];
                float alpha_z = weights[g_z];
                items_z_terms[z] = feat.Item2 * MatrixExtensions.RowScalarProductWithRowDifference(
                    feature_factors, feat.Item1, item_factors, item_id, item_factors, other_item_id);
                // groups 0 and 1 belong to user/item, hence the shift by 2
                group_z_terms[g_z - 2] += items_z_terms[z];
                items_z_term_sum       += alpha_z * items_z_terms[z];
                z++;
            }
            y_uij += alpha_i * items_z_term_sum;

            // sigmoid of -y_uij: 1/(1 + e^y) — the usual BPR gradient coefficient
            // (derivative of ln sigmoid(y_uij) w.r.t. y_uij)
            double exp     = Math.Exp(y_uij);
            double sigmoid = 1 / (1 + exp);

            // adjust bias terms
            if (update_i)
            {
                double update = sigmoid - BiasReg * item_bias[item_id];
                item_bias[item_id] += (float)(learn_rate * update);
            }

            if (update_j)
            {
                // the negative item's bias moves in the opposite direction
                double update = -sigmoid - BiasReg * item_bias[other_item_id];
                item_bias[other_item_id] += (float)(learn_rate * update);
            }

            // adjust factors
            for (int f = 0; f < num_factors; f++)
            {
                float v_uf = user_factors[user_id, f];
                float v_if = item_factors[item_id, f];
                float v_jf = item_factors[other_item_id, f];

                if (update_u)
                {
                    double update = alpha_u * alpha_i * (v_if - v_jf) * sigmoid - reg_u * v_uf;
                    user_factors[user_id, f] = (float)(v_uf + learn_rate * update);
                }

                // update features latent factors and make a sum term to use later for updating item factors
                // sum = Sum_{l=1}{num_features} c_l * v_{c_l,f}
                float sum = 0f;

                foreach (var feat in features)
                {
                    float v_zf    = feature_factors[feat.Item1, f];
                    float x_z     = feat.Item2;
                    int   g_z     = FeatureGroups[feat.Item1];
                    float alpha_z = weights[g_z];

                    sum += x_z * v_zf * alpha_z;

                    double update = alpha_i * alpha_z * x_z * (v_if - v_jf) * sigmoid - reg_c * v_zf;
                    feature_factors[feat.Item1, f] = (float)(v_zf + learn_rate * update);
                }

                if (update_i)
                {
                    double update = (alpha_u * alpha_i * v_uf + alpha_i * sum) * sigmoid - reg_i * v_if;
                    item_factors[item_id, f] = (float)(v_if + learn_rate * update);
                }

                if (update_j)
                {
                    // gradient w.r.t. the negative item is the negation of the positive one's
                    double update = (alpha_u * alpha_i * -v_uf - alpha_i * sum) * sigmoid - reg_j * v_jf;
                    item_factors[other_item_id, f] = (float)(v_jf + learn_rate * update);
                }
            }

            // update alphas: each group weight ascends along its own share of y_uij
            double update_alpha_u = alpha_i * u_i_term * sigmoid - reg_w * alpha_u;

            weights[g_u] = (float)(alpha_u + learn_rate * update_alpha_u);

            //NormalizeWeights();

            double update_alpha_i = (alpha_u * u_i_term + items_z_term_sum) * sigmoid - reg_w * alpha_i;

            weights[g_i] = (float)(alpha_i + learn_rate * update_alpha_i);

            for (int g = 0; g < NumGroups - 2; g++)
            {
                double alpha_z_g        = weights[g + 2];
                double update_alpha_z_g = alpha_i * group_z_terms[g] * sigmoid - reg_w * alpha_z_g;
                weights[g + 2] = (float)(alpha_z_g + learn_rate * update_alpha_z_g);
            }

            // normalize weights
            NormalizeWeights();
        }
Example #25
0
 /// <summary>
 /// Maps the original (string) user and item ids to internal ids and delegates
 /// prediction to the wrapped MmlRecommenderInstance.
 /// </summary>
 public override float Predict(string userId, string itemId)
     => MmlRecommenderInstance.Predict(UsersMap.ToInternalID(userId), ItemsMap.ToInternalID(itemId));
Example #26
0
        /// <summary>
        /// One SGD pass over the given rating indices, descending the squared-error
        /// gradient ("-= current_learnrate * ..."). User, item and feature factors
        /// all contribute to a shared per-factor sum used in the interaction
        /// gradients; see Predict for the matching score function.
        /// </summary>
        /// <param name="rating_indices">indices into the ratings data, visited in order</param>
        /// <param name="update_user">whether user bias and factors are updated</param>
        /// <param name="update_item">whether item bias and factors are updated</param>
        protected void _Iterate(IList <int> rating_indices, bool update_user, bool update_item)
        {
            foreach (int index in rating_indices)
            {
                int u = ratings.Users[index];
                int i = ratings.Items[index];

                // used by WrapRec-based logic
                string userIdOrg = UsersMap.ToOriginalID(u);
                string itemIdOrg = ItemsMap.ToOriginalID(i);

                // contextual features of this feedback (normalized via NormalizeSumToOne)
                List <Tuple <int, float> > features = new List <Tuple <int, float> >();
                if (Split.SetupParameters.ContainsKey("feedbackAttributes"))
                {
                    features = Split.Container.FeedbacksDic[userIdOrg, itemIdOrg].GetAllAttributes()
                               .Select(a => a.Translation).NormalizeSumToOne(Normalize).ToList();
                }

                var   p   = Predict(u, i);
                // derivative of the squared error (p - r)^2 w.r.t. p
                float err = (p - ratings[index]) * 2;

                // sum[f]: total factor-f contribution of this instance
                // (weighted features + user + item), reused in the gradients below
                float[] sum = new float[NumFactors];
                foreach (var feature in features)
                {
                    int   j   = feature.Item1;
                    float x_j = feature.Item2;

                    for (int f = 0; f < NumFactors; f++)
                    {
                        sum[f] += feature_factors[j, f] * x_j;
                    }
                }

                for (int f = 0; f < NumFactors; f++)
                {
                    sum[f] += user_factors[u, f] + item_factors[i, f];
                }

                // adjust biases
                // NOTE(review): the global-bias update is commented out here — confirm
                // this is intentional (it IS updated in the sibling group-weighted Iterate)
                //global_bias -= current_learnrate*(err + RegB*global_bias);

                if (update_user)
                {
                    user_bias[u] -= current_learnrate * (err + RegU * user_bias[u]);
                }
                if (update_item)
                {
                    item_bias[i] -= current_learnrate * (err + RegI * item_bias[i]);
                }

                foreach (var feature in features)
                {
                    int   j   = feature.Item1;
                    float x_j = feature.Item2;
                    float w_j = feature_biases[j];

                    feature_biases[j] -= BiasLearnRate * current_learnrate * (x_j * err + BiasReg * RegC * w_j);
                }

                // adjust latent factors
                for (int f = 0; f < NumFactors; f++)
                {
                    double v_uf = user_factors[u, f];
                    double v_if = item_factors[i, f];

                    if (update_user)
                    {
                        // every other factor interacts with v_uf, hence (sum - v_uf)
                        double delta_u = (sum[f] - v_uf) * err + RegU * v_uf;
                        user_factors.Inc(u, f, -current_learnrate * delta_u);
                    }
                    if (update_item)
                    {
                        double delta_i = (sum[f] - v_if) * err + RegI * v_if;
                        item_factors.Inc(i, f, -current_learnrate * delta_i);
                    }

                    foreach (var feature in features)
                    {
                        int   j    = feature.Item1;
                        float x_j  = feature.Item2;
                        float v_jf = feature_factors[j, f];

                        // feature gradient carries the feature value: x_j * (sum - v_jf * x_j)
                        double delta_j = (sum[f] * x_j - v_jf * x_j * x_j) * err + RegC * v_jf;
                        feature_factors.Inc(j, f, -current_learnrate * delta_j);
                    }
                }
            }
        }