/// <p>
/// Creates a possibly weighted {@link AbstractSimilarity}.
/// </p>
public AbstractSimilarity(IDataModel dataModel, Weighting weighting, bool centerData)
    : base(dataModel)
{
    this.weighted = weighting == Weighting.WEIGHTED;
    this.centerData = centerData;
    this.cachedNumItems = dataModel.GetNumItems();
    this.cachedNumUsers = dataModel.GetNumUsers();
    this.refreshHelper = new RefreshHelper(() => {
        cachedNumItems = dataModel.GetNumItems();
        cachedNumUsers = dataModel.GetNumUsers();
    });
}
public override IList<IRecommendedItem> Recommend(long userID, int howMany, IDRescorer rescorer)
{
    IDataModel dataModel = GetDataModel();
    int numItems = dataModel.GetNumItems();
    List<IRecommendedItem> result = new List<IRecommendedItem>(howMany);
    while (result.Count < howMany)
    {
        // Jump to a random position in the item ID sequence; the for-loop
        // emulates LongPrimitiveIterator.skip(n) from the Java original.
        var it = dataModel.GetItemIDs();
        it.MoveNext();
        var skipNum = random.nextInt(numItems);
        for (int i = 0; i < skipNum; i++)
        {
            if (!it.MoveNext())
            {
                break; // cannot occur while skipNum < numItems
            }
        }
        long itemID = it.Current;
        // Only recommend items the user has not already rated
        if (dataModel.GetPreferenceValue(userID, itemID) == null)
        {
            result.Add(new GenericRecommendedItem(itemID, randomPref()));
        }
    }
    return result;
}
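// The skip emulation above could be factored into a reusable helper. A minimal
// sketch, assuming the item ID iterator is a plain IEnumerator<long> and that
// System.Collections.Generic is in scope; the SkipAhead name and this extension
// class are hypothetical, not part of the port:
public static class EnumeratorExtensions
{
    // Advances the enumerator up to n positions; returns false if it was exhausted first.
    public static bool SkipAhead(this IEnumerator<long> it, int n)
    {
        for (int i = 0; i < n; i++)
        {
            if (!it.MoveNext())
            {
                return false;
            }
        }
        return true;
    }
}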
public static void Evaluate(IRecommender recommender, IDataModel model, int samples,
                            IRunningAverage tracker, String tag)
{
    printHeader();
    var users = recommender.GetDataModel().GetUserIDs();
    while (users.MoveNext())
    {
        long userID = users.Current;
        var recs1 = recommender.Recommend(userID, model.GetNumItems());
        IPreferenceArray prefs2 = model.GetPreferencesFromUser(userID);
        prefs2.SortByValueReversed();
        FastIDSet commonSet = new FastIDSet();
        long maxItemID = setBits(commonSet, recs1, samples);
        FastIDSet otherSet = new FastIDSet();
        maxItemID = Math.Max(maxItemID, setBits(otherSet, prefs2, samples));
        int max = mask(commonSet, otherSet, maxItemID);
        max = Math.Min(max, samples);
        if (max < 2)
        {
            continue; // need at least two common items to compare orderings
        }
        long[] items1 = getCommonItems(commonSet, recs1, max);
        long[] items2 = getCommonItems(commonSet, prefs2, max);
        double variance = scoreCommonSubset(tag, userID, samples, max, items1, items2);
        tracker.AddDatum(variance);
    }
}
protected void initialize()
{
    RandomWrapper random = RandomUtils.getRandom();
    userVectors = new double[dataModel.GetNumUsers()][];
    itemVectors = new double[dataModel.GetNumItems()][];
    double globalAverage = getAveragePreference();
    for (int userIndex = 0; userIndex < userVectors.Length; userIndex++)
    {
        userVectors[userIndex] = new double[rank];
        userVectors[userIndex][0] = globalAverage;
        userVectors[userIndex][USER_BIAS_INDEX] = 0; // will store user bias
        userVectors[userIndex][ITEM_BIAS_INDEX] = 1; // corresponding item feature contains item bias
        for (int feature = FEATURE_OFFSET; feature < rank; feature++)
        {
            userVectors[userIndex][feature] = random.nextGaussian() * NOISE;
        }
    }
    for (int itemIndex = 0; itemIndex < itemVectors.Length; itemIndex++)
    {
        itemVectors[itemIndex] = new double[rank];
        itemVectors[itemIndex][0] = 1; // corresponding user feature contains global average
        itemVectors[itemIndex][USER_BIAS_INDEX] = 1; // corresponding user feature contains user bias
        itemVectors[itemIndex][ITEM_BIAS_INDEX] = 0; // will store item bias
        for (int feature = FEATURE_OFFSET; feature < rank; feature++)
        {
            itemVectors[itemIndex][feature] = random.nextGaussian() * NOISE;
        }
    }
}
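// With this layout the first slots of each vector reserve space for the bias
// terms: slot 0 pairs the global average with a constant 1 on the item side,
// and the two bias slots pair each user/item bias with a constant 1 on the
// opposite side, so a plain dot product already yields
// globalAverage + userBias + itemBias + (sum of the latent feature products).
// A minimal sketch of that prediction (PredictRating is a hypothetical helper;
// the real factorizer exposes predictions through its Factorization):
private static double PredictRating(double[] userVector, double[] itemVector)
{
    double sum = 0.0;
    for (int feature = 0; feature < userVector.Length; feature++)
    {
        sum += userVector[feature] * itemVector[feature];
    }
    return sum;
}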
public Features(ALSWRFactorizer factorizer)
{
    dataModel = factorizer.dataModel;
    numFeatures = factorizer.numFeatures;
    var random = RandomUtils.getRandom();
    M = new double[dataModel.GetNumItems()][]; // numFeatures columns per item
    var itemIDsIterator = dataModel.GetItemIDs();
    while (itemIDsIterator.MoveNext())
    {
        long itemID = itemIDsIterator.Current;
        int itemIDIndex = factorizer.itemIndex(itemID);
        M[itemIDIndex] = new double[numFeatures];
        M[itemIDIndex][0] = averateRating(itemID); // slot 0 holds the item's average rating
        for (int feature = 1; feature < numFeatures; feature++)
        {
            M[itemIDIndex][feature] = random.nextDouble() * 0.1;
        }
    }
    U = new double[dataModel.GetNumUsers()][]; // numFeatures columns per user
    for (int i = 0; i < U.Length; i++)
    {
        U[i] = new double[numFeatures];
    }
}
protected override FastIDSet doGetCandidateItems(long[] preferredItemIDs, IDataModel dataModel)
{
    FastIDSet possibleItemIDs = new FastIDSet(dataModel.GetNumItems());
    var allItemIDs = dataModel.GetItemIDs();
    while (allItemIDs.MoveNext())
    {
        possibleItemIDs.Add(allItemIDs.Current);
    }
    // candidates are all items the user has not expressed a preference for
    possibleItemIDs.RemoveAll(preferredItemIDs);
    return possibleItemIDs;
}
public void initializeM()
{
    ALSWRFactorizer.Features features = new ALSWRFactorizer.Features(factorizer);
    double[][] M = features.getM();
    // slot 0 must hold each item's average rating
    Assert.AreEqual(3.333333333, M[0][0], EPSILON);
    Assert.AreEqual(5, M[1][0], EPSILON);
    Assert.AreEqual(2.5, M[2][0], EPSILON);
    Assert.AreEqual(4.333333333, M[3][0], EPSILON);
    // remaining features must be initialized to small random values in [0, 0.1]
    for (int itemIndex = 0; itemIndex < dataModel.GetNumItems(); itemIndex++)
    {
        for (int feature = 1; feature < 3; feature++)
        {
            Assert.True(M[itemIndex][feature] >= 0);
            Assert.True(M[itemIndex][feature] <= 0.1);
        }
    }
}
protected virtual void prepareTraining()
{
    RandomWrapper random = RandomUtils.getRandom();
    userVectors = new double[dataModel.GetNumUsers()][];
    itemVectors = new double[dataModel.GetNumItems()][];
    double globalAverage = getAveragePreference();
    for (int userIndex = 0; userIndex < userVectors.Length; userIndex++)
    {
        userVectors[userIndex] = new double[numFeatures];
        userVectors[userIndex][0] = globalAverage;
        userVectors[userIndex][USER_BIAS_INDEX] = 0; // will store user bias
        userVectors[userIndex][ITEM_BIAS_INDEX] = 1; // corresponding item feature contains item bias
        for (int feature = FEATURE_OFFSET; feature < numFeatures; feature++)
        {
            userVectors[userIndex][feature] = random.nextGaussian() * randomNoise;
        }
    }
    for (int itemIndex = 0; itemIndex < itemVectors.Length; itemIndex++)
    {
        itemVectors[itemIndex] = new double[numFeatures];
        itemVectors[itemIndex][0] = 1; // corresponding user feature contains global average
        itemVectors[itemIndex][USER_BIAS_INDEX] = 1; // corresponding user feature contains user bias
        itemVectors[itemIndex][ITEM_BIAS_INDEX] = 0; // will store item bias
        for (int feature = FEATURE_OFFSET; feature < numFeatures; feature++)
        {
            itemVectors[itemIndex][feature] = random.nextGaussian() * randomNoise;
        }
    }
    cachePreferences();
    shufflePreferences();
}
public double UserSimilarity(long userID1, long userID2)
{
    IDataModel dataModel = getDataModel();
    FastIDSet prefs1 = dataModel.GetItemIDsFromUser(userID1);
    FastIDSet prefs2 = dataModel.GetItemIDsFromUser(userID2);
    long prefs1Size = prefs1.Count();
    long prefs2Size = prefs2.Count();
    // intersect starting from the smaller set for speed
    long intersectionSize = prefs1Size < prefs2Size
        ? prefs2.IntersectionSize(prefs1)
        : prefs1.IntersectionSize(prefs2);
    if (intersectionSize == 0)
    {
        return Double.NaN;
    }
    long numItems = dataModel.GetNumItems();
    double logLikelihood = LogLikelihood.logLikelihoodRatio(
        intersectionSize,
        prefs2Size - intersectionSize,
        prefs1Size - intersectionSize,
        numItems - prefs1Size - prefs2Size + intersectionSize);
    // map the ratio into (0, 1]: a larger log-likelihood means higher similarity
    return 1.0 - 1.0 / (1.0 + logLikelihood);
}
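// The four arguments above are the cells of the 2x2 contingency table the
// log-likelihood ratio test is computed over. A minimal usage sketch with toy
// counts (the numbers are illustrative only): two users who share 10 items out
// of 50 and 40 rated, in a 1000-item model.
long k11 = 10;                  // items rated by both users
long k12 = 40 - 10;             // items rated by user 2 only
long k21 = 50 - 10;             // items rated by user 1 only
long k22 = 1000 - 50 - 40 + 10; // items rated by neither user
double llr = LogLikelihood.logLikelihoodRatio(k11, k12, k21, k22);
double similarity = 1.0 - 1.0 / (1.0 + llr);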
private void buildMappings()
{
    userIDMapping = createIDMapping(dataModel.GetNumUsers(), dataModel.GetUserIDs());
    itemIDMapping = createIDMapping(dataModel.GetNumItems(), dataModel.GetItemIDs());
}
/// Creates this on top of the given {@link ItemSimilarity}.
/// The cache is sized according to properties of the given {@link DataModel}.
public CachingItemSimilarity(IItemSimilarity similarity, IDataModel dataModel)
    : this(similarity, dataModel.GetNumItems())
{
}
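// A minimal usage sketch, assuming dataModel is already loaded and that the
// port keeps Mahout's LogLikelihoodSimilarity class name (not shown elsewhere
// in this listing, so treat it as an assumption): wrapping a similarity in the
// cache avoids recomputing item-item scores on repeated lookups.
IItemSimilarity baseSimilarity = new LogLikelihoodSimilarity(dataModel);
IItemSimilarity cachedSimilarity = new CachingItemSimilarity(baseSimilarity, dataModel);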
public virtual int GetNumItems()
{
    return _delegate.GetNumItems();
}
public IRStatistics Evaluate(IRecommenderBuilder recommenderBuilder,
                             IDataModelBuilder dataModelBuilder,
                             IDataModel dataModel,
                             IDRescorer rescorer,
                             int at,
                             double relevanceThreshold,
                             double evaluationPercentage)
{
    //Preconditions.checkArgument(recommenderBuilder != null, "recommenderBuilder is null");
    //Preconditions.checkArgument(dataModel != null, "dataModel is null");
    //Preconditions.checkArgument(at >= 1, "at must be at least 1");
    //Preconditions.checkArgument(evaluationPercentage > 0.0 && evaluationPercentage <= 1.0,
    //  "Invalid evaluationPercentage: " + evaluationPercentage + ". Must be: 0.0 < evaluationPercentage <= 1.0");

    int numItems = dataModel.GetNumItems();
    IRunningAverage precision = new FullRunningAverage();
    IRunningAverage recall = new FullRunningAverage();
    IRunningAverage fallOut = new FullRunningAverage();
    IRunningAverage nDCG = new FullRunningAverage();
    int numUsersRecommendedFor = 0;
    int numUsersWithRecommendations = 0;

    var it = dataModel.GetUserIDs();
    while (it.MoveNext())
    {
        long userID = it.Current;
        if (random.nextDouble() >= evaluationPercentage)
        {
            // Skipped
            continue;
        }

        var stopWatch = new System.Diagnostics.Stopwatch();
        stopWatch.Start();

        IPreferenceArray prefs = dataModel.GetPreferencesFromUser(userID);

        // List some most-preferred items that would count as (most) "relevant" results
        double theRelevanceThreshold = Double.IsNaN(relevanceThreshold) ? computeThreshold(prefs) : relevanceThreshold;
        FastIDSet relevantItemIDs = dataSplitter.GetRelevantItemsIDs(userID, at, theRelevanceThreshold, dataModel);
        int numRelevantItems = relevantItemIDs.Count();
        if (numRelevantItems <= 0)
        {
            continue;
        }

        FastByIDMap<IPreferenceArray> trainingUsers = new FastByIDMap<IPreferenceArray>(dataModel.GetNumUsers());
        var it2 = dataModel.GetUserIDs();
        while (it2.MoveNext())
        {
            dataSplitter.ProcessOtherUser(userID, relevantItemIDs, trainingUsers, it2.Current, dataModel);
        }

        IDataModel trainingModel = dataModelBuilder == null
            ? new GenericDataModel(trainingUsers)
            : dataModelBuilder.BuildDataModel(trainingUsers);
        try
        {
            trainingModel.GetPreferencesFromUser(userID);
        }
        catch (NoSuchUserException)
        {
            continue; // Oops we excluded all prefs for the user -- just move on
        }

        int size = numRelevantItems + trainingModel.GetItemIDsFromUser(userID).Count();
        if (size < 2 * at)
        {
            // Really not enough prefs to meaningfully evaluate this user
            continue;
        }

        IRecommender recommender = recommenderBuilder.BuildRecommender(trainingModel);

        int intersectionSize = 0;
        var recommendedItems = recommender.Recommend(userID, at, rescorer);
        foreach (IRecommendedItem recommendedItem in recommendedItems)
        {
            if (relevantItemIDs.Contains(recommendedItem.GetItemID()))
            {
                intersectionSize++;
            }
        }

        int numRecommendedItems = recommendedItems.Count;

        // Precision
        if (numRecommendedItems > 0)
        {
            precision.AddDatum((double)intersectionSize / (double)numRecommendedItems);
        }

        // Recall
        recall.AddDatum((double)intersectionSize / (double)numRelevantItems);

        // Fall-out
        if (numRelevantItems < size)
        {
            fallOut.AddDatum((double)(numRecommendedItems - intersectionSize) / (double)(numItems - numRelevantItems));
        }

        // nDCG
        // In computing, assume relevant IDs have relevance 1 and others 0
        double cumulativeGain = 0.0;
        double idealizedGain = 0.0;
        for (int i = 0; i < numRecommendedItems; i++)
        {
            IRecommendedItem item = recommendedItems[i];
            double discount = 1.0 / log2(i + 2.0); // Classical formulation says log(i+1), but i is 0-based here
            if (relevantItemIDs.Contains(item.GetItemID()))
            {
                cumulativeGain += discount;
            }
            // otherwise we're multiplying discount by relevance 0 so it doesn't do anything

            // Ideally results would be ordered with all relevant ones first, so this theoretical
            // ideal list starts with number of relevant items equal to the total number of relevant items
            if (i < numRelevantItems)
            {
                idealizedGain += discount;
            }
        }
        if (idealizedGain > 0.0)
        {
            nDCG.AddDatum(cumulativeGain / idealizedGain);
        }

        // Reach
        numUsersRecommendedFor++;
        if (numRecommendedItems > 0)
        {
            numUsersWithRecommendations++;
        }

        stopWatch.Stop();
        log.Info("Evaluated with user {} in {}ms", userID, stopWatch.ElapsedMilliseconds);
        log.Info("Precision/recall/fall-out/nDCG/reach: {} / {} / {} / {} / {}",
                 precision.GetAverage(), recall.GetAverage(), fallOut.GetAverage(), nDCG.GetAverage(),
                 (double)numUsersWithRecommendations / (double)numUsersRecommendedFor);
    }

    return new IRStatisticsImpl(
        precision.GetAverage(),
        recall.GetAverage(),
        fallOut.GetAverage(),
        nDCG.GetAverage(),
        (double)numUsersWithRecommendations / (double)numUsersRecommendedFor);
}
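// A minimal usage sketch, assuming this method lives on an evaluator class
// named GenericRecommenderIRStatsEvaluator with a CHOOSE_THRESHOLD constant,
// as in upstream Mahout, and that MyRecommenderBuilder is a hypothetical
// IRecommenderBuilder implementation returning a recommender per training fold:
var evaluator = new GenericRecommenderIRStatsEvaluator();
IRStatistics stats = evaluator.Evaluate(
    new MyRecommenderBuilder(), // hypothetical builder
    null,                       // null -> training folds use GenericDataModel
    dataModel,
    null,                       // no rescorer
    10,                         // precision/recall at 10
    GenericRecommenderIRStatsEvaluator.CHOOSE_THRESHOLD,
    1.0);                       // evaluate all users
Console.WriteLine("P@10 = {0}, R@10 = {1}", stats.GetPrecision(), stats.GetRecall());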
public override Factorization Factorize()
{
    log.Info("starting to compute the factorization...");
    Features features = new Features(this);

    // feature maps necessary for solving for implicit feedback
    IDictionary<int, double[]> userY = null;
    IDictionary<int, double[]> itemY = null;
    if (usesImplicitFeedback)
    {
        userY = userFeaturesMapping(dataModel.GetUserIDs(), dataModel.GetNumUsers(), features.getU());
        itemY = itemFeaturesMapping(dataModel.GetItemIDs(), dataModel.GetNumItems(), features.getM());
    }

    IList<Task> tasks;
    for (int iteration = 0; iteration < numIterations; iteration++)
    {
        log.Info("iteration {0}", iteration);

        // fix M - compute U
        tasks = new List<Task>();
        var userIDsIterator = dataModel.GetUserIDs();
        try
        {
            ImplicitFeedbackAlternatingLeastSquaresSolver implicitFeedbackSolver = usesImplicitFeedback
                ? new ImplicitFeedbackAlternatingLeastSquaresSolver(numFeatures, lambda, alpha, itemY)
                : null;
            while (userIDsIterator.MoveNext())
            {
                long userID = userIDsIterator.Current;
                var itemIDsFromUser = dataModel.GetItemIDsFromUser(userID).GetEnumerator();
                IPreferenceArray userPrefs = dataModel.GetPreferencesFromUser(userID);
                tasks.Add(Task.Factory.StartNew(() =>
                {
                    List<double[]> featureVectors = new List<double[]>();
                    while (itemIDsFromUser.MoveNext())
                    {
                        long itemID = itemIDsFromUser.Current;
                        featureVectors.Add(features.getItemFeatureColumn(itemIndex(itemID)));
                    }
                    var userFeatures = usesImplicitFeedback
                        ? implicitFeedbackSolver.solve(sparseUserRatingVector(userPrefs))
                        : AlternatingLeastSquaresSolver.solve(featureVectors, ratingVector(userPrefs), lambda, numFeatures);
                    features.setFeatureColumnInU(userIndex(userID), userFeatures);
                }));
            }
        }
        finally
        {
            try
            {
                Task.WaitAll(tasks.ToArray(), 1000 * dataModel.GetNumUsers());
            }
            catch (AggregateException e)
            {
                log.Warn("Error when computing user features", e);
                throw;
            }
        }

        // fix U - compute M
        tasks = new List<Task>();
        var itemIDsIterator = dataModel.GetItemIDs();
        try
        {
            ImplicitFeedbackAlternatingLeastSquaresSolver implicitFeedbackSolver = usesImplicitFeedback
                ? new ImplicitFeedbackAlternatingLeastSquaresSolver(numFeatures, lambda, alpha, userY)
                : null;
            while (itemIDsIterator.MoveNext())
            {
                long itemID = itemIDsIterator.Current;
                IPreferenceArray itemPrefs = dataModel.GetPreferencesForItem(itemID);
                tasks.Add(Task.Factory.StartNew(() =>
                {
                    var featureVectors = new List<double[]>();
                    foreach (IPreference pref in itemPrefs)
                    {
                        long userID = pref.GetUserID();
                        featureVectors.Add(features.getUserFeatureColumn(userIndex(userID)));
                    }
                    var itemFeatures = usesImplicitFeedback
                        ? implicitFeedbackSolver.solve(sparseItemRatingVector(itemPrefs))
                        : AlternatingLeastSquaresSolver.solve(featureVectors, ratingVector(itemPrefs), lambda, numFeatures);
                    features.setFeatureColumnInM(itemIndex(itemID), itemFeatures);
                }));
            }
        }
        finally
        {
            try
            {
                Task.WaitAll(tasks.ToArray(), 1000 * dataModel.GetNumItems());
            }
            catch (AggregateException e)
            {
                log.Warn("Error when computing item features", e);
                throw;
            }
        }
    }
    log.Info("finished computation of the factorization...");
    return createFactorization(features.getU(), features.getM());
}
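// Each per-user solve above is the ridge-regression update from the ALS-WR
// paper (Zhou et al., 2008), assuming AlternatingLeastSquaresSolver follows
// upstream Mahout. A sketch of the normal equation for one user u, in LaTeX
// notation, where n_u is the number of ratings by u, M_u the matrix of feature
// columns for u's rated items, R(u) u's rating vector, and E the identity:
//
//   u_f = (M_u M_u^T + \lambda \, n_u E)^{-1} M_u R(u)^T
//
// The per-item solve is symmetric with U and the item's rating column.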
public override int GetNumItems()
{
    return _delegate.GetNumItems();
}