/// <summary>
/// Evaluates SocialMF on the Epinions 80/20 split against each implicit-trust
/// score file, printing a tab-separated RMSE/MAE row per file, one column per
/// factor count.
/// </summary>
public void TestImplicitTrust()
{
    var numFactors = new uint[] { 5, 10 };
    var trustScores = new string[]
    {
        "trust_values_LATHIA.dat",
        "trust_values_HWANGCHEN.dat",
        "trust_values_ODONOVAN.dat",
        "trust_values_PEARSON.dat",
        "trust_values_SHAMBOURLU.dat"
    };

    // step 1: dataset
    var config = new CsvConfiguration();
    config.Delimiter = " ";
    var trainReader = new CsvReader<ItemRating>(Paths.EpinionTrain80, config, new ItemRatingMap());
    var testReader = new CsvReader<ItemRating>(Paths.EpinionTest20, config, new ItemRatingMap());
    var dataset = new Dataset<ItemRating>(trainReader, testReader);

    foreach (string scoreFile in trustScores)
    {
        // Materialize with ToList(): the Select is deferred, and without it the
        // trust file would be re-read and re-parsed on every enumeration inside
        // the numFactors loop below. InvariantCulture because the strength
        // values are machine-written data, not user-locale text.
        var relations = File.ReadAllLines(Paths.EpinionRelationsImplicit + scoreFile)
            .ToCsvDictionary('\t')
            .Select(i => new Relation()
            {
                UserId = i["UserId"],
                ConnectedId = i["ConnectionId"],
                ConnectionStrength = float.Parse(i["Strength"], System.Globalization.CultureInfo.InvariantCulture)
            })
            .ToList();
        //.Where(r => r.ConnectionStrength > 1F);

        string rmseValues = "", maeValues = "";

        foreach (uint num in numFactors)
        {
            // step 2: recommender
            var algorithm = new SocialMF();
            algorithm.SocialRegularization = 1;
            algorithm.NumFactors = num;
            var recommender = new MediaLiteRatingPredictor(algorithm, relations);

            // step 3: evaluation
            var context = new EvalutationContext<ItemRating>(recommender, dataset);
            var ep = new EvaluationPipeline<ItemRating>(context);
            ep.Evaluators.Add(new RMSE());
            ep.Evaluators.Add(new MAE());
            ep.Run();

            rmseValues += context["RMSE"] + "\t";
            maeValues += context["MAE"] + "\t";
        }

        Console.WriteLine(scoreFile + "\t" + rmseValues + "\t" + maeValues);
    }
}
/// <summary>
/// Evaluates plain matrix factorization on the Epinions 75/25 split, printing
/// a tab-separated RMSE/MAE row per factor count, one column per configured
/// social-regularization value.
/// </summary>
public void TestExplicitTrust()
{
    //var socialReguls = new float[] { 0.1F, 0.2F, 0.5F, 0.8F, 1F, 1.5F, 2F, 3F, 5F};
    //var numFactors = new uint[] {2, 5, 10, 15, 20};
    var socialReguls = new float[] { 1 };
    var numFactors = new uint[] { 5, 10 };

    // step 1: dataset
    var config = new CsvConfiguration();
    config.Delimiter = " ";
    var trainReader = new CsvReader<ItemRating>(Paths.EpinionTrain75, config, new ItemRatingMap());
    var testReader = new CsvReader<ItemRating>(Paths.EpinionTest25, config, new ItemRatingMap());
    var dataset = new Dataset<ItemRating>(trainReader, testReader);

    // Materialize with ToList(): the Select is deferred, so without it the
    // relations file would be re-read and re-parsed on every enumeration
    // inside the nested loops below.
    var relations = File.ReadAllLines(Paths.EpinionRelations)
        .ToCsvDictionary(' ')
        .Select(i => new Relation()
        {
            UserId = i["UserId"],
            ConnectedId = i["ConnectionId"],
            DatasetId = 1
        })
        .ToList();

    foreach (uint num in numFactors)
    {
        string rmseValues = "", maeValues = "";

        foreach (float regul in socialReguls)
        {
            // step 2: recommender
            // NOTE(review): 'regul' is unused while SocialRegularization is
            // commented out, so each inner iteration repeats an identical run —
            // re-enable the assignment (with a SocialMF algorithm) or drop the loop.
            var algorithm = new MatrixFactorization();
            algorithm.NumFactors = num;
            //algorithm.SocialRegularization = regul;
            var recommender = new MediaLiteRatingPredictor(algorithm, relations);

            // step 3: evaluation
            var context = new EvalutationContext<ItemRating>(recommender, dataset);
            var ep = new EvaluationPipeline<ItemRating>(context);
            ep.Evaluators.Add(new RMSE());
            ep.Evaluators.Add(new MAE());
            ep.Run();

            rmseValues += context["RMSE"] + "\t";
            maeValues += context["MAE"] + "\t";
        }

        Console.WriteLine(num + "\t" + rmseValues + "\t" + maeValues);
    }
}
/// <summary>
/// Trains a biased matrix factorization recommender on the "epinion" dataset
/// from the data context (70/30 train/test split) and runs the evaluation
/// pipeline over it. No evaluators are registered, so only the run itself
/// is exercised.
/// </summary>
public void TrainAndTest()
{
    // step 1: dataset
    var dataContext = DataManager.GetDataContext();
    var epinionDataset = dataContext.Datasets.Single(d => d.Name.ToLower() == "epinion");
    var dataset = new RatingDataset(epinionDataset, dataContext, new TrainTestSplitter(0.3));

    // step 2: recommender
    var recommender = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluation
    var context = new EvalutationContext<Rating>(recommender, dataset);
    var pipeline = new EvaluationPipeline<Rating>(context);
    //pipeline.Evaluators.Add(new RMSE());
    pipeline.Run();
}
/// <summary>
/// Loads the CrowdRec entity and relation files, builds an item-rating
/// dataset with a 0.3 test ratio, and evaluates a biased matrix
/// factorization recommender with RMSE and MAE.
/// </summary>
public void RunDemo()
{
    // step 1: load dataset
    var container = new CrowdRecDataContainer();
    var reader = new CrowdRecDataReader(_entitesFile, _relationsFile);
    reader.LoadData(container);
    var dataset = new ItemRatingDataset(container, 0.3f);

    // step 2: recommender
    var recommender = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluations
    var context = new EvalutationContext<ItemRating>(recommender, dataset);
    var pipeline = new EvaluationPipeline<ItemRating>(context);
    pipeline.Evaluators.Add(new RMSE());
    pipeline.Evaluators.Add(new MAE());
    pipeline.Run();
}
/// <summary>
/// Splits the books ratings file into 75/25 train/test halves (only if the
/// train file does not already exist), then trains and evaluates a biased
/// matrix factorization recommender with RMSE.
/// </summary>
public void TestBooksWithMF()
{
    // step 1: dataset
    if (!File.Exists(_trainPath))
    {
        FileHelper.SplitLines(_booksPath, _trainPath, _testPath, 0.75, true, true);
    }

    var trainingReader = new CsvReader<ItemRating>(_trainPath, new ItemRatingMap());
    var testingReader = new CsvReader<ItemRating>(_testPath, new ItemRatingMap());
    var dataset = new Dataset<ItemRating>(trainingReader, testingReader);

    // step 2: recommender
    var recommender = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluation
    var context = new EvalutationContext<ItemRating>(recommender, dataset);
    var pipeline = new EvaluationPipeline<ItemRating>(context);
    pipeline.Evaluators.Add(new RMSE());
    pipeline.Run();
}
/// <summary>
/// Demo run: reads the CrowdRec entity/relation files into a container,
/// derives an item-rating dataset (0.3 test ratio), and scores a biased
/// matrix factorization recommender with RMSE and MAE.
/// </summary>
public void RunDemo()
{
    // step 1: load dataset
    var dataContainer = new CrowdRecDataContainer();
    new CrowdRecDataReader(_entitesFile, _relationsFile).LoadData(dataContainer);
    var ratingDataset = new ItemRatingDataset(dataContainer, 0.3f);

    // step 2: recommender
    var ratingPredictor = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluations
    var evalPipeline = new EvaluationPipeline<ItemRating>(
        new EvalutationContext<ItemRating>(ratingPredictor, ratingDataset));
    evalPipeline.Evaluators.Add(new RMSE());
    evalPipeline.Evaluators.Add(new MAE());
    evalPipeline.Run();
}
/// <summary>
/// Ensures the books ratings file is split 75/25 into train/test files,
/// then evaluates a biased matrix factorization recommender on that split
/// using RMSE.
/// </summary>
public void TestBooksWithMF()
{
    // step 1: dataset — split only once; reuse an existing train file.
    if (!File.Exists(_trainPath))
    {
        FileHelper.SplitLines(_booksPath, _trainPath, _testPath, 0.75, true, true);
    }

    var trainSource = new CsvReader<ItemRating>(_trainPath, new ItemRatingMap());
    var testSource = new CsvReader<ItemRating>(_testPath, new ItemRatingMap());
    var ratings = new Dataset<ItemRating>(trainSource, testSource);

    // step 2: recommender
    var predictor = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluation
    var evalContext = new EvalutationContext<ItemRating>(predictor, ratings);
    var evalPipeline = new EvaluationPipeline<ItemRating>(evalContext);
    evalPipeline.Evaluators.Add(new RMSE());
    evalPipeline.Run();
}
/// <summary>
/// Evaluates a biased matrix factorization recommender on the pre-split
/// Amazon books dataset (75/25, comma-delimited with a header row) using
/// RMSE and MAE.
/// </summary>
public void TestAmazonDatasetSingle()
{
    // step 1: dataset — comma-delimited CSV with a header record
    var csvConfig = new CsvConfiguration() { Delimiter = ",", HasHeaderRecord = true };
    var trainSource = new CsvReader<ItemRating>(Paths.AmazonBooksTrain75, csvConfig, new ItemRatingMap());
    var testSource = new CsvReader<ItemRating>(Paths.AmazonBooksTest25, csvConfig, new ItemRatingMap());
    var ratings = new Dataset<ItemRating>(trainSource, testSource);

    // step 2: recommender
    var predictor = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluation
    var evalContext = new EvalutationContext<ItemRating>(predictor, ratings);
    var pipeline = new EvaluationPipeline<ItemRating>(evalContext);
    pipeline.Evaluators.Add(new RMSE());
    pipeline.Evaluators.Add(new MAE());
    pipeline.Run();
}
/// <summary>
/// Runs RMSE and MAE evaluation of a biased matrix factorization
/// recommender over the pre-split Amazon books dataset (comma-delimited,
/// header row; 75% train / 25% test).
/// </summary>
public void TestAmazonDatasetSingle()
{
    // step 1: dataset
    var configuration = new CsvConfiguration() { Delimiter = ",", HasHeaderRecord = true };
    var dataset = new Dataset<ItemRating>(
        new CsvReader<ItemRating>(Paths.AmazonBooksTrain75, configuration, new ItemRatingMap()),
        new CsvReader<ItemRating>(Paths.AmazonBooksTest25, configuration, new ItemRatingMap()));

    // step 2: recommender
    var recommender = new MediaLiteRatingPredictor(new BiasedMatrixFactorization());

    // step 3: evaluation
    var evaluation = new EvaluationPipeline<ItemRating>(
        new EvalutationContext<ItemRating>(recommender, dataset));
    evaluation.Evaluators.Add(new RMSE());
    evaluation.Evaluators.Add(new MAE());
    evaluation.Run();
}