/*public void SetMaxUsers(int i)
 * {
 *     _engine.MaxUsers = i;
 * }
 *
 * public void SetSimilaritySaf(double p)
 * {
 *     _engine.MinSimilarity = p;
 * }*/

//TODO
// Computes the RMSE of each requested prediction method over the full test set, and
// fills dConfidence with pairwise sign-test results estimating the confidence that
// one method outperforms another.
public Dictionary<PredictionMethod, double> ComputeRMSE(List<PredictionMethod> lMethods,
    out Dictionary<PredictionMethod, Dictionary<PredictionMethod, double>> dConfidence)
{
    _allTest = _test.GetAllData();

    // RMSE of each method on the held-out test data.
    Dictionary<PredictionMethod, double> res = new Dictionary<PredictionMethod, double>();
    foreach (var m in lMethods)
    {
        res.Add(m, ComputeRMSE(m, _trainEngine, _allTest));
    }

    //TODO
    // Run a sign test for every ordered pair of distinct methods.
    dConfidence = new Dictionary<PredictionMethod, Dictionary<PredictionMethod, double>>();
    foreach (var m1 in lMethods)
    {
        dConfidence.Add(m1, new Dictionary<PredictionMethod, double>());
        foreach (var m2 in lMethods)
        {
            if (m1 != m2)
            {
                dConfidence[m1].Add(m2, SignTest(m1, m2));
            }
        }
    }
    return res;
}
// Trains the baseline SVD model: seeds it with the global average rating, holds out
// 5% of the training data for validation, and keeps running training epochs until
// the validation RMSE stops improving.
public void TrainBaseModel(int cFeatures)
{
    double avg = (_train.SumRanks() + _test.SumRanks()) / (_train.NumOfRanks() + _test.NumOfRanks());
    var svd = new SVD(avg, cFeatures);

    // 95% / 5% split of the training data into train and validation sets.
    RankingDB train = new RankingDB();
    RankingDB validation = new RankingDB();
    DivideDB(_train, 0.95, train, validation);

    var ranks = train.GetAllData();
    double RMSE = double.MaxValue;
    double LastRMSE = double.MaxValue;

    // Early stopping: keep training while the validation RMSE does not increase.
    // Note that the model from the final, non-improving epoch is the one left in
    // _trainEngine when the loop exits.
    while (RMSE <= LastRMSE)
    {
        LastRMSE = RMSE;
        svd.Train(ranks);
        _trainEngine.setSVD(svd);
        RMSE = ComputeRMSE(PredictionMethod.SVD, _trainEngine, validation.GetAllData());
    }
    //_engine.setSVD(svd);
}
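// Example usage (a hedged sketch: the evaluator variable stands for an instance of
// this class, and PredictionMethod.Pearson is a hypothetical enum value used only
// for illustration; PredictionMethod.SVD appears in the code above):
//
//     evaluator.TrainBaseModel(50);   // 50 latent features for the SVD baseline
//     Dictionary<PredictionMethod, Dictionary<PredictionMethod, double>> confidence;
//     Dictionary<PredictionMethod, double> rmse = evaluator.ComputeRMSE(
//         new List<PredictionMethod> { PredictionMethod.SVD, PredictionMethod.Pearson },
//         out confidence);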
// Convenience overload: builds a fresh engine over the given database and evaluates
// the method against all of that database's ratings.
private double ComputeRMSE(PredictionMethod m, RankingDB db)
{
    return ComputeRMSE(m, new RecommenderSystemEngine(db), db.GetAllData());
}
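// A minimal sketch of the three-argument ComputeRMSE helper that both overloads above
// delegate to, assuming it is defined elsewhere in this class (kept commented out to
// avoid clashing with that definition). The engine.Predict signature and the rating
// fields (UserId, ItemId, Score) are assumptions for illustration, not the project's
// confirmed API; the formula itself is the standard root-mean-squared error.
//
// private double ComputeRMSE(PredictionMethod m, RecommenderSystemEngine engine, List<Rating> ranks)
// {
//     double sumSquaredError = 0.0;
//     foreach (var r in ranks)
//     {
//         double predicted = engine.Predict(m, r.UserId, r.ItemId);   // hypothetical Predict signature
//         double error = predicted - r.Score;
//         sumSquaredError += error * error;
//     }
//     return Math.Sqrt(sumSquaredError / ranks.Count);
// }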