/// <summary>Set up the hyperparameter search for the wrapped recommender</summary>
/// <remarks>
/// Creates the train/validation split and, depending on the concrete recommender type,
/// selects the hyperparameter names and the initial simplex vertices for the search.
/// NOTE(review): the type checks must test BiasedMatrixFactorization before
/// MatrixFactorization — presumably the former derives from the latter, so the
/// order of the 'is' checks is significant. Confirm before reordering.
/// </remarks>
/// <exception cref="NotSupportedException">the recommender type is not supported</exception>
private void Init()
{
	this.split = new RatingsSimpleSplit(recommender.Ratings, split_ratio);
	//this.split = new RatingCrossValidationSplit(recommender.Ratings, 5);

	// TODO manage this via reflection?
	if (recommender is UserItemBaseline)
	{
		this.hp_names = new string[] { "reg_u", "reg_i" };
		this.initial_hp_values = CreateInitialValues(
			new double[][]
			{
				new double[] { 25, 10 },
				new double[] { 10, 25 },
				new double[] { 2, 5 },
				new double[] { 5, 2 },
				new double[] { 1, 4 },
				new double[] { 4, 1 },
				new double[] { 3, 3 },
			}
		);
	}
	else if (recommender is BiasedMatrixFactorization)
	{
		this.hp_names = new string[] { "regularization", "bias_reg" };
		this.initial_hp_values = CreateInitialValues(
			// TODO reg_u and reg_i (in a second step?)
			new double[][]
			{
				new double[] { 0.1, 0 },
				new double[] { 0.01, 0 },
				new double[] { 0.0001, 0 },
				new double[] { 0.00001, 0 },
				new double[] { 0.1, 0.0001 },
				new double[] { 0.01, 0.0001 },
				new double[] { 0.0001, 0.0001 },
				new double[] { 0.00001, 0.0001 },
			}
		);
	}
	else if (recommender is MatrixFactorization)
	{
		this.hp_names = new string[] { "regularization" };
		// TODO normal interval search could be more efficient
		this.initial_hp_values = CreateInitialValues(
			new double[][]
			{
				new double[] { 0.1 },
				new double[] { 0.01 },
				new double[] { 0.0001 },
				new double[] { 0.00001 },
			}
		);
	}
	// TODO kNN-based methods
	else
	{
		// NotSupportedException instead of bare Exception (CA2201); still caught
		// by any existing catch (Exception) handlers.
		throw new NotSupportedException("not prepared for type " + recommender.GetType().ToString());
	}
}
/// <summary>Find best hyperparameter (according to an error measure) using Nelder-Mead search</summary>
/// <remarks>
/// Builds a simple train/validation split, selects hyperparameter names and initial
/// simplex vertices based on the concrete recommender type, and delegates to the
/// overload that performs the actual search.
/// NOTE(review): BiasedMatrixFactorization must be tested before MatrixFactorization —
/// presumably the former derives from the latter, so the order of the 'is' checks
/// is significant. Confirm before reordering.
/// </remarks>
/// <param name="error_measure">an error measure (lower is better)</param>
/// <param name="recommender">a rating predictor (will be set to best hyperparameter combination)</param>
/// <returns>the estimated error of the best hyperparameter combination</returns>
/// <exception cref="NotSupportedException">the recommender type is not supported</exception>
public static double FindMinimum(
	string error_measure,
	RatingPredictor recommender)
{
	var split = new RatingsSimpleSplit(recommender.Ratings, split_ratio);
	//var split = new RatingCrossValidationSplit(recommender.Ratings, 5);

	IList<string> hp_names;
	IList<DenseVector> initial_hp_values;

	// TODO manage this via reflection?
	if (recommender is UserItemBaseline)
	{
		hp_names = new string[] { "reg_u", "reg_i" };
		initial_hp_values = new DenseVector[]
		{
			new DenseVector(new double[] { 25, 10 }),
			new DenseVector(new double[] { 10, 25 }),
			new DenseVector(new double[] { 2, 5 }),
			new DenseVector(new double[] { 5, 2 }),
			new DenseVector(new double[] { 1, 4 }),
			new DenseVector(new double[] { 4, 1 }),
			new DenseVector(new double[] { 3, 3 }),
		};
	}
	else if (recommender is BiasedMatrixFactorization)
	{
		hp_names = new string[] { "regularization", "bias_reg" };
		initial_hp_values = new DenseVector[]
		{
			// TODO reg_u and reg_i (in a second step?)
			new DenseVector(new double[] { 0.1, 0 }),
			new DenseVector(new double[] { 0.01, 0 }),
			new DenseVector(new double[] { 0.0001, 0 }),
			new DenseVector(new double[] { 0.00001, 0 }),
			new DenseVector(new double[] { 0.1, 0.0001 }),
			new DenseVector(new double[] { 0.01, 0.0001 }),
			new DenseVector(new double[] { 0.0001, 0.0001 }),
			new DenseVector(new double[] { 0.00001, 0.0001 }),
		};
	}
	else if (recommender is MatrixFactorization)
	{
		// TODO normal interval search could be more efficient
		hp_names = new string[] { "regularization", };
		initial_hp_values = new DenseVector[]
		{
			new DenseVector(new double[] { 0.1 }),
			new DenseVector(new double[] { 0.01 }),
			new DenseVector(new double[] { 0.0001 }),
			new DenseVector(new double[] { 0.00001 }),
		};
	}
	// TODO kNN-based methods
	else
	{
		// NotSupportedException instead of bare Exception (CA2201); still caught
		// by any existing catch (Exception) handlers.
		throw new NotSupportedException("not prepared for type " + recommender.GetType().ToString());
	}

	return FindMinimum(error_measure, hp_names, initial_hp_values, recommender, split);
}