/// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     IterativeRoot = Root as IIterativeModel;
     if (ApplyTimeBlending && IterativeRoot == null)
     {
         error = "In '' the option Apply Time Blending is selected however the model system is not an compatible with IIterativeModel!";
         return(false);
     }
     return(true);
 }
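RuntimeValidation is the pre-run hook used by all of these modules: the hosting framework calls it before Start() and aborts the run with the supplied error string when it returns false. A minimal sketch of that handshake, assuming hypothetical IModule and ModuleHost types that stand in for the real framework:

using System;

public interface IModule
{
    // Contract used by the examples on this page: assign `error` and return false on failure.
    bool RuntimeValidation(ref string error);
    void Start();
}

public static class ModuleHost
{
    public static void Run(IModule module)
    {
        string error = null;
        // Pre-check the selected parameters before starting the module.
        if (!module.RuntimeValidation(ref error))
            throw new InvalidOperationException("Runtime validation failed: " + error);
        module.Start();
    }
}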
Example #2
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     this.Loaded = false;
     this.IterativeRoot = this.Root as IIterativeModel;
     return true;
 }
Example #3
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     IterativeRoot = Root as IIterativeModel;
     return true;
 }
Example #4
    static void DoTrack2()
    {
        TimeSpan seconds;

        if (find_iter != 0)
        {
            if (!(recommender_validate is IIterativeModel))
            {
                Usage("Only iterative recommenders support find_iter.");
            }

            IIterativeModel iterative_recommender_validate = (IIterativeModel)recommender_validate;
            IIterativeModel iterative_recommender_final    = (IIterativeModel)recommender_final;
            Console.WriteLine();

            if (load_model_file == string.Empty)
            {
                recommender_validate.Train();                 // TODO parallelize
                if (prediction_file != string.Empty)
                {
                    recommender_final.Train();
                }
            }

            // evaluate and display results
            double error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
            Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######} {1}", error, iterative_recommender_validate.NumIter));

            for (int i = (int)iterative_recommender_validate.NumIter + 1; i <= max_iter; i++)
            {
                TimeSpan time = Utils.MeasureTime(delegate() {
                    iterative_recommender_validate.Iterate();                     // TODO parallelize
                    if (prediction_file != string.Empty)
                    {
                        iterative_recommender_final.Iterate();
                    }
                });
                training_time_stats.Add(time.TotalSeconds);

                if (i % find_iter == 0)
                {
                    time = Utils.MeasureTime(delegate() {                     // TODO parallelize
                        // evaluate
                        error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
                        err_eval_stats.Add(error);
                        Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######} {1}", error, i));

                        if (prediction_file != string.Empty)
                        {
                            if (predict_score)
                            {
                                Console.Error.WriteLine("Predicting validation scores ...");
                                KDDCup.PredictScoresTrack2(recommender_validate, validation_candidates, prediction_file + "-validate-it-" + i);
                                Console.Error.WriteLine("Predicting real scores ...");
                                KDDCup.PredictScoresTrack2(recommender_final, test_candidates, prediction_file + "-it-" + i);
                            }
                            else
                            {
                                KDDCup.PredictTrack2(recommender_validate, validation_candidates, prediction_file + "-validate-it-" + i);
                                KDDCup.PredictTrack2(recommender_final, test_candidates, prediction_file + "-it-" + i);
                            }
                        }
                    });
                    eval_time_stats.Add(time.TotalSeconds);

                    if (save_model_file != string.Empty)
                    {
                        Recommender.SaveModel(recommender_validate, save_model_file + "-validate", i);
                        if (prediction_file != string.Empty)
                        {
                            Recommender.SaveModel(recommender_final, save_model_file, i);
                        }
                    }

                    if (err_eval_stats.Last() > err_cutoff)
                    {
                        Console.Error.WriteLine("Reached cutoff after {0} iterations.", i);
                        break;
                    }

                    if (err_eval_stats.Last() > err_eval_stats.Min() + epsilon)
                    {
                        Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "Reached convergence (eps={0:0.######}) on training/validation data after {1} iterations.", epsilon, i));
                        break;
                    }

                    DisplayStats();
                }
            }             // for

            DisplayStats();
        }
        else
        {
            if (load_model_file == string.Empty)
            {
                seconds = Utils.MeasureTime(delegate() {                 // TODO parallelize
                    recommender_validate.Train();
                    if (prediction_file != string.Empty)
                    {
                        recommender_final.Train();
                    }
                });
                Console.Write(" training_time " + seconds + " ");
            }

            seconds = Utils.MeasureTime(delegate() {
                // evaluate
                double error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
                Console.Write(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######}", error));

                if (prediction_file != string.Empty)
                {
                    if (predict_score)
                    {
                        KDDCup.PredictScoresTrack2(recommender_validate, validation_candidates, prediction_file + "-validate");
                        KDDCup.PredictScoresTrack2(recommender_final, test_candidates, prediction_file);
                    }
                    else
                    {
                        KDDCup.PredictTrack2(recommender_validate, validation_candidates, prediction_file + "-validate");
                        KDDCup.PredictTrack2(recommender_final, test_candidates, prediction_file);
                    }
                }
            });
            Console.Write(" evaluation_time " + seconds + " ");

            if (save_model_file != string.Empty)
            {
                Recommender.SaveModel(recommender_validate, save_model_file + "-validate");
                if (prediction_file != string.Empty)
                {
                    Recommender.SaveModel(recommender_final, save_model_file);
                }
            }
        }

        Console.WriteLine();
    }
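DoTrack2 relies on one timing idiom throughout: wrap the work in a delegate, measure it, and accumulate the elapsed seconds. A reduced sketch of that idiom; this MeasureTime is a Stopwatch-based reimplementation that is only assumed to behave like the Utils.MeasureTime called above:

using System;
using System.Collections.Generic;
using System.Diagnostics;

static class TimingSketch
{
    // Assumed equivalent of Utils.MeasureTime: run the action, return wall-clock time.
    static TimeSpan MeasureTime(Action action)
    {
        var watch = Stopwatch.StartNew();
        action();
        watch.Stop();
        return watch.Elapsed;
    }

    static readonly List<double> training_time_stats = new List<double>();

    static void Demo()
    {
        TimeSpan time = MeasureTime(delegate() {
            System.Threading.Thread.Sleep(100);     // stand-in for Train()/Iterate()
        });
        training_time_stats.Add(time.TotalSeconds);
        Console.Write(" training_time " + time + " ");
    }
}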
Example #5
        /// <summary>Evaluate an iterative recommender on the folds of a dataset split, display results on STDOUT</summary>
        /// <param name="recommender">an item recommender</param>
        /// <param name="split">a positive-only feedback dataset split</param>
        /// <param name="test_users">a collection of integers with all test users</param>
        /// <param name="candidate_items">a collection of integers with all candidate items</param>
        /// <param name="candidate_item_mode">the mode used to determine the candidate items</param>
        /// <param name="repeated_events">allow repeated events in the evaluation (i.e. items accessed by a user before may be in the recommended list)</param>
        /// <param name="max_iter">the maximum number of iterations</param>
        /// <param name="find_iter">the report interval</param>
        /// <param name="show_fold_results">if set to true to print per-fold results to STDERR</param>
        public static void DoIterativeCrossValidation(
			this IRecommender recommender,
			ISplit<IPosOnlyFeedback> split,
			IList<int> test_users,
			IList<int> candidate_items,
			CandidateItems candidate_item_mode,
			RepeatedEvents repeated_events,
			uint max_iter,
			uint find_iter = 1,
			bool show_fold_results = false)
        {
            if (!(recommender is IIterativeModel))
                throw new ArgumentException("recommender must be of type IIterativeModel");
            if (!(recommender is ItemRecommender))
                throw new ArgumentException("recommender must be of type ItemRecommender");

            var split_recommenders     = new ItemRecommender[split.NumberOfFolds];
            var iterative_recommenders = new IIterativeModel[split.NumberOfFolds];
            var fold_results = new ItemRecommendationEvaluationResults[split.NumberOfFolds];

            // initial training and evaluation
            Parallel.For(0, (int) split.NumberOfFolds, i =>
            {
                try
                {
                    split_recommenders[i] = (ItemRecommender) recommender.Clone(); // to avoid changes in recommender
                    split_recommenders[i].Feedback = split.Train[i];
                    split_recommenders[i].Train();
                    iterative_recommenders[i] = (IIterativeModel) split_recommenders[i];
                    fold_results[i] = Items.Evaluate(split_recommenders[i], split.Test[i], split.Train[i], test_users, candidate_items, candidate_item_mode, repeated_events);
                    if (show_fold_results)
                        Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, iterative_recommenders[i].NumIter);
                }
                catch (Exception e)
                {
                    Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                    throw;
                }
            });
            Console.WriteLine("{0} iteration {1}", new ItemRecommendationEvaluationResults(fold_results), iterative_recommenders[0].NumIter);

            // iterative training and evaluation
            for (int it = (int) iterative_recommenders[0].NumIter + 1; it <= max_iter; it++)
            {
                Parallel.For(0, (int) split.NumberOfFolds, i =>
                {
                    try
                    {
                        iterative_recommenders[i].Iterate();

                        if (it % find_iter == 0)
                        {
                            fold_results[i] = Items.Evaluate(split_recommenders[i], split.Test[i], split.Train[i], test_users, candidate_items, candidate_item_mode, repeated_events);
                            if (show_fold_results)
                                Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, it);
                        }
                    }
                    catch (Exception e)
                    {
                        Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                        throw;
                    }
                });
                Console.WriteLine("{0} iteration {1}", new ItemRecommendationEvaluationResults(fold_results), it);
            }
        }
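A call site for this extension method might look like the sketch below; the feedback source and the enum values are assumptions based on MyMediaLite conventions, not code taken from this page:

// Sketch of a call site; `feedback` is an assumed IPosOnlyFeedback instance.
var recommender = new BPRMF();     // an ItemRecommender that implements IIterativeModel
var split = new PosOnlyFeedbackCrossValidationSplit<PosOnlyFeedback<SparseBooleanMatrix>>(feedback, 5);

recommender.DoIterativeCrossValidation(
    split,
    feedback.AllUsers,             // test_users
    feedback.AllItems,             // candidate_items
    CandidateItems.OVERLAP,        // assumed enum value
    RepeatedEvents.No,             // assumed enum value
    max_iter: 30,
    find_iter: 5,
    show_fold_results: true);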
Example #6
        /// <summary>Evaluate an iterative recommender on the folds of a dataset split, display results on STDOUT</summary>
        /// <param name="recommender">a rating predictor</param>
        /// <param name="split">a rating dataset split</param>
        /// <param name="max_iter">the maximum number of iterations</param>
        /// <param name="find_iter">the report interval</param>
        /// <param name="show_fold_results">if set to true to print per-fold results to STDERR</param>
        public static void DoIterativeCrossValidation(
			this RatingPredictor recommender,
			ISplit<IRatings> split,
			int max_iter,
			int find_iter = 1,
			bool show_fold_results = false)
        {
            if (!(recommender is IIterativeModel))
                throw new ArgumentException("recommender must be of type IIterativeModel");

            var split_recommenders     = new RatingPredictor[split.NumberOfFolds];
            var iterative_recommenders = new IIterativeModel[split.NumberOfFolds];
            var fold_results = new RatingPredictionEvaluationResults[split.NumberOfFolds];

            // initial training and evaluation
            Parallel.For(0, (int) split.NumberOfFolds, i =>
            {
                try
                {
                    split_recommenders[i] = (RatingPredictor) recommender.Clone(); // to avoid changes in recommender
                    split_recommenders[i].Ratings = split.Train[i];
                    if (recommender is ITransductiveRatingPredictor)
                        ((ITransductiveRatingPredictor) split_recommenders[i]).AdditionalFeedback = split.Test[i];
                    split_recommenders[i].Train();
                    iterative_recommenders[i] = (IIterativeModel) split_recommenders[i];
                    fold_results[i] = Ratings.Evaluate(split_recommenders[i], split.Test[i]);

                    if (show_fold_results)
                        Console.Error.WriteLine("fold {0} {1} iteration {2}", i, fold_results[i], iterative_recommenders[i].NumIter);
                }
                catch (Exception e)
                {
                    Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                    throw;
                }
            });
            Console.WriteLine("{0} iteration {1}", new RatingPredictionEvaluationResults(fold_results), iterative_recommenders[0].NumIter);

            // iterative training and evaluation
            for (int it = (int) iterative_recommenders[0].NumIter + 1; it <= max_iter; it++)
            {
                Parallel.For(0, (int) split.NumberOfFolds, i =>
                {
                    try
                    {
                        iterative_recommenders[i].Iterate();

                        if (it % find_iter == 0)
                        {
                            fold_results[i] = Ratings.Evaluate(split_recommenders[i], split.Test[i]);
                            if (show_fold_results)
                                Console.Error.WriteLine("fold {0} {1} iteration {2}", i, fold_results[i], it);
                        }
                    }
                    catch (Exception e)
                    {
                        Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                        throw;
                    }
                });
                Console.WriteLine("{0} iteration {1}", new RatingPredictionEvaluationResults(fold_results), it);
            }
        }
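The rating-prediction variant needs far fewer arguments. A sketch of a call site, where `ratings` is an assumed IRatings instance; MatrixFactorization and RatingCrossValidationSplit both appear in other examples on this page:

var recommender = new MatrixFactorization();
var split = new RatingCrossValidationSplit(ratings, 5);
recommender.DoIterativeCrossValidation(split, max_iter: 30, find_iter: 5, show_fold_results: true);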
Example #7
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     IterativeRoot = Root as IIterativeModel;
     if (ApplyTimeBlending && IterativeRoot == null)
     {
         error = "In '' the option Apply Time Blending is selected; however, the model system is not compatible with IIterativeModel!";
         return false;
     }
     return true;
 }
Example #8
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     // if we are attached to an iterative model load it in
     this.IterativeRoot = this.Root as IIterativeModel;
     return true;
 }
Example #9
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     // if we are attached to an iterative model load it in
     IterativeRoot = Root as IIterativeModel;
     return true;
 }
Example #10
        /// <summary>Evaluate an iterative recommender on the folds of a dataset split, display results on STDOUT</summary>
        /// <param name="recommender">a rating predictor</param>
        /// <param name="split">a rating dataset split</param>
        /// <param name="max_iter">the maximum number of iterations</param>
        /// <param name="find_iter">the report interval</param>
        /// <param name="show_fold_results">if set to true to print per-fold results to STDERR</param>
        public static void DoIterativeCrossValidation(
            this RatingPredictor recommender,
            ISplit<IRatings> split,
            uint max_iter,
            uint find_iter         = 1,
            bool show_fold_results = false)
        {
            if (!(recommender is IIterativeModel))
            {
                throw new ArgumentException("recommender must be of type IIterativeModel");
            }

            var split_recommenders     = new RatingPredictor[split.NumberOfFolds];
            var iterative_recommenders = new IIterativeModel[split.NumberOfFolds];
            var fold_results           = new RatingPredictionEvaluationResults[split.NumberOfFolds];

            // initial training and evaluation
            Parallel.For(0, (int)split.NumberOfFolds, i =>
            {
                try
                {
                    split_recommenders[i]         = (RatingPredictor)recommender.Clone();              // to avoid changes in recommender
                    split_recommenders[i].Ratings = split.Train[i];
                    if (recommender is ITransductiveRatingPredictor)
                    {
                        ((ITransductiveRatingPredictor)split_recommenders[i]).AdditionalFeedback = split.Test[i];
                    }
                    split_recommenders[i].Train();
                    iterative_recommenders[i] = (IIterativeModel)split_recommenders[i];
                    fold_results[i]           = Ratings.Evaluate(split_recommenders[i], split.Test[i]);

                    if (show_fold_results)
                    {
                        Console.Error.WriteLine("fold {0} {1} iteration {2}", i, fold_results[i], iterative_recommenders[i].NumIter);
                    }
                }
                catch (Exception e)
                {
                    Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                    throw;
                }
            });
            Console.WriteLine("{0} iteration {1}", new RatingPredictionEvaluationResults(fold_results), iterative_recommenders[0].NumIter);

            // iterative training and evaluation
            for (int it = (int)iterative_recommenders[0].NumIter + 1; it <= max_iter; it++)
            {
                Parallel.For(0, (int)split.NumberOfFolds, i =>
                {
                    try
                    {
                        iterative_recommenders[i].Iterate();

                        if (it % find_iter == 0)
                        {
                            fold_results[i] = Ratings.Evaluate(split_recommenders[i], split.Test[i]);
                            if (show_fold_results)
                            {
                                Console.Error.WriteLine("fold {0} {1} iteration {2}", i, fold_results[i], it);
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                        throw;
                    }
                });
                Console.WriteLine("{0} iteration {1}", new RatingPredictionEvaluationResults(fold_results), it);
            }
        }
Example #11
    static void DoTrack1()
    {
        var rating_predictor_validate = recommender as RatingPredictor;
        var rating_predictor_final    = rating_predictor_validate.Clone() as RatingPredictor;

        rating_predictor_final.Ratings = complete_ratings;

        Console.WriteLine("Validation split:");
        Utils.DisplayDataStats(training_ratings, validation_ratings, rating_predictor_validate);
        Console.WriteLine("Test split:");
        Utils.DisplayDataStats(complete_ratings, test_data, rating_predictor_final);

        if (find_iter != 0)
        {
            if (!(recommender is IIterativeModel))
            {
                Usage("Only iterative recommenders support find_iter.");
            }
            IIterativeModel iterative_recommender_validate = (IIterativeModel)rating_predictor_validate;
            IIterativeModel iterative_recommender_final    = (IIterativeModel)rating_predictor_final;
            Console.WriteLine(recommender.ToString() + " ");

            if (load_model_file == string.Empty)
            {
                iterative_recommender_validate.Train();
                iterative_recommender_final.Train();
            }
            else
            {
                Recommender.LoadModel(rating_predictor_final, "final-" + load_model_file);
            }

            if (compute_fit)
            {
                Console.Write(string.Format(CultureInfo.InvariantCulture, "fit {0:0.#####} ", iterative_recommender_validate.ComputeFit()));
            }

            MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.Evaluate(rating_predictor_validate, validation_ratings));
            Console.WriteLine(" " + iterative_recommender_validate.NumIter);

            for (int i = (int)iterative_recommender_validate.NumIter + 1; i <= max_iter; i++)
            {
                TimeSpan time = Utils.MeasureTime(delegate() {
                    iterative_recommender_validate.Iterate();

                    iterative_recommender_final.Iterate();                     // TODO parallelize this
                });
                training_time_stats.Add(time.TotalSeconds);


                if (i % find_iter == 0)
                {
                    if (compute_fit)
                    {
                        double fit = 0;
                        time = Utils.MeasureTime(delegate() {
                            fit = iterative_recommender_validate.ComputeFit();
                        });
                        fit_time_stats.Add(time.TotalSeconds);
                        Console.Write(string.Format(CultureInfo.InvariantCulture, "fit {0:0.#####} ", fit));
                    }

                    // evaluate and save stats
                    // TODO parallelize
                    Dictionary<string, double> results = null;
                    time = Utils.MeasureTime(delegate() {
                        results = MyMediaLite.Eval.Ratings.Evaluate(rating_predictor_validate, validation_ratings);
                        MyMediaLite.Eval.Ratings.DisplayResults(results);
                        rmse_eval_stats.Add(results["RMSE"]);
                        Console.WriteLine(" " + i);
                    });
                    eval_time_stats.Add(time.TotalSeconds);


                    // write out model files and predictions
                    if (save_model_file != string.Empty)
                    {
                        Recommender.SaveModel(rating_predictor_validate, save_model_file + "-validate", i);
                        Recommender.SaveModel(rating_predictor_final, save_model_file, i);
                    }
                    if (prediction_file != string.Empty)
                    {
                        if (track2)
                        {
                            KDDCup.PredictRatingsDouble(rating_predictor_validate, validation_candidates, prediction_file + "-validate-it-" + i);
                            KDDCup.PredictRatingsDouble(rating_predictor_final, test_data, prediction_file + "-it-" + i);
                        }
                        else
                        {
                            KDDCup.PredictRatings(rating_predictor_validate, validation_ratings, prediction_file + "-validate-it-" + i);
                            KDDCup.PredictRatings(rating_predictor_final, test_data, prediction_file + "-it-" + i);
                        }
                    }

                    // check whether we should abort
                    if (epsilon > 0 && results["RMSE"] > rmse_eval_stats.Min() + epsilon)
                    {
                        Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "{0} >> {1}", results["RMSE"], rmse_eval_stats.Min()));
                        Console.Error.WriteLine("Reached convergence on training/validation data after {0} iterations.", i);
                        break;
                    }
                    if (results["RMSE"] > rmse_cutoff || results["MAE"] > mae_cutoff)
                    {
                        Console.Error.WriteLine("Reached cutoff after {0} iterations.", i);
                        break;
                    }
                }
            }             // for

            DisplayIterationStats();
            Recommender.SaveModel(recommender, save_model_file);
        }
        else
        {
            TimeSpan seconds;

            if (!no_eval)
            {
                if (load_model_file == string.Empty)
                {
                    Console.Write(recommender.ToString());
                    if (cross_validation > 0)                     // TODO cross-validation could also be performed on the complete dataset
                    {                                             // TODO support track2
                        Console.WriteLine();
                        var split   = new RatingCrossValidationSplit(training_ratings, cross_validation);
                        var results = MyMediaLite.Eval.Ratings.EvaluateOnSplit(rating_predictor_validate, split);
                        MyMediaLite.Eval.Ratings.DisplayResults(results);
                        no_eval = true;
                        rating_predictor_validate.Ratings = training_ratings;
                    }
                    else
                    {
                        seconds = Utils.MeasureTime(delegate() { recommender.Train(); });
                        Console.Write(" training_time " + seconds + " ");
                        Recommender.SaveModel(recommender, save_model_file);
                    }
                }

                Console.Write(recommender.ToString() + " ");

                seconds = Utils.MeasureTime(
                    delegate() { MyMediaLite.Eval.Ratings.DisplayResults(MyMediaLite.Eval.Ratings.Evaluate(rating_predictor_validate, validation_ratings)); }
                    );
                Console.Write(" testing_time " + seconds);
            }

            Console.WriteLine();

            if (prediction_file != string.Empty)
            {
                Console.WriteLine("Prediction for KDD Cup Track 1:");
                seconds = Utils.MeasureTime(delegate() { rating_predictor_final.Train(); });
                Console.Write(" training_time " + seconds + " ");
                if (save_model_file != string.Empty)
                {
                    Recommender.SaveModel(rating_predictor_validate, save_model_file + "-validate");
                    Recommender.SaveModel(rating_predictor_final, save_model_file);
                }

                Console.WriteLine();
                seconds = Utils.MeasureTime(delegate() {
                    KDDCup.PredictRatingsDouble(rating_predictor_final, test_data, prediction_file);

                    if (track2)
                    {
                        KDDCup.PredictRatingsDouble(rating_predictor_validate, validation_candidates, prediction_file + "-validate");
                    }
                    else
                    {
                        KDDCup.PredictRatings(rating_predictor_validate, validation_ratings, prediction_file + "-validate");
                    }
                });
                Console.Error.WriteLine("predicting_time " + seconds);
            }
        }
    }
Example #12
        /// <summary>Evaluate an iterative recommender on the folds of a dataset split, display results on STDOUT</summary>
        /// <param name="recommender">an item recommender</param>
        /// <param name="split">a positive-only feedback dataset split</param>
        /// <param name="test_users">a collection of integers with all test users</param>
        /// <param name="candidate_items">a collection of integers with all candidate items</param>
        /// <param name="candidate_item_mode">the mode used to determine the candidate items</param>
        /// <param name="repeated_events">allow repeated events in the evaluation (i.e. items accessed by a user before may be in the recommended list)</param>
        /// <param name="max_iter">the maximum number of iterations</param>
        /// <param name="find_iter">the report interval</param>
        /// <param name="show_fold_results">if set to true to print per-fold results to STDERR</param>
        public static void DoRatingBasedRankingIterativeCrossValidation(
            this RatingPredictor recommender,
            ISplit<IRatings> split,
            IList<int> test_users,
            IList<int> candidate_items,
            CandidateItems candidate_item_mode,
            RepeatedEvents repeated_events,
            uint max_iter,
            uint find_iter         = 1,
            bool show_fold_results = false)
        {
            if (!(recommender is IIterativeModel))
            {
                throw new ArgumentException("recommender must be of type IIterativeModel");
            }

            var split_recommenders     = new RatingPredictor[split.NumberOfFolds];
            var iterative_recommenders = new IIterativeModel[split.NumberOfFolds];
            var fold_results           = new ItemRecommendationEvaluationResults[split.NumberOfFolds];

            // initial training and evaluation
            Parallel.For(0, (int)split.NumberOfFolds, i =>
            {
                try
                {
                    split_recommenders[i]         = (RatingPredictor)recommender.Clone();              // to avoid changes in recommender
                    split_recommenders[i].Ratings = split.Train[i];
                    split_recommenders[i].Train();
                    iterative_recommenders[i] = (IIterativeModel)split_recommenders[i];

                    var test_data_posonly     = new PosOnlyFeedback<SparseBooleanMatrix>(split.Test[i]);
                    var training_data_posonly = new PosOnlyFeedback<SparseBooleanMatrix>(split.Train[i]);
                    fold_results[i]           = Items.Evaluate(split_recommenders[i], test_data_posonly, training_data_posonly, test_users, candidate_items, candidate_item_mode, repeated_events);
                    if (show_fold_results)
                    {
                        Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, iterative_recommenders[i].NumIter);
                    }
                }
                catch (Exception e)
                {
                    Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                    throw;
                }
            });
            Console.WriteLine("{0} iteration {1}", new ItemRecommendationEvaluationResults(fold_results), iterative_recommenders[0].NumIter);

            // iterative training and evaluation
            for (int it = (int)iterative_recommenders[0].NumIter + 1; it <= max_iter; it++)
            {
                Parallel.For(0, (int)split.NumberOfFolds, i =>
                {
                    try
                    {
                        iterative_recommenders[i].Iterate();

                        if (it % find_iter == 0)
                        {
                            var test_data_posonly     = new PosOnlyFeedback<SparseBooleanMatrix>(split.Test[i]);
                            var training_data_posonly = new PosOnlyFeedback<SparseBooleanMatrix>(split.Train[i]);

                            fold_results[i] = Items.Evaluate(split_recommenders[i], test_data_posonly, training_data_posonly, test_users, candidate_items, candidate_item_mode, repeated_events);
                            if (show_fold_results)
                            {
                                Console.WriteLine("fold {0} {1} iteration {2}", i, fold_results, it);
                            }
                        }
                    }
                    catch (Exception e)
                    {
                        Console.Error.WriteLine("===> ERROR: " + e.Message + e.StackTrace);
                        throw;
                    }
                });
                Console.WriteLine("{0} iteration {1}", new ItemRecommendationEvaluationResults(fold_results), it);
            }
        }
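What sets this variant apart is that it trains a rating predictor but scores it with item-recommendation metrics, wrapping each fold's ratings in PosOnlyFeedback<SparseBooleanMatrix> before calling Items.Evaluate. A sketch of a call site, under the same assumptions as the earlier sketches (`ratings`, `test_users`, and `candidate_items` are assumed inputs):

var recommender = new MatrixFactorization();
var split = new RatingCrossValidationSplit(ratings, 5);
recommender.DoRatingBasedRankingIterativeCrossValidation(
    split, test_users, candidate_items,
    CandidateItems.OVERLAP, RepeatedEvents.No,     // assumed enum values
    max_iter: 30, find_iter: 5);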
Example #13
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     this.Loaded        = false;
     this.IterativeRoot = this.Root as IIterativeModel;
     return true;
 }
Example #14
 /// <summary>
 /// This is called before the start method as a way to pre-check that all of the parameters that are selected
 /// are in fact valid for this module.
 /// </summary>
 /// <param name="error">A string that should be assigned a detailed error</param>
 /// <returns>true if the validation was successful, false if there was a problem</returns>
 public bool RuntimeValidation(ref string error)
 {
     IterativeRoot = Root as IIterativeModel;
     return true;
 }