/// <summary>Train and evaluate recommenders for KDD Cup Track 2.</summary>
/// <remarks>
/// Operates on two recommenders held in class-level fields:
/// recommender_validate (scored against the validation candidates/hits) and
/// recommender_final (used to produce predictions for the real test candidates
/// when prediction_file is set).
///
/// If find_iter != 0, the recommenders must be iterative; training then proceeds
/// one iteration at a time, evaluating (and optionally predicting/saving) every
/// find_iter-th iteration up to max_iter, stopping early on the cutoff or
/// convergence conditions below. Otherwise a single Train/evaluate pass is done.
///
/// NOTE(review): assumes load_model_file/prediction_file/save_model_file use
/// string.Empty (not null) as their "unset" value — verify field defaults.
/// </remarks>
static void DoTrack2()
{
	TimeSpan seconds;

	if (find_iter != 0)
	{
		// Iterative mode: only makes sense for models that expose Iterate().
		if (!(recommender_validate is IIterativeModel))
		{
			Usage("Only iterative recommenders support find_iter.");
		}
		IIterativeModel iterative_recommender_validate = (IIterativeModel)recommender_validate;
		IIterativeModel iterative_recommender_final = (IIterativeModel)recommender_final;
		Console.WriteLine();

		// Initial training only when no pre-trained model was loaded.
		if (load_model_file == string.Empty)
		{
			recommender_validate.Train();
			// TODO parallelize
			// The "final" recommender is only needed when predictions will be written.
			if (prediction_file != string.Empty)
			{
				recommender_final.Train();
			}
		}

		// evaluate and display results
		double error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
		Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######} {1}", error, iterative_recommender_validate.NumIter));

		// Continue from wherever initial training left off, up to max_iter.
		for (int i = (int)iterative_recommender_validate.NumIter + 1; i <= max_iter; i++)
		{
			// One training iteration per loop pass; wall-clock time is recorded.
			TimeSpan time = Utils.MeasureTime(delegate() {
				iterative_recommender_validate.Iterate();
				// TODO parallelize
				if (prediction_file != string.Empty)
				{
					iterative_recommender_final.Iterate();
				}
			});
			training_time_stats.Add(time.TotalSeconds);

			// Every find_iter-th iteration: evaluate, optionally predict and save.
			if (i % find_iter == 0)
			{
				time = Utils.MeasureTime(delegate() {
					// TODO parallelize
					// evaluate
					// NOTE: 'error' is captured from the enclosing scope and updated here.
					error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
					err_eval_stats.Add(error);
					Console.WriteLine(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######} {1}", error, i));

					if (prediction_file != string.Empty)
					{
						if (predict_score)
						{
							// Write raw scores, suffixed with the iteration number.
							Console.Error.WriteLine("Predicting validation scores ...");
							KDDCup.PredictScoresTrack2(recommender_validate, validation_candidates, prediction_file + "-validate-it-" + i);
							Console.Error.WriteLine("Predicting real scores ...");
							KDDCup.PredictScoresTrack2(recommender_final, test_candidates, prediction_file + "-it-" + i);
						}
						else
						{
							// Write binary predictions, suffixed with the iteration number.
							KDDCup.PredictTrack2(recommender_validate, validation_candidates, prediction_file + "-validate-it-" + i);
							KDDCup.PredictTrack2(recommender_final, test_candidates, prediction_file + "-it-" + i);
						}
					}
				});
				eval_time_stats.Add(time.TotalSeconds);

				// Snapshot models at this iteration so runs can be resumed/compared.
				if (save_model_file != string.Empty)
				{
					Recommender.SaveModel(recommender_validate, save_model_file + "-validate", i);
					if (prediction_file != string.Empty)
					{
						Recommender.SaveModel(recommender_final, save_model_file, i);
					}
				}

				// Early stop 1: validation error exceeded the hard cutoff.
				if (err_eval_stats.Last() > err_cutoff)
				{
					Console.Error.WriteLine("Reached cutoff after {0} iterations.", i);
					break;
				}

				// Early stop 2: current error drifted more than epsilon above the best seen.
				// NOTE(review): with epsilon == 0 this breaks as soon as the error is not a
				// new minimum — confirm the intended default for epsilon.
				if (err_eval_stats.Last() > err_eval_stats.Min() + epsilon)
				{
					Console.Error.WriteLine(string.Format(CultureInfo.InvariantCulture, "Reached convergence (eps={0:0.######}) on training/validation data after {1} iterations.", epsilon, i));
					break;
				}

				DisplayStats();
			}
		} // for

		DisplayStats();
	}
	else
	{
		// Non-iterative mode: a single train pass (unless a model was loaded) ...
		if (load_model_file == string.Empty)
		{
			seconds = Utils.MeasureTime(delegate() {
				// TODO parallelize
				recommender_validate.Train();
				if (prediction_file != string.Empty)
				{
					recommender_final.Train();
				}
			});
			Console.Write(" training_time " + seconds + " ");
		}

		// ... followed by a single evaluation (and optional prediction output).
		seconds = Utils.MeasureTime(delegate() {
			// evaluate
			double error = KDDCup.EvaluateTrack2(recommender_validate, validation_candidates, validation_hits);
			Console.Write(string.Format(CultureInfo.InvariantCulture, "ERR {0:0.######}", error));

			if (prediction_file != string.Empty)
			{
				if (predict_score)
				{
					KDDCup.PredictScoresTrack2(recommender_validate, validation_candidates, prediction_file + "-validate");
					KDDCup.PredictScoresTrack2(recommender_final, test_candidates, prediction_file);
				}
				else
				{
					KDDCup.PredictTrack2(recommender_validate, validation_candidates, prediction_file + "-validate");
					KDDCup.PredictTrack2(recommender_final, test_candidates, prediction_file);
				}
			}
		});
		Console.Write(" evaluation_time " + seconds + " ");

		// Save the final models (no iteration suffix in this mode).
		if (save_model_file != string.Empty)
		{
			Recommender.SaveModel(recommender_validate, save_model_file + "-validate");
			if (prediction_file != string.Empty)
			{
				Recommender.SaveModel(recommender_final, save_model_file);
			}
		}
	}
	Console.WriteLine();
}
/// <summary>Evaluate real-valued scores on Track 2 by first turning them into predictions.</summary>
/// <param name="scores">one score per candidate line</param>
/// <param name="candidates">the candidate items, by user ID</param>
/// <param name="hits">the hit items, by user ID</param>
/// <returns>the Track 2 error</returns>
static double Eval(IList<double> scores, Dictionary<int, IList<int>> candidates, Dictionary<int, IList<int>> hits)
{
	// Convert the raw scores into binary predictions before scoring them.
	IList<byte> predictions = Scores2Predictions(scores);
	return KDDCup.EvaluateTrack2(predictions, candidates, hits);
}
/// <summary>Evaluate binary predictions on Track 2.</summary>
/// <param name="predictions">one 0/1 prediction per candidate line</param>
/// <param name="candidates">the candidate items, by user ID</param>
/// <param name="hits">the hit items, by user ID</param>
/// <returns>the Track 2 error</returns>
static double Eval(IList<byte> predictions, Dictionary<int, IList<int>> candidates, Dictionary<int, IList<int>> hits)
{
	// Predictions are already binary, so they can be scored directly.
	return KDDCup.EvaluateTrack2(predictions, candidates, hits);
}
/// <summary>Program entry point.</summary>
/// <remarks>Parameters: num_files weight_1 .. weight_n file_1 .. file_n output_file</remarks>
/// <param name="args">the command-line arguments</param>
public static void Main(string[] args)
{
	AppDomain.CurrentDomain.UnhandledException += new UnhandledExceptionEventHandler(Handlers.UnhandledExceptionHandler);

	// parse command-line parameters
	string prediction_file = null;
	var option_set = new OptionSet() {
		{ "data-dir=",            v => data_dir        = v },
		{ "prediction-file=",     v => prediction_file = v },
		{ "sigmoid",              v => sigmoid         = v != null },
		{ "pairwise-probability", v => pairwise_prob   = v != null },
		{ "pairwise-wins",        v => pairwise_wins   = v != null },
		{ "rated-probability",    v => rated_prob      = v != null },
		{ "constant-rating",      v => constant_rating = v != null },
	};
	IList<string> remaining_args = option_set.Parse(args);

	string rated_file = remaining_args[0];

	// read the scores for the test and validation sets, combining two input
	// files unless a constant rating is assumed
	IList<double> test_scores;
	IList<double> validation_scores;
	if (constant_rating)
	{
		test_scores       = ReadFile(rated_file);
		validation_scores = ReadFile(ValidationFilename(rated_file));
	}
	else
	{
		string rating_file = remaining_args[1];
		test_scores       = CombineFiles(rated_file, rating_file);
		validation_scores = CombineFiles(ValidationFilename(rated_file), ValidationFilename(rating_file));
	}

	// compute error on validation set
	string validation_candidates_file = Path.Combine(data_dir, "mml-track2/validationCandidatesIdx2.txt");
	string validation_hits_file       = Path.Combine(data_dir, "mml-track2/validationHitsIdx2.txt");
	var candidates = Track2Items.Read(validation_candidates_file);
	var hits       = Track2Items.Read(validation_hits_file);

	double error = KDDCup.EvaluateTrack2(Decide(validation_scores), candidates, hits);
	Console.WriteLine("ERR {0:F7}", error);

	// optionally write out the binary decisions for both sets
	if (prediction_file != null)
	{
		WritePredictions(Decide(test_scores), prediction_file);
		WritePredictions(Decide(validation_scores), ValidationFilename(prediction_file));
	}
}