/// <summary>
///   Trains one model per class label on the observation records carrying
///   that label and, when <c>mEmpirical</c> is set, re-estimates the class
///   priors from the empirical label frequencies.
/// </summary>
/// <param name="observations_db">Observation sequences, one row per training record.</param>
/// <param name="class_labels">Class label for each row of <paramref name="observations_db"/>.</param>
/// <returns>The sum of the log-likelihoods reported by each per-class teacher.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
public double Run(int[][] observations_db, int[] class_labels)
{
    // Fail fast on null arguments, consistent with the generic Run<T> overload.
    if (observations_db == null)
    {
        throw new ArgumentNullException("observations_db");
    }
    if (class_labels == null)
    {
        throw new ArgumentNullException("class_labels");
    }

    ValidationHelper.ValidateObservationDb(observations_db, 0, mClassifier.SymbolCount);

    int class_count = mClassifier.ClassCount;
    double[] logLikelihood = new double[class_count];

    int K = class_labels.Length;
    DiagnosticsHelper.Assert(observations_db.Length == K);

    int[] class_label_counts = new int[class_count];

    // Each class index i writes only its own slot of logLikelihood and
    // class_label_counts, so the parallel loop is race-free.
    Parallel.For(0, class_count, i =>
    {
        IUnsupervisedLearning teacher = mAlgorithmEntity(i);

        // Collect the indices of the records belonging to class i.
        List<int> match_record_index_set = new List<int>();
        for (int k = 0; k < K; ++k)
        {
            if (class_labels[k] == i)
            {
                match_record_index_set.Add(k);
            }
        }

        int K2 = match_record_index_set.Count;
        class_label_counts[i] = K2;

        if (K2 != 0)
        {
            // Train the class model on its matching subset only.
            int[][] observations_subdb = new int[K2][];
            for (int k = 0; k < K2; ++k)
            {
                int record_index = match_record_index_set[k];
                observations_subdb[k] = observations_db[record_index];
            }

            logLikelihood[i] = teacher.Run(observations_subdb);
        }
    });

    // Guard against K == 0, which would otherwise yield NaN (0/0) priors.
    if (mEmpirical && K > 0)
    {
        for (int i = 0; i < class_count; i++)
        {
            mClassifier.Priors[i] = (double)class_label_counts[i] / K;
        }
    }

    return logLikelihood.Sum();
}
/// <summary>
///   Trains each model to recognize each of the output labels.
/// </summary>
/// <returns>The sum log-likelihood for all models after training.</returns>
///
protected double Run<T>(T[] inputs, int[] outputs)
{
    int classes = classifier.Classes;
    double[] logLikelihood = new double[classes];

    // For each model,
    // NOTE(review): in release builds the loop runs in parallel via the
    // project's GABIZ.Base.Parallel helper; debug builds use a plain loop.
#if !DEBUG
    GABIZ.Base.Parallel.For(0, classes, i =>
#else
    for (int i = 0; i < classes; i++)
#endif
    {
        // Select the input/output set corresponding
        // to the model's specialization class
        // (Find/Submatrix are project extension methods — presumably Find
        // returns the indices where the predicate holds; confirm at source.)
        int[] inx = outputs.Find(y => y == i);
        T[] observations = inputs.Submatrix(inx);

        if (observations.Length > 0)
        {
            // Create and configure the learning algorithm
            IUnsupervisedLearning teacher = algorithm(i);

            // Train the current model in the input/output subset
            logLikelihood[i] = teacher.Run(observations as Array[]);
        }
    }
    // NOTE(review): this chunk is truncated — the Parallel.For terminator,
    // the #endif pairing, the return statement, and the method's closing
    // brace are not visible in this view.
/// <summary>
///   Trains each model to recognize each of the output labels.
/// </summary>
/// <returns>The sum log-likelihood for all models after training.</returns>
///
protected double Run<T>(T[] inputs, int[] outputs)
{
    // Validate arguments before any training work starts.
    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new DimensionMismatchException("outputs",
            "The number of inputs and outputs does not match.");

    // Every label must name a class the classifier actually has.
    for (int i = 0; i < outputs.Length; i++)
        if (outputs[i] < 0 || outputs[i] >= Classifier.Classes)
            throw new ArgumentOutOfRangeException("outputs");

    int classes = Classifier.Classes;
    double[] logLikelihood = new double[classes];
    int[] classCounts = new int[classes];

    // For each model,
    // NOTE(review): parallel in release builds, sequential in debug builds.
#if !DEBUG
    Parallel.For(0, classes, i =>
#else
    for (int i = 0; i < classes; i++)
#endif
    {
        // We will start the class model learning problem
        var args = new GenerativeLearningEventArgs(i, classes);
        OnGenerativeClassModelLearningStarted(args);

        // Select the input/output set corresponding
        // to the model's specialization class
        int[] inx = outputs.Find(y => y == i);
        T[] observations = inputs.Submatrix(inx);
        classCounts[i] = observations.Length;

        if (observations.Length > 0)
        {
            // Create and configure the learning algorithm
            IUnsupervisedLearning teacher = Algorithm(i);

            // Train the current model in the input/output subset
            logLikelihood[i] = teacher.Run(observations as Array[]);
        }

        // Update and report progress
        OnGenerativeClassModelLearningFinished(args);
    }
    // NOTE(review): this chunk is truncated — the loop/lambda terminator,
    // the prior/threshold updates, the return statement, and the method's
    // closing brace are not visible in this view.
/// <summary>
///   Trains one generative model per output label and returns the summed
///   log-likelihood obtained once every class model has been fit.
/// </summary>
/// <typeparam name="T">The type of a single training observation.</typeparam>
/// <param name="inputs">The observation set used for training.</param>
/// <param name="outputs">The class label associated with each observation.</param>
/// <returns>The sum log-likelihood for all models after training.</returns>
/// <exception cref="ArgumentNullException">Either argument is null.</exception>
/// <exception cref="DimensionMismatchException">Input and output counts differ.</exception>
/// <exception cref="ArgumentOutOfRangeException">A label is outside the classifier's class range.</exception>
protected double Run<T>(T[] inputs, int[] outputs)
{
    if (inputs == null)
        throw new ArgumentNullException("inputs");

    if (outputs == null)
        throw new ArgumentNullException("outputs");

    if (inputs.Length != outputs.Length)
        throw new DimensionMismatchException("outputs",
            "The number of inputs and outputs does not match.");

    // Every label must name a class the classifier actually has.
    foreach (int label in outputs)
    {
        if (label < 0 || label >= Classifier.Classes)
            throw new ArgumentOutOfRangeException("outputs");
    }

    int classCount = Classifier.Classes;
    double[] modelLogLikelihoods = new double[classCount];
    int[] samplesPerClass = new int[classCount];

    // Fit every class-conditional model. Class index c writes only its
    // own slots of the arrays above, so the parallel loop has no races.
    Parallel.For(0, classCount, c =>
    {
        // Announce that learning for this class model is starting.
        var progress = new GenerativeLearningEventArgs(c, classCount);
        OnGenerativeClassModelLearningStarted(progress);

        // Restrict training to the samples labeled with class c.
        int[] selected = outputs.Find(y => y == c);
        T[] subset = inputs.Get(selected);
        samplesPerClass[c] = subset.Length;

        if (subset.Length > 0)
        {
            // Build the learning algorithm for this class and train
            // the class model on its subset of the data.
            IUnsupervisedLearning learner = Algorithm(c);
            modelLogLikelihoods[c] = learner.Run(subset as Array[]);
        }

        // Announce that learning for this class model has finished.
        OnGenerativeClassModelLearningFinished(progress);
    });

    if (Empirical)
    {
        // Re-estimate the class priors from the empirical label frequencies.
        for (int c = 0; c < classCount; c++)
            Classifier.Priors[c] = (double)samplesPerClass[c] / inputs.Length;
    }

    if (Rejection)
        Classifier.Threshold = Threshold();

    // Returns the sum log-likelihood for all models.
    return modelLogLikelihoods.Sum();
}