public Run ( int[][] observations, int[][] paths ) : double | ||
observations | int[][] | An array of observation sequences to be used to train the model. |
paths | int[][] | An array of state labels associated to each observation sequence. |
Result | double |
/// <summary>
///   Runs the learning algorithm.
/// </summary>
///
/// <remarks>
///   Learning problem. Given some training observation sequences O = {o1, o2, ..., oK}
///   and general structure of HMM (numbers of hidden and visible states), determine
///   HMM parameters M = (A, B, pi) that best fit training data.
/// </remarks>
///
/// <param name="observations">The discrete observation sequences used to train the model.</param>
/// <returns>The log-likelihood of the trained model over the training sequences.</returns>
///
public double Run(params int[][] observations)
{
    var model = mle.Model;

    convergence.Clear();

    // Log-likelihood of the initial model over all training sequences.
    double logLikelihood = Double.NegativeInfinity;
    for (int i = 0; i < observations.Length; i++)
        logLikelihood = Special.LogSum(logLikelihood, model.Evaluate(observations[i]));

    double newLogLikelihood = Double.NegativeInfinity;

    do // Until convergence or max iterations is reached
    {
        logLikelihood = newLogLikelihood;

        // Compute the Viterbi path for all sequences
        int[][] paths = new int[observations.Length][];
        for (int i = 0; i < observations.Length; i++)
            paths[i] = model.Decode(observations[i]);

        // Compute Maximum Likelihood Estimation
        mle.Run(observations, paths);

        // Compute log-likelihood
        newLogLikelihood = Double.NegativeInfinity;
        for (int i = 0; i < observations.Length; i++)
            newLogLikelihood = Special.LogSum(newLogLikelihood, model.Evaluate(observations[i]));

        // Check convergence
        convergence.NewValue = newLogLikelihood;

        // BUG FIX: the loop must CONTINUE while the algorithm has NOT converged.
        // The previous condition `while (convergence.HasConverged)` was inverted,
        // causing the loop to terminate after a single iteration. The multivariate
        // overload of Run already used the correct negated condition.
    } while (!convergence.HasConverged);

    return newLogLikelihood;
}
/// <summary>
///   Runs one single epoch (iteration) of the learning algorithm.
/// </summary>
///
/// <param name="inputs">The observation sequences.</param>
/// <param name="outputs">A vector to be populated with the decoded Viterbi sequences.</param>
///
protected override void RunEpoch(int[][] inputs, int[][] outputs)
{
    var model = mle.Model;

    // Decode each observation sequence into its most likely
    // (Viterbi) hidden state path using the current model.
    for (int index = 0; index < inputs.Length; index++)
        outputs[index] = model.Decode(inputs[index]);

    // Re-estimate the model parameters from the decoded state paths.
    mle.Run(inputs, outputs);
}
/// <summary>
///   Runs the learning algorithm.
/// </summary>
///
/// <remarks>
///   Learning problem. Given some training observation sequences O = {o1, o2, ..., oK}
///   and general structure of HMM (numbers of hidden and visible states), determine
///   HMM parameters M = (A, B, pi) that best fit training data.
/// </remarks>
///
public double Run(params Array[] observations)
{
    var model = mle.Model;

    convergence.Clear();

    // Convert the generic representation to a vector of multivariate sequences
    double[][][] sequences = new double[observations.Length][][];
    for (int k = 0; k < observations.Length; k++)
        sequences[k] = convert(observations[k], model.Dimension);

    // Log-likelihood of the model before any re-estimation.
    double previousLikelihood = Double.NegativeInfinity;
    for (int k = 0; k < observations.Length; k++)
        previousLikelihood = Special.LogSum(previousLikelihood, model.Evaluate(observations[k]));

    double currentLikelihood = Double.NegativeInfinity;

    do // Until convergence or max iterations is reached
    {
        previousLikelihood = currentLikelihood;

        // Decode every sequence into its most likely (Viterbi) state path
        int[][] statePaths = new int[observations.Length][];
        for (int k = 0; k < observations.Length; k++)
            statePaths[k] = model.Decode(sequences[k]);

        // Re-estimate the parameters by Maximum Likelihood Estimation
        mle.Run(sequences, statePaths);

        // Recompute the log-likelihood under the updated model
        currentLikelihood = Double.NegativeInfinity;
        for (int k = 0; k < observations.Length; k++)
            currentLikelihood = Special.LogSum(currentLikelihood, model.Evaluate(observations[k]));

        // Report the new value to the convergence monitor
        convergence.NewValue = currentLikelihood;

    } while (!convergence.HasConverged);

    return currentLikelihood;
}
public void RunTest()
{
    // Example from
    // http://www.cs.columbia.edu/4761/notes07/chapter4.3-HMM.pdf

    // Eight observation sequences over a binary alphabet.
    int[][] observations =
    {
        new int[] { 0,0,0,1,0,0 },
        new int[] { 1,0,0,1,0,0 },
        new int[] { 0,0,1,0,0,0 },
        new int[] { 0,0,0,0,1,0 },
        new int[] { 1,0,0,0,1,0 },
        new int[] { 0,0,0,1,1,0 },
        new int[] { 1,0,0,0,0,0 },
        new int[] { 1,0,1,0,0,0 },
    };

    // Known hidden-state labels paired with each observation sequence.
    int[][] paths =
    {
        new int[] { 0,0,1,0,1,0 },
        new int[] { 1,0,1,0,1,0 },
        new int[] { 1,0,0,1,1,0 },
        new int[] { 1,0,1,1,1,0 },
        new int[] { 1,0,0,1,0,1 },
        new int[] { 0,0,1,0,0,1 },
        new int[] { 0,0,1,1,0,1 },
        new int[] { 0,1,1,1,0,0 },
    };

    // Supervised estimation with raw counts (no Laplace smoothing).
    HiddenMarkovModel model = new HiddenMarkovModel(states: 2, symbols: 2);
    MaximumLikelihoodLearning target = new MaximumLikelihoodLearning(model);
    target.UseLaplaceRule = false;

    double logLikelihood = target.Run(observations, paths);

    // The model stores parameters in log-space; convert back to probabilities.
    var initial = Matrix.Exp(model.Probabilities);
    var transitions = Matrix.Exp(model.Transitions);
    var emissions = Matrix.Exp(model.Emissions);

    // Expected values are simple frequency counts from the labeled data.
    Assert.AreEqual(0.5, initial[0]);
    Assert.AreEqual(0.5, initial[1]);

    Assert.AreEqual(7 / 20.0, transitions[0, 0], 1e-5);
    Assert.AreEqual(13 / 20.0, transitions[0, 1], 1e-5);
    Assert.AreEqual(14 / 20.0, transitions[1, 0], 1e-5);
    Assert.AreEqual(6 / 20.0, transitions[1, 1], 1e-5);

    Assert.AreEqual(17 / 25.0, emissions[0, 0]);
    Assert.AreEqual(8 / 25.0, emissions[0, 1]);
    Assert.AreEqual(19 / 23.0, emissions[1, 0]);
    Assert.AreEqual(4 / 23.0, emissions[1, 1]);

    Assert.AreEqual(-1.1472359046136624, logLikelihood);
}
/// <summary>
///   Trains the hidden Markov model on the compositions of the given category,
///   pairing each composition's first track (input melody) with its second
///   track (output melody).
/// </summary>
///
/// <param name="cat">The category whose compositions provide the training data.</param>
///
/// <exception cref="Exception">
///   Thrown when the collected input and output sequences are mismatched
///   in count or in per-sequence length.</exception>
///
public void Train(CompositionCategory cat)
{
    // Fixed seed so training is reproducible across runs.
    Accord.Math.Tools.SetupGenerator(42);

    List<int[]> inputSequences = new List<int[]>();
    List<int[]> outputSequences = new List<int[]>();

    foreach (Composition comp in cat.Compositions)
    {
        // Need at least two tracks: one input melody, one output melody.
        if (comp.Tracks.Count < 2)
            continue;

        // NOTE(review): `as` casts below are not null-checked; a track whose
        // main sequence is not a MelodySequence would throw NRE here — confirm
        // that GetMainSequence() always yields a MelodySequence in this context.
        var melInput = comp.Tracks[0].GetMainSequence() as MelodySequence;
        melInput.Trim(100);
        melInput.NormalizeNotes(4);

        var melOutput = comp.Tracks[1].GetMainSequence() as MelodySequence;
        melOutput.Trim(100);
        melOutput.NormalizeNotes(4);

        // Truncate the longer melody so both sequences align element-wise.
        if (melInput.Length > melOutput.Length)
            melInput.Trim(melOutput.Length);
        else if (melOutput.Length > melInput.Length)
            melOutput.Trim(melInput.Length);

        // Register the notes in the codebook, then encode both melodies.
        book.Add(melInput.Notes);
        book.Add(melOutput.Notes);

        inputSequences.Add(book.ToCodes(melInput.ToArray()));
        outputSequences.Add(book.ToCodes(melOutput.ToArray()));
    }

    // Sanity checks: sequence pairs must match in count and length.
    if (outputSequences.Count != inputSequences.Count)
        throw new Exception("MSP");
    for (int i = 0; i < outputSequences.Count; i++)
    {
        if (outputSequences[i].Length != inputSequences[i].Length)
            throw new Exception("MSP 2");
    }

    // FIX: removed the unused local `var topology = new Forward(states: 50);`
    // — it was never passed to the model, which is constructed directly below
    // with 20 states, so the line was dead (and misleading) code.
    hmm = new HiddenMarkovModel(20, book.TotalUniqueSymbols + 1);

    // Supervised maximum-likelihood estimation: output sequences are the
    // observations, input sequences serve as the state labels.
    var teacher = new Accord.Statistics.Models.Markov.Learning.MaximumLikelihoodLearning(hmm)
    {
        UseLaplaceRule = false
    };

    double ll = teacher.Run(outputSequences.ToArray(), inputSequences.ToArray());

    Console.WriteLine("Error: {0}", ll);
}