Maximum Likelihood learning algorithm for discrete-density Hidden Markov Models.

The maximum likelihood estimate is a supervised learning algorithm. It assumes that both the sequence of observations and the sequence of states of the Markov model are visible (labeled) during training.

Often, the Maximum Likelihood Estimate can be used to give a starting point to an unsupervised algorithm, making it possible to use semi-supervised techniques with HMMs. It is possible, for example, to use MLE to guess initial values for an HMM given a small set of manually labeled sequences, and then further estimate this model using the Viterbi learning algorithm.

Inherits: ISupervisedLearning
        public void RunTest()
        {
            // Worked example from
            // http://www.cs.columbia.edu/4761/notes07/chapter4.3-HMM.pdf

            // Eight labeled training pairs: the emitted symbol sequences...
            int[][] observations =
            {
                new[] { 0, 0, 0, 1, 0, 0 },
                new[] { 1, 0, 0, 1, 0, 0 },
                new[] { 0, 0, 1, 0, 0, 0 },
                new[] { 0, 0, 0, 0, 1, 0 },
                new[] { 1, 0, 0, 0, 1, 0 },
                new[] { 0, 0, 0, 1, 1, 0 },
                new[] { 1, 0, 0, 0, 0, 0 },
                new[] { 1, 0, 1, 0, 0, 0 },
            };

            // ...and the corresponding (visible) hidden-state sequences.
            int[][] paths =
            {
                new[] { 0, 0, 1, 0, 1, 0 },
                new[] { 1, 0, 1, 0, 1, 0 },
                new[] { 1, 0, 0, 1, 1, 0 },
                new[] { 1, 0, 1, 1, 1, 0 },
                new[] { 1, 0, 0, 1, 0, 1 },
                new[] { 0, 0, 1, 0, 0, 1 },
                new[] { 0, 0, 1, 1, 0, 1 },
                new[] { 0, 1, 1, 1, 0, 0 },
            };

            var model = new HiddenMarkovModel(states: 2, symbols: 2);

            // Supervised maximum likelihood estimation, without Laplace smoothing.
            var teacher = new MaximumLikelihoodLearning(model) { UseLaplaceRule = false };

            double logLikelihood = teacher.Run(observations, paths);

            // Matrix.Exp converts the model's (log-domain) parameters back to
            // plain probabilities before comparison.
            var initial = Matrix.Exp(model.Probabilities);
            var transitions = Matrix.Exp(model.Transitions);
            var emissions = Matrix.Exp(model.Emissions);

            // Initial state distribution.
            Assert.AreEqual(0.5, initial[0]);
            Assert.AreEqual(0.5, initial[1]);

            // Transition counts normalized over 20 observed transitions per row.
            Assert.AreEqual(7 / 20.0, transitions[0, 0], 1e-5);
            Assert.AreEqual(13 / 20.0, transitions[0, 1], 1e-5);
            Assert.AreEqual(14 / 20.0, transitions[1, 0], 1e-5);
            Assert.AreEqual(6 / 20.0, transitions[1, 1], 1e-5);

            // Emission counts normalized per state (25 and 23 visits respectively).
            Assert.AreEqual(17 / 25.0, emissions[0, 0]);
            Assert.AreEqual(8 / 25.0, emissions[0, 1]);
            Assert.AreEqual(19 / 23.0, emissions[1, 0]);
            Assert.AreEqual(4 / 23.0, emissions[1, 1]);

            Assert.AreEqual(-1.1472359046136624, logLikelihood);
        }
Example #2
0
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm.
 /// </summary>
 ///
 /// <param name="model">The discrete hidden Markov model to be estimated.</param>
 ///
 public ViterbiLearning(HiddenMarkovModel model)
 {
     // Viterbi learning is built on top of a maximum likelihood
     // estimator for the same model.
     this.mle = new MaximumLikelihoodLearning(model);
 }
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm
 ///   for a generic-distribution hidden Markov model.
 /// </summary>
 ///
 /// <param name="model">The hidden Markov model to be estimated.</param>
 ///
 public ViterbiLearning(HiddenMarkovModel<TDistribution, TObservation> model)
 {
     // Delegate parameter re-estimation to the generic maximum
     // likelihood learner for the same model.
     this.mle = new MaximumLikelihoodLearning<TDistribution, TObservation>(model);
 }
Example #4
0
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm.
 /// </summary>
 ///
 /// <param name="model">The discrete hidden Markov model to be estimated.</param>
 ///
 public ViterbiLearning(HiddenMarkovModel model)
 {
     // Maximum likelihood estimation drives each Viterbi iteration;
     // the absolute-convergence criterion decides when to stop.
     this.mle = new MaximumLikelihoodLearning(model);
     this.convergence = new AbsoluteConvergence();
 }
Example #5
0
 /// <summary>
 ///   Creates a new instance of the Viterbi learning algorithm.
 /// </summary>
 ///
 /// <param name="model">The discrete hidden Markov model to be learned.</param>
 ///
 public ViterbiLearning(HiddenMarkovModel model)
 {
     // The learner re-estimates the model by maximum likelihood over the
     // Viterbi paths, iterating until the convergence criterion is met.
     this.mle = new MaximumLikelihoodLearning(model);
     this.convergence = new AbsoluteConvergence();
 }
Example #6
0
        public void Train(CompositionCategory cat)
        {
            // Fixed seed so that training runs are reproducible.
            Accord.Math.Tools.SetupGenerator(42);

            List<int[]> inputSequences = new List<int[]>();
            List<int[]> outputSequences = new List<int[]>();

            foreach (Composition comp in cat.Compositions)
            {
                // Need at least two tracks: track 0 supplies the input melody,
                // track 1 the output melody.
                if (comp.Tracks.Count < 2)
                    continue;

                // Guard the casts: the original used 'as' without a null check
                // and would throw NullReferenceException on non-melody tracks.
                var melInput = comp.Tracks[0].GetMainSequence() as MelodySequence;
                var melOutput = comp.Tracks[1].GetMainSequence() as MelodySequence;
                if (melInput == null || melOutput == null)
                    continue;

                melInput.Trim(100);
                melInput.NormalizeNotes(4);
                melOutput.Trim(100);
                melOutput.NormalizeNotes(4);

                // Supervised HMM learning needs observation/state pairs of
                // equal length, so trim both melodies to the shorter one.
                if (melInput.Length > melOutput.Length)
                    melInput.Trim(melOutput.Length);
                else if (melOutput.Length > melInput.Length)
                    melOutput.Trim(melInput.Length);

                book.Add(melInput.Notes);
                book.Add(melOutput.Notes);
                inputSequences.Add(book.ToCodes(melInput.ToArray()));
                outputSequences.Add(book.ToCodes(melOutput.ToArray()));
            }

            // Sanity checks: sequences must come in equal-length pairs.
            if (outputSequences.Count != inputSequences.Count)
                throw new InvalidOperationException("MSP");
            for (int i = 0; i < outputSequences.Count; i++)
            {
                if (outputSequences[i].Length != inputSequences[i].Length)
                    throw new InvalidOperationException("MSP 2");
            }

            // NOTE(review): the original built 'new Forward(states: 50)' but never
            // used it — the model below has 20 states and no explicit topology.
            // The unused local has been removed; confirm the intended topology.
            hmm = new HiddenMarkovModel(20, book.TotalUniqueSymbols + 1);

            var teacher = new Accord.Statistics.Models.Markov.Learning.MaximumLikelihoodLearning(hmm)
            {
                UseLaplaceRule = false
            };

            double ll = teacher.Run(outputSequences.ToArray(), inputSequences.ToArray());
            Console.WriteLine("Error: {0}", ll);
        }