Code example #1
        public void RunTest()
        {
            // Example from
            // http://www.cs.columbia.edu/4761/notes07/chapter4.3-HMM.pdf



            // The observed symbol sequences
            int[][] observations =
            {
                new int[] { 0, 0, 0, 1, 0, 0 },
                new int[] { 1, 0, 0, 1, 0, 0 },
                new int[] { 0, 0, 1, 0, 0, 0 },
                new int[] { 0, 0, 0, 0, 1, 0 },
                new int[] { 1, 0, 0, 0, 1, 0 },
                new int[] { 0, 0, 0, 1, 1, 0 },
                new int[] { 1, 0, 0, 0, 0, 0 },
                new int[] { 1, 0, 1, 0, 0, 0 },
            };

            // The state paths (labels) associated with each observation sequence above
            int[][] paths =
            {
                new int[] { 0, 0, 1, 0, 1, 0 },
                new int[] { 1, 0, 1, 0, 1, 0 },
                new int[] { 1, 0, 0, 1, 1, 0 },
                new int[] { 1, 0, 1, 1, 1, 0 },
                new int[] { 1, 0, 0, 1, 0, 1 },
                new int[] { 0, 0, 1, 0, 0, 1 },
                new int[] { 0, 0, 1, 1, 0, 1 },
                new int[] { 0, 1, 1, 1, 0, 0 },
            };


            // Create a hidden Markov model with 2 states and 2 output symbols
            HiddenMarkovModel model = new HiddenMarkovModel(states: 2, symbols: 2);

            // Create the supervised (maximum likelihood) learning algorithm
            MaximumLikelihoodLearning target = new MaximumLikelihoodLearning(model);

            // Do not apply Laplace smoothing when estimating the probabilities
            target.UseLaplaceRule = false;

            // Learn the model parameters from the labeled sequences
            double logLikelihood = target.Run(observations, paths);

            // The parameters are kept in log form, so take the exponential
            // to recover the actual probability values
            var pi = Matrix.Exp(model.Probabilities);
            var A  = Matrix.Exp(model.Transitions);
            var B  = Matrix.Exp(model.Emissions);

            Assert.AreEqual(0.5, pi[0]);
            Assert.AreEqual(0.5, pi[1]);

            Assert.AreEqual(7 / 20.0, A[0, 0], 1e-5);
            Assert.AreEqual(13 / 20.0, A[0, 1], 1e-5);
            Assert.AreEqual(14 / 20.0, A[1, 0], 1e-5);
            Assert.AreEqual(6 / 20.0, A[1, 1], 1e-5);

            Assert.AreEqual(17 / 25.0, B[0, 0]);
            Assert.AreEqual(8 / 25.0, B[0, 1]);
            Assert.AreEqual(19 / 23.0, B[1, 0]);
            Assert.AreEqual(4 / 23.0, B[1, 1]);

            Assert.AreEqual(-1.1472359046136624, logLikelihood);
        }
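
The expected values asserted above follow from simple counting: when the state paths are fully observed, maximum likelihood estimation reduces to relative frequencies of initial states, transitions, and emissions. The following standalone sketch (hypothetical helper, not part of the library above) reproduces those numbers by counting:

        // Hypothetical helper (not from either library) illustrating the counting rule
        public static void EstimateByCounting(int[][] observations, int[][] paths,
            int states, int symbols)
        {
            double[] pi = new double[states];
            double[,] A = new double[states, states];
            double[,] B = new double[states, symbols];

            // Initial probabilities: relative frequency of each path's first state
            foreach (int[] path in paths)
                pi[path[0]]++;
            for (int i = 0; i < states; i++)
                pi[i] /= paths.Length;                      // pi = { 0.5, 0.5 }

            // Transition counts: A[i, j] counts i -> j moves along every path
            foreach (int[] path in paths)
                for (int t = 1; t < path.Length; t++)
                    A[path[t - 1], path[t]]++;

            // Emission counts: B[i, s] counts symbol s emitted while in state i
            for (int k = 0; k < paths.Length; k++)
                for (int t = 0; t < paths[k].Length; t++)
                    B[paths[k][t], observations[k][t]]++;

            NormalizeRows(A);       // A = { { 7/20, 13/20 }, { 14/20, 6/20 } }
            NormalizeRows(B);       // B = { { 17/25, 8/25 }, { 19/23, 4/23 } }
        }

        // Divide each row by its sum so rows become probability distributions
        private static void NormalizeRows(double[,] m)
        {
            for (int i = 0; i < m.GetLength(0); i++)
            {
                double sum = 0;
                for (int j = 0; j < m.GetLength(1); j++)
                    sum += m[i, j];
                if (sum > 0)
                    for (int j = 0; j < m.GetLength(1); j++)
                        m[i, j] /= sum;
            }
        }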
Code example #2
        public static void MaximumLikelihoodLearning()
        {
            int[][] observations =
            {
                new int[] { 0, 0, 0, 1, 0, 0 },
                new int[] { 1, 0, 0, 1, 0, 0 },
                new int[] { 0, 0, 1, 0, 0, 0 },
                new int[] { 0, 0, 0, 0, 1, 0 },
                new int[] { 1, 0, 0, 0, 1, 0 },
                new int[] { 0, 0, 0, 1, 1, 0 },
                new int[] { 1, 0, 0, 0, 0, 0 },
                new int[] { 1, 0, 1, 0, 0, 0 },
            };

            // Now those are the visible states associated with each observation in each
            // observation sequence above. Note that there is always one state assigned
            // to each observation, so the lengths of the sequence of observations and
            // the sequence of states must always match.

            int[][] paths =
            {
                new int[] { 0, 0, 1, 0, 1, 0 },
                new int[] { 1, 0, 1, 0, 1, 0 },
                new int[] { 1, 0, 0, 1, 1, 0 },
                new int[] { 1, 0, 1, 1, 1, 0 },
                new int[] { 1, 0, 0, 1, 0, 1 },
                new int[] { 0, 0, 1, 0, 0, 1 },
                new int[] { 0, 0, 1, 1, 0, 1 },
                new int[] { 0, 1, 1, 1, 0, 0 },
            };

            // Create our Markov model with two states (0, 1) and two symbols (0, 1)
            HiddenMarkovModel model = new HiddenMarkovModel(state_count: 2, symbol_count: 2);

            // Now we can create our learning algorithm
            MaximumLikelihoodLearning teacher = new MaximumLikelihoodLearning(model)
            {
                // Set some options
                UseLaplaceRule = false
            };

            // and finally learn a model using the algorithm
            double logLikelihood = teacher.Run(observations, paths);


            // To check what has been learned, we can extract the emission
            // and transition matrices, as well as the initial probability
            // vector from the HMM to compare against expected values:

            double[] pi = model.ProbabilityVector; // { 0.5, 0.5 }
            double[,] A = model.TransitionMatrix;  // { { 7/20, 13/20 }, { 14/20, 6/20 } }
            double[,] B = model.EmissionMatrix;    // { { 17/25, 8/25 }, { 19/23, 4/23 } }

            Console.WriteLine("pi: {{{0}}}", string.Join(", ", pi));
            Console.WriteLine("A: {0}", ToString(A));
            Console.WriteLine("B: {0}", ToString(B));
        }
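
Both examples set UseLaplaceRule = false, so the raw counts are normalized directly. When the rule is enabled, the usual effect is add-one (Laplace) smoothing, which keeps unseen transitions or emissions from receiving zero probability. Below is a hedged sketch of that smoothing step for a single row of counts; the exact implementation inside MaximumLikelihoodLearning may differ.

        // Hypothetical illustration of add-one (Laplace) smoothing on one row of counts
        static double[] NormalizeWithLaplace(int[] counts)
        {
            double total = 0;
            foreach (int c in counts)
                total += c + 1;                 // add one to every cell before normalizing

            double[] p = new double[counts.Length];
            for (int i = 0; i < counts.Length; i++)
                p[i] = (counts[i] + 1) / total; // no cell can end up with probability zero
            return p;
        }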
Code example #3
        public double Run(int[][] observations_db)
        {
            int K = observations_db.Length;

            // Evaluate the current model on every sequence, combining the
            // per-sequence results in log space via LogHelper.LogSum
            double currLogLikelihood = Double.NegativeInfinity;

            for (int k = 0; k < K; ++k)
            {
                currLogLikelihood = LogHelper.LogSum(currLogLikelihood, mModel.Evaluate(observations_db[k]));
            }

            double oldLogLikelihood   = -1;
            double deltaLogLikelihood = -1;
            int    iteration          = 0;
            // Iterative (Viterbi-style) training: decode the most likely state
            // path for every sequence, re-estimate the parameters by maximum
            // likelihood on those paths, and repeat until convergence
            do
            {
                oldLogLikelihood = currLogLikelihood;

                // Decode a state path for each observation sequence
                int[][] paths_db = new int[K][];
                for (int k = 0; k < K; ++k)
                {
                    paths_db[k] = mModel.Decode(observations_db[k]);
                }

                // Re-estimate the model from the decoded ("labeled") paths
                mMaximumLikelihoodLearner.Run(observations_db, paths_db);

                // Re-evaluate the updated model on all sequences
                currLogLikelihood = double.NegativeInfinity;
                for (int k = 0; k < K; ++k)
                {
                    currLogLikelihood = LogHelper.LogSum(currLogLikelihood, mModel.Evaluate(observations_db[k]));
                }

                // ShouldTerminate decides convergence from the change in
                // log-likelihood and the iteration count
                deltaLogLikelihood = System.Math.Abs(currLogLikelihood - oldLogLikelihood);
                iteration++;
            } while (!ShouldTerminate(deltaLogLikelihood, iteration));

            return currLogLikelihood;
        }
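
LogHelper.LogSum is used above to accumulate likelihoods without leaving log space. Its actual source is not shown here; a common way to compute log(a + b) from log(a) and log(b) is the log-sum-exp trick sketched below (an assumption about the helper, not its definitive implementation):

        // Sketch of a log-sum operation: given log(a) and log(b), return log(a + b)
        // while staying in log space to avoid overflow and underflow
        static double LogSum(double logA, double logB)
        {
            if (double.IsNegativeInfinity(logA)) return logB;
            if (double.IsNegativeInfinity(logB)) return logA;

            // Factor out the larger term: log(a + b) = max + log(1 + exp(min - max))
            double max = System.Math.Max(logA, logB);
            double min = System.Math.Min(logA, logB);
            return max + System.Math.Log(1.0 + System.Math.Exp(min - max));
        }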