Internal methods for validation and other shared functions.
Example #1
        /// <summary>
        ///   Predicts the next observation occurring after a given observation sequence.
        /// </summary>
        ///
        public double[] Predict(double[] observations, int next, out double logLikelihood)
        {
            if (multivariate)
            {
                throw new ArgumentException("Model is multivariate.", "observations");
            }

            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }


            // Convert to multivariate observations
            double[][] obs = MarkovHelperMethods.convertNoCheck(observations, dimension);

            // Matrix to store the probabilities of the next
            // observations (prediction) belonging to each state.
            double[][] weights;

            // Compute the next observations
            double[][] prediction = predict(obs, next, out logLikelihood, out weights);

            // Return the first (single) dimension of the next observations.
            return(Accord.Math.Matrix.Concatenate(prediction));
        }
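A minimal usage sketch for the overload above. The trained model instance and the observation values are illustrative assumptions; only the Predict signature comes from the example itself.

        // Hypothetical usage: 'hmm' stands for an already-trained univariate
        // instance of this model type; the data values are made up.
        double logLikelihood;
        double[] history = { 1.0, 1.4, 1.1, 1.8 };

        // Ask for the three observations most likely to follow the history.
        double[] nextThree = hmm.Predict(history, 3, out logLikelihood);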
Example #2
        /// <summary>
        ///   Calculates the log-likelihood that this model has generated the
        ///   given observation sequence along the given state path.
        /// </summary>
        ///
        /// <param name="observations">A sequence of observations. </param>
        /// <param name="path">A sequence of states. </param>
        ///
        /// <returns>
        ///   The log-likelihood that the given sequence of observations has
        ///   been generated by this model along the given sequence of states.
        /// </returns>
        ///
        public double Evaluate(Array observations, int[] path)
        {
            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            if (path == null)
            {
                throw new ArgumentNullException("path");
            }

            if (observations.Length == 0)
            {
                return(Double.NegativeInfinity);
            }


            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);


            // The log-likelihood along a fixed state path is the sum of the
            // initial, transition and emission log-probabilities along that path.
            double logLikelihood = Probabilities[path[0]]
                                   + Emissions[path[0]].LogProbabilityFunction(x[0]);

            for (int i = 1; i < observations.Length; i++)
            {
                logLikelihood += Transitions[path[i - 1], path[i]]
                                 + Emissions[path[i]].LogProbabilityFunction(x[i]);
            }

            // Return the sequence probability
            return(logLikelihood);
        }
Example #3
        /// <summary>
        ///   Predicts the next observation occurring after a given observation sequence.
        /// </summary>
        ///
        public double Predict(double[] observations, out double logLikelihood)
        {
            if (multivariate)
            {
                throw new ArgumentException("Model is multivariate.", "observations");
            }

            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }


            // Convert to multivariate observations
            double[][] obs = MarkovHelperMethods.convertNoCheck(observations, dimension);

            // Matrix to store the probabilities of the next
            // observations (prediction) belonging to each state.
            double[][] weights;

            // Compute the next observation (currently only one ahead is supported).
            double[][] prediction = predict(obs, 1, out logLikelihood, out weights);

            return(prediction[0][0]);
        }
Example #4
        /// <summary>
        ///   Calculates the likelihood that this model has generated the given sequence.
        /// </summary>
        ///
        /// <remarks>
        ///   Evaluation problem. Given the HMM M = (A, B, pi) and the observation
        ///   sequence O = {o1, o2, ..., oK}, calculate the probability that model
        ///   M has generated sequence O. This can be computed efficiently using
        ///   either the Viterbi or the Forward algorithm.
        /// </remarks>
        ///
        /// <param name="observations">
        ///   A sequence of observations.
        /// </param>
        /// <returns>
        ///   The log-likelihood that the given sequence has been generated by this model.
        /// </returns>
        ///
        public double Evaluate(Array observations)
        {
            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            if (observations.Length == 0)
            {
                return(Double.NegativeInfinity);
            }


            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);


            // Forward algorithm
            double logLikelihood;

            // Compute forward probabilities
            ForwardBackwardAlgorithm.LogForward(this, x, out logLikelihood);

            // Return the sequence probability
            return(logLikelihood);
        }
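The remarks above mention the Forward algorithm but do not show it. The following is a self-contained sketch of the log-domain forward recursion for a small discrete HMM, written only to illustrate the idea behind ForwardBackwardAlgorithm.LogForward; it is not the library implementation, and the names LogForwardLikelihood and LogSum are invented for this example (it assumes using System).

        // Illustrative log-domain forward recursion for a small discrete HMM
        // (not library code).
        static double LogForwardLikelihood(double[] logPi, double[,] logA, double[,] logB, int[] x)
        {
            int states = logPi.Length;
            int T = x.Length;
            double[,] lnFwd = new double[T, states];

            // Base case: initial state probability plus the first emission (log domain).
            for (int i = 0; i < states; i++)
                lnFwd[0, i] = logPi[i] + logB[i, x[0]];

            // Induction: log-sum over predecessor states, then add the emission.
            for (int t = 1; t < T; t++)
                for (int j = 0; j < states; j++)
                {
                    double sum = Double.NegativeInfinity;
                    for (int i = 0; i < states; i++)
                        sum = LogSum(sum, lnFwd[t - 1, i] + logA[i, j]);
                    lnFwd[t, j] = sum + logB[j, x[t]];
                }

            // Termination: log-sum over the last time step gives the sequence log-likelihood.
            double logLikelihood = Double.NegativeInfinity;
            for (int i = 0; i < states; i++)
                logLikelihood = LogSum(logLikelihood, lnFwd[T - 1, i]);
            return logLikelihood;
        }

        // Numerically stable log(exp(a) + exp(b)).
        static double LogSum(double a, double b)
        {
            if (Double.IsNegativeInfinity(a)) return b;
            if (Double.IsNegativeInfinity(b)) return a;
            double max = Math.Max(a, b);
            return max + Math.Log(1.0 + Math.Exp(Math.Min(a, b) - max));
        }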
Example #5
        /// <summary>
        ///   Calculates the probability of each hidden state for each
        ///   observation in the observation vector.
        /// </summary>
        ///
        /// <remarks>
        ///   If there are 3 states in the model, and the <paramref name="observations"/>
        ///   array contains 5 elements, the resulting vector will contain 5 vectors of
        ///   size 3 each. Each vector of size 3 will contain probability values that sum
        ///   up to one. By following those probabilities in order, we may decode those
        ///   probabilities into a sequence of most likely states. However, the sequence
        ///   of obtained states may not be valid in the model.
        /// </remarks>
        ///
        /// <param name="observations">A sequence of observations.</param>
        ///
        /// <returns>A vector of the same size as the observation vectors, containing
        ///  the probabilities for each state in the model for the current observation.
        ///  If there are 3 states in the model, and the <paramref name="observations"/>
        ///  array contains 5 elements, the resulting vector will contain 5 vectors of
        ///  size 3 each. Each vector of size 3 will contain probability values that sum
        ///  up to one.</returns>
        ///
        public double[][] Posterior(Array observations)
        {
            // Reference: C. S. Foo, CS262 Winter 2007, Lecture 5, Stanford
            // http://ai.stanford.edu/~serafim/CS262_2007/notes/lecture5.pdf

            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);

            double logLikelihood;

            // Compute forward and backward probabilities
            double[,] lnFwd = ForwardBackwardAlgorithm.LogForward(this, x, out logLikelihood);
            double[,] lnBwd = ForwardBackwardAlgorithm.LogBackward(this, x);

            double[][] probabilities = new double[observations.Length][];

            for (int i = 0; i < probabilities.Length; i++)
            {
                double[] states = probabilities[i] = new double[States];

                for (int j = 0; j < states.Length; j++)
                {
                    states[j] = Math.Exp(lnFwd[i, j] + lnBwd[i, j] - logLikelihood);
                }
            }

            return(probabilities);
        }
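A minimal usage sketch for Posterior. The hmm and sequence variables are assumptions standing for an already-trained model and a compatible observation array; only the Posterior signature comes from the example.

        // Hypothetical usage: 'hmm' is an already-trained model instance and
        // 'sequence' a compatible observation array (both names are illustrative).
        double[][] gamma = hmm.Posterior(sequence);

        // gamma[t][j] is the probability of being in state j at time t given
        // the whole sequence; each gamma[t] sums to one.
        for (int t = 0; t < gamma.Length; t++)
            Console.WriteLine(String.Join(" ", gamma[t]));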
Example #6
        internal static void CheckObservationDimensions <TObservation, TDistribution>(TObservation[][] x, HiddenMarkovModel <TDistribution, TObservation> hmm) where TDistribution : IFittableDistribution <TObservation>
        {
            int expected = MarkovHelperMethods.GetObservationDimensions(x);
            int actual   = hmm.NumberOfInputs;

            if (actual != expected)
            {
                throw new InvalidOperationException(String.Format("The specified emission distributions do not model observations with the same number of dimensions " +
                                                                  "as the training data. The training data has {0}-dimensional observations, but the emissions can model up to {1} dimensions.", expected, actual));
            }
        }
Example #7
        /// <summary>
        ///   Calculates the most likely sequence of hidden states
        ///   that produced the given observation sequence.
        /// </summary>
        ///
        /// <remarks>
        ///   Decoding problem. Given the HMM M = (A, B, pi) and the observation sequence
        ///   O = {o1, o2, ..., oK}, calculate the most likely sequence of hidden states Si
        ///   that produced this observation sequence O. This can be computed efficiently
        ///   using the Viterbi algorithm.
        /// </remarks>
        ///
        /// <param name="observations">A sequence of observations.</param>
        /// <param name="logLikelihood">The log-likelihood along the most likely sequence.</param>
        ///
        /// <returns>The sequence of states that most likely produced the sequence.</returns>
        ///
        public int[] Decode(Array observations, out double logLikelihood)
        {
            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            if (observations.Length == 0)
            {
                logLikelihood = Double.NegativeInfinity;
                return(new int[0]);
            }

            // Argument check
            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);

            return(viterbi(x, out logLikelihood));
        }
Example #8
        /// <summary>
        ///   Calculates the log-likelihood that this model has generated the
        ///   given observation sequence along the given state path.
        /// </summary>
        ///
        /// <param name="observations">A sequence of observations. </param>
        /// <param name="path">A sequence of states. </param>
        ///
        /// <returns>
        ///   The log-likelihood that the given sequence of observations has
        ///   been generated by this model along the given sequence of states.
        /// </returns>
        ///
        public double Evaluate(Array observations, int[] path)
        {
            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            if (path == null)
            {
                throw new ArgumentNullException("path");
            }

            if (observations.Length == 0)
            {
                return(Double.NegativeInfinity);
            }


            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);

            try
            {
                double logLikelihood = Probabilities[path[0]]
                                       + Emissions[path[0]].LogProbabilityFunction(x[0]);

                for (int i = 1; i < observations.Length; i++)
                {
                    double a = Transitions[path[i - 1], path[i]];
                    double b = Emissions[path[i]].LogProbabilityFunction(x[i]);
                    logLikelihood += a + b;
                }

                // Return the sequence probability
                return(logLikelihood);
            }
            catch (IndexOutOfRangeException ex)
            {
                checkHiddenStates(ex, path);
                throw;
            }
        }
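To make the returned quantity concrete, here is a standalone numeric sketch (a made-up two-state model, not tied to the class above) showing how the log-likelihood along a fixed path accumulates from initial, transition and emission terms.

        // Standalone illustration with made-up numbers: the joint probability of a
        // path and its observations factors into initial * transition * emission
        // terms, so in the log domain the terms simply add up.
        double[] logPi = { Math.Log(0.6), Math.Log(0.4) };
        double[,] logA = { { Math.Log(0.7), Math.Log(0.3) },
                           { Math.Log(0.4), Math.Log(0.6) } };
        // logB[state, symbol] plays the role of Emissions[state].LogProbabilityFunction(x[t]).
        double[,] logB = { { Math.Log(0.9), Math.Log(0.1) },
                           { Math.Log(0.2), Math.Log(0.8) } };

        int[] statePath = { 0, 0, 1 };
        int[] symbols   = { 0, 0, 1 };

        double ll = logPi[statePath[0]] + logB[statePath[0], symbols[0]];
        for (int t = 1; t < symbols.Length; t++)
            ll += logA[statePath[t - 1], statePath[t]] + logB[statePath[t], symbols[t]];
        // ll == log(0.6 * 0.9 * 0.7 * 0.9 * 0.3 * 0.8)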
Example #9
        /// <summary>
        ///   Predicts the next observation occurring after a given observation sequence.
        /// </summary>
        ///
        private double[] predict <TUnivariate>(double[] observations,
                                               out double logLikelihood, out Mixture <TUnivariate> probabilities)
            where TUnivariate : DistributionBase, TDistribution, IUnivariateDistribution
        {
            // Convert to multivariate observations
            double[][] obs = MarkovHelperMethods.convertNoCheck(observations, dimension);

            // Matrix to store the probabilities of the next
            // observations (prediction) belonging to each state.
            double[][] weights;

            // Compute the next observation (currently only one ahead is supported).
            double[][] prediction = predict(obs, 1, out logLikelihood, out weights);

            // Create the mixture distribution giving the likelihood that the
            // next observation will belong to each state.
            TUnivariate[] b = Array.ConvertAll(B, x => (TUnivariate)x);
            probabilities = new Mixture <TUnivariate>(weights[1].Exp(), b);

            return(prediction[0]);
        }
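As a standalone illustration of what the resulting mixture represents (illustrative numbers and hand-written densities, not the library's Mixture class): the predictive density is a weighted sum of the per-state emission densities, with the state probabilities as the weights.

        // Two made-up Gaussian emission densities and their state weights.
        double[] w = { 0.25, 0.75 };   // exp of the per-state log-weights
        Func<double, double>[] pdf =
        {
            v => Math.Exp(-0.5 * v * v) / Math.Sqrt(2.0 * Math.PI),                // state 0: N(0, 1)
            v => Math.Exp(-0.5 * (v - 2.0) * (v - 2.0)) / Math.Sqrt(2.0 * Math.PI)  // state 1: N(2, 1)
        };

        // Predictive density at a point: weighted sum of the component densities.
        double point = 1.5;
        double density = 0.0;
        for (int i = 0; i < w.Length; i++)
            density += w[i] * pdf[i](point);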
Example #10
        /// <summary>
        ///   Calculates the most likely sequence of hidden states
        ///   that produced the given observation sequence.
        /// </summary>
        ///
        /// <remarks>
        ///   Decoding problem. Given the HMM M = (A, B, pi) and the observation sequence
        ///   O = {o1, o2, ..., oK}, calculate the most likely sequence of hidden states Si
        ///   that produced this observation sequence O. This can be computed efficiently
        ///   using the Viterbi algorithm.
        /// </remarks>
        ///
        /// <param name="observations">A sequence of observations.</param>
        /// <param name="logLikelihood">The log-likelihood along the most likely sequence.</param>
        /// <returns>The sequence of states that most likely produced the sequence.</returns>
        ///
        public int[] Decode(Array observations, out double logLikelihood)
        {
            if (observations == null)
            {
                throw new ArgumentNullException("observations");
            }

            if (observations.Length == 0)
            {
                logLikelihood = Double.NegativeInfinity;
                return(new int[0]);
            }

            // Argument check
            double[][] x = MarkovHelperMethods.checkAndConvert(observations, dimension);


            // Viterbi-forward algorithm.
            int    T      = x.Length;
            int    states = States;
            int    maxState;
            double maxWeight;
            double weight;

            double[] logPi = Probabilities;
            double[,] logA = Transitions;

            int[,] s        = new int[states, T];
            double[,] lnFwd = new double[states, T];


            // Base
            for (int i = 0; i < states; i++)
            {
                lnFwd[i, 0] = logPi[i] + B[i].LogProbabilityFunction(x[0]);
            }

            // Induction
            for (int t = 1; t < T; t++)
            {
                double[] observation = x[t];

                for (int j = 0; j < states; j++)
                {
                    maxState  = 0;
                    maxWeight = lnFwd[0, t - 1] + logA[0, j];

                    for (int i = 1; i < states; i++)
                    {
                        weight = lnFwd[i, t - 1] + logA[i, j];

                        if (weight > maxWeight)
                        {
                            maxState  = i;
                            maxWeight = weight;
                        }
                    }

                    lnFwd[j, t] = maxWeight + B[j].LogProbabilityFunction(observation);
                    s[j, t]     = maxState;
                }
            }

            // Find maximum value for time T-1
            maxState  = 0;
            maxWeight = lnFwd[0, T - 1];

            for (int i = 1; i < states; i++)
            {
                if (lnFwd[i, T - 1] > maxWeight)
                {
                    maxState  = i;
                    maxWeight = lnFwd[i, T - 1];
                }
            }


            // Backtrack through the stored argmax table to recover the path
            int[] path = new int[T];
            path[T - 1] = maxState;

            for (int t = T - 2; t >= 0; t--)
            {
                path[t] = s[path[t + 1], t + 1];
            }


            // Return the log-likelihood along the most likely path as an out parameter
            logLikelihood = maxWeight;

            // Return the most likely state sequence (Viterbi path) for the given observations
            return(path);
        }
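A minimal usage sketch for Decode. The hmm and sequence variables are illustrative assumptions for a trained model and a compatible observation array; only the Decode signature comes from the example.

        // Hypothetical usage: 'hmm' is an already-trained model instance and
        // 'sequence' a compatible observation array (both names are illustrative).
        double logLikelihood;
        int[] path = hmm.Decode(sequence, out logLikelihood);

        // path[t] is the most likely hidden state at time t along the single
        // best (Viterbi) path; logLikelihood is the log-likelihood along it.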