public HiddenMarkovModel(int state_count, DistributionModel[] emissions)
        {
            mStateCount = state_count;
            DiagnosticsHelper.Assert(emissions.Length == mStateCount);

            mLogTransitionMatrix  = new double[mStateCount, mStateCount];
            mLogProbabilityVector = new double[mStateCount];

            // Start in state 0 with probability 1; the remaining entries stay 0 and
            // become Math.Log(0) = negative infinity below.
            mLogProbabilityVector[0] = 1.0;

            for (int i = 0; i < mStateCount; ++i)
            {
                mLogProbabilityVector[i] = System.Math.Log(mLogProbabilityVector[i]);

                // Uniform transition probabilities, stored in log space.
                for (int j = 0; j < mStateCount; ++j)
                {
                    mLogTransitionMatrix[i, j] = System.Math.Log(1.0 / mStateCount);
                }
            }

            mEmissionModels = new DistributionModel[mStateCount];

            for (int i = 0; i < mStateCount; ++i)
            {
                mEmissionModels[i] = emissions[i].Clone();
            }

            if (emissions[0] is MultivariateDistributionModel)
            {
                mMultivariate = true;
                mDimension    = ((MultivariateDistributionModel)mEmissionModels[0]).Dimension;
            }
        }
        /// <summary>
        /// Compute forward probabilities, in log space, for a given hidden Markov model and a sequence of observations
        /// </summary>
        /// <param name="logA">Transition matrix: logA[i, j] is the log probability of transitioning from state i to state j</param>
        /// <param name="logB">Emission models: logB[i] is the emission distribution of state i, used to evaluate the log probability of observation[t] given state i</param>
        /// <param name="logPi">Initial state vector: logPi[i] is the log probability that the chain starts in state i</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="lnfwd">Forward probability matrix: lnfwd[t, i] is the log probability of observing o_1..o_t and being in state i at time t</param>
        public static void LogForward(double[,] logA, DistributionModel[] logB, double[] logPi, double[] observations, double[,] lnfwd)
        {
            int T = observations.Length; // length of the observation
            int N = logPi.Length;        // number of states

            DiagnosticsHelper.Assert(logA.GetLength(0) == N);
            DiagnosticsHelper.Assert(logA.GetLength(1) == N);
            DiagnosticsHelper.Assert(logB.Length == N);
            DiagnosticsHelper.Assert(lnfwd.GetLength(0) >= T);
            DiagnosticsHelper.Assert(lnfwd.GetLength(1) == N);

            System.Array.Clear(lnfwd, 0, lnfwd.Length);

            for (int i = 0; i < N; ++i)
            {
                lnfwd[0, i] = logPi[i] + MathHelper.LogProbabilityFunction(logB[i], observations[0]);
            }

            for (int t = 1; t < T; ++t)
            {
                double obs_t = observations[t];

                for (int i = 0; i < N; ++i)
                {
                    double sum = double.NegativeInfinity;
                    for (int j = 0; j < N; ++j)
                    {
                        sum = LogHelper.LogSum(sum, lnfwd[t - 1, j] + logA[j, i]);
                    }
                    lnfwd[t, i] = sum + MathHelper.LogProbabilityFunction(logB[i], obs_t);
                }
            }
        }
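        // Hedged usage sketch (not part of the original source): how the log-space forward pass
        // above is typically consumed. The sequence log-likelihood is the log-sum of the last
        // row of lnfwd; LogHelper.LogSum is the same helper used above.
        private static double LogLikelihoodSketch(double[,] logA, DistributionModel[] logB, double[] logPi, double[] observations)
        {
            int T = observations.Length;
            int N = logPi.Length;

            double[,] lnfwd = new double[T, N];
            LogForward(logA, logB, logPi, observations, lnfwd);

            double logLikelihood = double.NegativeInfinity;
            for (int i = 0; i < N; ++i)
            {
                logLikelihood = LogHelper.LogSum(logLikelihood, lnfwd[T - 1, i]);
            }
            return logLikelihood;
        }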
        /// <summary>
        /// Compute backward probabilities, in log space, for a given hidden Markov model and a sequence of observations
        /// </summary>
        /// <param name="logA">Transition matrix: logA[i, j] is the log probability of transitioning from state i to state j</param>
        /// <param name="logB">Emission models: logB[i] is the emission distribution of state i, used to evaluate the log probability of observation[t] given state i</param>
        /// <param name="logPi">Initial state vector: logPi[i] is the log probability that the chain starts in state i</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="lnbwd">Backward probability matrix: lnbwd[t, i] is the log probability of observing o_{t+1}..o_T given that the chain is in state i at time t</param>
        public static void LogBackward(double[,] logA, DistributionModel[] logB, double[] logPi, double[] observations, double[,] lnbwd)
        {
            int T = observations.Length; //length of time series
            int N = logPi.Length;        //number of states

            DiagnosticsHelper.Assert(logA.GetLength(0) == N);
            DiagnosticsHelper.Assert(logA.GetLength(1) == N);
            DiagnosticsHelper.Assert(logB.Length == N);
            DiagnosticsHelper.Assert(lnbwd.GetLength(0) >= T);
            DiagnosticsHelper.Assert(lnbwd.GetLength(1) == N);

            Array.Clear(lnbwd, 0, lnbwd.Length);

            for (int i = 0; i < N; ++i)
            {
                lnbwd[T - 1, i] = 0;
            }

            for (int t = T - 2; t >= 0; t--)
            {
                for (int i = 0; i < N; ++i)
                {
                    double sum = double.NegativeInfinity;
                    for (int j = 0; j < N; ++j)
                    {
                        sum = LogHelper.LogSum(sum, logA[i, j] + MathHelper.LogProbabilityFunction(logB[j], observations[t + 1]) + lnbwd[t + 1, j]);
                    }
                    lnbwd[t, i] += sum;
                }
            }
        }
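        // Hedged sketch (not part of the original source): combining the log-space forward and
        // backward passes above into smoothed state posteriors (gamma), the same combination the
        // Baum-Welch routine further below performs per sequence.
        private static double[,] LogGammaSketch(double[,] logA, DistributionModel[] logB, double[] logPi, double[] observations)
        {
            int T = observations.Length;
            int N = logPi.Length;

            double[,] lnfwd = new double[T, N];
            double[,] lnbwd = new double[T, N];
            LogForward(logA, logB, logPi, observations, lnfwd);
            LogBackward(logA, logB, logPi, observations, lnbwd);

            double[,] logGamma = new double[T, N];
            for (int t = 0; t < T; ++t)
            {
                double lnsum = double.NegativeInfinity;
                for (int i = 0; i < N; ++i)
                {
                    logGamma[t, i] = lnfwd[t, i] + lnbwd[t, i];
                    lnsum          = LogHelper.LogSum(lnsum, logGamma[t, i]);
                }
                if (lnsum != double.NegativeInfinity)
                {
                    for (int i = 0; i < N; ++i)
                    {
                        logGamma[t, i] -= lnsum; // log P(state i at time t | all observations)
                    }
                }
            }
            return logGamma;
        }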
        public double Run(int[][] observations_db, int[] class_labels)
        {
            ValidationHelper.ValidateObservationDb(observations_db, 0, mClassifier.SymbolCount);

            int class_count = mClassifier.ClassCount;

            double[] logLikelihood = new double[class_count];

            int K = class_labels.Length;

            DiagnosticsHelper.Assert(observations_db.Length == K);

            int[] class_label_counts = new int[class_count];

            Parallel.For(0, class_count, i =>
            {
                IUnsupervisedLearning teacher = mAlgorithmEntity(i);

                List <int> match_record_index_set = new List <int>();
                for (int k = 0; k < K; ++k)
                {
                    if (class_labels[k] == i)
                    {
                        match_record_index_set.Add(k);
                    }
                }

                int K2 = match_record_index_set.Count;

                class_label_counts[i] = K2;

                if (K2 != 0)
                {
                    int[][] observations_subdb = new int[K2][];
                    for (int k = 0; k < K2; ++k)
                    {
                        int record_index      = match_record_index_set[k];
                        observations_subdb[k] = observations_db[record_index];
                    }


                    logLikelihood[i] = teacher.Run(observations_subdb);
                }
            });

            if (mEmpirical)
            {
                for (int i = 0; i < class_count; i++)
                {
                    mClassifier.Priors[i] = (double)class_label_counts[i] / K;
                }
            }

            //if (mRejection)
            //{
            //    mClassifier.Threshold = Threshold();
            //}

            return(logLikelihood.Sum());
        }
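        // Hedged sketch (not part of the original source): once the per-class models trained by
        // Run are available, classification typically picks the class maximising log prior plus
        // sequence log-likelihood. 'models' and 'priors' stand in for the classifier's internals,
        // and HiddenMarkovModel.Evaluate is assumed to return the sequence log-likelihood, as it
        // is used elsewhere in this code.
        private static int ClassifySketch(HiddenMarkovModel[] models, double[] priors, int[] observations)
        {
            int    bestClass = -1;
            double bestScore = double.NegativeInfinity;
            for (int i = 0; i < models.Length; ++i)
            {
                double score = System.Math.Log(priors[i]) + models[i].Evaluate(observations);
                if (score > bestScore)
                {
                    bestScore = score;
                    bestClass = i;
                }
            }
            return bestClass;
        }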
        /// <summary>
        /// Compute forward probabilities for a given hidden Markov model and a sequence of observations without scaling
        /// </summary>
        /// <param name="A">Transition matrix: A[i, j] is the probability of transitioning from state i to state j</param>
        /// <param name="B">Emission matrix: B[i, observation[t]] is the probability of observing observation[t] given that the state at time t is i</param>
        /// <param name="pi">Initial state vector: pi[i] is the probability that the chain starts in state i</param>
        /// <param name="observations">Observed time series</param>
        /// <returns>Forward probability matrix: fwd[t, i] is the unscaled probability of observing o_1..o_t and being in state i at time t</returns>
        public static double[,] Forward(double[,] A, double[,] B, double[] pi, int[] observations)
        {
            int T = observations.Length; // length of the observation
            int N = pi.Length;           // number of states

            double[,] fwd = new double[T, N];

            DiagnosticsHelper.Assert(A.GetLength(0) == N);
            DiagnosticsHelper.Assert(A.GetLength(1) == N);
            DiagnosticsHelper.Assert(B.GetLength(0) == N);

            for (int i = 0; i < N; ++i)
            {
                fwd[0, i] = pi[i] * B[i, observations[0]];
            }

            for (int t = 1; t < T; ++t)
            {
                int obs_t = observations[t];

                for (int i = 0; i < N; ++i)
                {
                    double sum = 0.0; //probability that the sequence will have state at time t equal to i
                    for (int j = 0; j < N; ++j)
                    {
                        sum += fwd[t - 1, j] * A[j, i];
                    }
                    double prob_obs_state_i = sum * B[i, obs_t]; //joint probability of the observations up to time t and of state i at time t
                    fwd[t, i] = prob_obs_state_i;
                }
            }

            return(fwd);
        }
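        // Hedged sketch (not part of the original source): the unscaled forward matrix returned
        // above gives the sequence likelihood as the sum of its last row. This underflows for
        // long sequences, which is why the scaled and log-space variants exist.
        private static double LikelihoodSketch(double[,] A, double[,] B, double[] pi, int[] observations)
        {
            double[,] fwd = Forward(A, B, pi, observations);

            int T = observations.Length;
            int N = pi.Length;

            double likelihood = 0.0;
            for (int i = 0; i < N; ++i)
            {
                likelihood += fwd[T - 1, i];
            }
            return likelihood;
        }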
        /// <summary>
        /// Compute backward probabilities for a given hidden Markov model and a sequence of observations with scaling
        /// </summary>
        /// <param name="A">Transition matrix: A[i, j] is the probability of transitioning from state i to state j</param>
        /// <param name="B">Emission matrix: B[i, observation[t]] is the probability of observing observation[t] given that the state at time t is i</param>
        /// <param name="pi">Initial state vector: pi[i] is the probability that the chain starts in state i</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="scale_vector">Scale vector produced by the scaled forward pass</param>
        /// <param name="bwd">Backward probability matrix: bwd[t, i] is the backward probability of observing o_{t+1}..o_T given state i at time t, rescaled by the forward scale factors</param>
        public static void Backward(double[,] A, double[,] B, double[] pi, int[] observations, double[] scale_vector, double[,] bwd)
        {
            int T = observations.Length; //length of time series
            int N = pi.Length;           //number of states

            DiagnosticsHelper.Assert(A.GetLength(0) == N);
            DiagnosticsHelper.Assert(A.GetLength(1) == N);
            DiagnosticsHelper.Assert(B.GetLength(0) == N);
            DiagnosticsHelper.Assert(scale_vector.Length >= T);
            DiagnosticsHelper.Assert(bwd.GetLength(0) >= T);
            DiagnosticsHelper.Assert(bwd.GetLength(1) == N);

            Array.Clear(bwd, 0, bwd.Length);

            for (int i = 0; i < N; ++i)
            {
                bwd[T - 1, i] = 1.0 / scale_vector[T - 1];
            }

            for (int t = T - 2; t >= 0; t--)
            {
                for (int i = 0; i < N; ++i)
                {
                    double sum = 0.0; //probability that the sequence will have state i at time t
                    for (int j = 0; j < N; ++j)
                    {
                        sum += A[i, j] * B[j, observations[t + 1]] * bwd[t + 1, j];
                    }
                    bwd[t, i] += sum / scale_vector[t];
                }
            }
        }
        /// <summary>
        /// Compute forward probabilities for a given hidden Markov model and a sequence of observations with scaling
        /// </summary>
        /// <param name="A">Transition matrix: A[i, j] is the probability of transitioning from state i to state j</param>
        /// <param name="B">Emission matrix: B[i, observation[t]] is the probability of observing observation[t] given that the state at time t is i</param>
        /// <param name="pi">Initial state vector: pi[i] is the probability that the chain starts in state i</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="scale_vector">Scale vector: scale_vector[t] receives the normalisation factor c_t applied at time t</param>
        /// <param name="fwd">Forward probability matrix: fwd[t, i] is the scaled probability of being in state i at time t given the observations up to time t</param>
        public static void Forward(double[,] A, double[,] B, double[] pi, int[] observations, double[] scale_vector, double[,] fwd)
        {
            int T = observations.Length; // length of the observation
            int N = pi.Length;           // number of states

            DiagnosticsHelper.Assert(A.GetLength(0) == N);
            DiagnosticsHelper.Assert(A.GetLength(1) == N);
            DiagnosticsHelper.Assert(B.GetLength(0) == N);
            DiagnosticsHelper.Assert(scale_vector.Length >= T);
            DiagnosticsHelper.Assert(fwd.GetLength(0) >= T);
            DiagnosticsHelper.Assert(fwd.GetLength(1) == N);

            System.Array.Clear(fwd, 0, fwd.Length);

            double c_t = 0.0;

            for (int i = 0; i < N; ++i)
            {
                c_t += fwd[0, i] = pi[i] * B[i, observations[0]];
            }

            scale_vector[0] = c_t;

            //scale probability
            if (c_t != 0)
            {
                for (int i = 0; i < N; ++i)
                {
                    fwd[0, i] /= c_t;
                }
            }

            for (int t = 1; t < T; ++t)
            {
                c_t = 0.0;
                int obs_t = observations[t];

                for (int i = 0; i < N; ++i)
                {
                    double prob_state_i = 0.0; //probability that the sequence will have state at time t equal to i
                    for (int j = 0; j < N; ++j)
                    {
                        prob_state_i += fwd[t - 1, j] * A[j, i];
                    }
                    double prob_obs_state_i = prob_state_i * B[i, obs_t]; //probability of state i at time t and the observation at time t, based on the scaled forward values
                    fwd[t, i] = prob_obs_state_i;
                    c_t      += prob_obs_state_i;
                }

                scale_vector[t] = c_t;

                //scale probability
                if (c_t != 0)
                {
                    for (int i = 0; i < N; ++i)
                    {
                        fwd[t, i] /= c_t;
                    }
                }
            }
        }
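        // Hedged sketch (not part of the original source): with the scaled forward pass above,
        // the sequence log-likelihood is recovered as the sum of the logs of the scale factors,
        // since the product of the c_t values equals P(o_1..o_T).
        private static double LogLikelihoodFromScalesSketch(double[,] A, double[,] B, double[] pi, int[] observations)
        {
            int T = observations.Length;
            int N = pi.Length;

            double[] scale_vector = new double[T];
            double[,] fwd = new double[T, N];
            Forward(A, B, pi, observations, scale_vector, fwd);

            double logLikelihood = 0.0;
            for (int t = 0; t < T; ++t)
            {
                logLikelihood += System.Math.Log(scale_vector[t]);
            }
            return logLikelihood;
        }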
        public HiddenMarkovModel(ITopology topology, DistributionModel[] emissions)
        {
            mStateCount = topology.Create(out mLogTransitionMatrix, out mLogProbabilityVector);
            DiagnosticsHelper.Assert(emissions.Length == mStateCount);

            mEmissionModels = new DistributionModel[mStateCount];

            for (int i = 0; i < mStateCount; ++i)
            {
                mEmissionModels[i] = emissions[i].Clone();
            }

            if (emissions[0] is MultivariateDistributionModel)
            {
                mMultivariate = true;
                mDimension    = ((MultivariateDistributionModel)mEmissionModels[0]).Dimension;
            }
        }
        public HiddenMarkovClassifier(int class_count, int[] state_count_array, DistributionModel B_distribution)
        {
            mClassCount  = class_count;
            mSymbolCount = -1;

            DiagnosticsHelper.Assert(state_count_array.Length >= class_count);

            mModels = new HiddenMarkovModel[mClassCount];
            for (int i = 0; i < mClassCount; ++i)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(state_count_array[i], B_distribution);
                mModels[i] = hmm;
            }

            mClassPriors = new double[mClassCount];
            for (int i = 0; i < mClassCount; ++i)
            {
                mClassPriors[i] = 1.0 / mClassCount;
            }
        }
        public HiddenMarkovClassifier(int class_count, ITopology[] topology_array, int symbol_count)
        {
            mClassCount  = class_count;
            mSymbolCount = symbol_count;

            DiagnosticsHelper.Assert(topology_array.Length >= class_count);

            mModels = new HiddenMarkovModel[mClassCount];

            for (int i = 0; i < mClassCount; ++i)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(topology_array[i], symbol_count);
                mModels[i] = hmm;
            }

            mClassPriors = new double[mClassCount];
            for (int i = 0; i < mClassCount; ++i)
            {
                mClassPriors[i] = 1.0 / mClassCount;
            }
        }
        public HiddenMarkovModel(double[,] A, DistributionModel[] emissions, double[] pi)
        {
            mStateCount = pi.Length;
            DiagnosticsHelper.Assert(emissions.Length == mStateCount);

            mLogTransitionMatrix  = LogHelper.Log(A);
            mLogProbabilityVector = LogHelper.Log(pi);

            mEmissionModels = new DistributionModel[mStateCount];

            for (int i = 0; i < mStateCount; ++i)
            {
                mEmissionModels[i] = emissions[i].Clone();
            }

            if (emissions[0] is MultivariateDistributionModel)
            {
                mMultivariate = true;
                mDimension    = ((MultivariateDistributionModel)mEmissionModels[0]).Dimension;
            }
        }
        /// <summary>
        /// Viterbi decoding in log space: finds the most likely hidden state path for the given observations
        /// </summary>
        /// <param name="logA">Transition matrix in log space</param>
        /// <param name="probB">Emission models, one per state</param>
        /// <param name="logPi">Initial state vector in log space</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="logLikelihood">The log joint probability of the returned path and the observations</param>
        /// <returns>The most likely state path</returns>
        public static int[] LogForward(double[,] logA, DistributionModel[] probB, double[] logPi, double[] observations, out double logLikelihood)
        {
            int T = observations.Length;
            int N = logPi.Length;

            DiagnosticsHelper.Assert(logA.GetLength(0) == N);
            DiagnosticsHelper.Assert(logA.GetLength(1) == N);
            DiagnosticsHelper.Assert(probB.Length == N);

            int[,] V = new int[T, N];

            double[,] fwd = new double[T, N];

            for (int i = 0; i < N; ++i)
            {
                fwd[0, i] = logPi[i] + MathHelper.LogProbabilityFunction(probB[i], observations[0]);
            }

            double maxWeight = 0;
            int    maxState  = 0;

            for (int t = 1; t < T; ++t)
            {
                double x = observations[t];
                for (int i = 0; i < N; ++i)
                {
                    maxWeight = fwd[t - 1, 0] + logA[0, i];
                    maxState  = 0;

                    double weight = 0;
                    for (int j = 1; j < N; ++j)
                    {
                        weight = fwd[t - 1, j] + logA[j, i];
                        if (maxWeight < weight)
                        {
                            maxWeight = weight;
                            maxState  = j;
                        }
                    }

                    fwd[t, i] = maxWeight + MathHelper.LogProbabilityFunction(probB[i], x);
                    V[t, i]   = maxState;
                }
            }

            maxState  = 0;
            maxWeight = fwd[T - 1, 0];
            for (int i = 0; i < N; ++i)
            {
                if (fwd[T - 1, i] > maxWeight)
                {
                    maxWeight = fwd[T - 1, i];
                    maxState  = i;
                }
            }

            int[] path = new int[T];
            path[T - 1] = maxState;
            for (int t = T - 2; t >= 0; --t)
            {
                path[t] = V[t + 1, path[t + 1]];
            }

            logLikelihood = maxWeight;

            return(path);
        }
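        // Hedged usage sketch (not part of the original source): the method above is a log-space
        // Viterbi decoder; a typical call recovers the most likely hidden state path along with
        // the log joint probability of that path and the observations.
        private static int[] DecodeSketch(double[,] logA, DistributionModel[] probB, double[] logPi, double[] observations)
        {
            double pathLogLikelihood;
            int[]  path = LogForward(logA, probB, logPi, observations, out pathLogLikelihood);
            // path[t] is the most probable hidden state at time t under the Viterbi criterion.
            return path;
        }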
        public double Run(double[][] observations_db, int[][] path_db)
        {
            int K = observations_db.Length;

            DiagnosticsHelper.Assert(path_db.Length == K);

            int N = mModel.StateCount;
            int M = mModel.SymbolCount;

            int[] initial = new int[N];
            int[,] transition_matrix = new int[N, N];

            for (int k = 0; k < K; ++k)
            {
                initial[path_db[k][0]]++;
            }

            int T = 0;

            for (int k = 0; k < K; ++k)
            {
                int[]    path         = path_db[k];
                double[] observations = observations_db[k];

                T = path.Length;
                for (int t = 0; t < T - 1; ++t)
                {
                    transition_matrix[path[t], path[t + 1]]++;
                }
            }


            // Count emissions for each state
            List <double>[] clusters = new List <double> [N];
            for (int i = 0; i < N; i++)
            {
                clusters[i] = new List <double>();
            }

            // Collect the observations emitted in each state
            for (int k = 0; k < K; k++)
            {
                for (int t = 0; t < path_db[k].Length; t++)
                {
                    int    state  = path_db[k][t];
                    double symbol = observations_db[k][t];

                    clusters[state].Add(symbol);
                }
            }


            // Estimate probability distributions
            for (int i = 0; i < N; i++)
            {
                if (clusters[i].Count > 0)
                {
                    mModel.EmissionModels[i].Process(clusters[i].ToArray());
                }
            }

            if (mUseLaplaceRule)
            {
                for (int i = 0; i < N; ++i)
                {
                    initial[i]++;

                    for (int j = 0; j < N; ++j)
                    {
                        transition_matrix[i, j]++;
                    }
                }
            }

            int initial_sum = initial.Sum();

            int[] transition_sum_vec = Sum(transition_matrix, 1);

            for (int i = 0; i < N; ++i)
            {
                mModel.LogProbabilityVector[i] = System.Math.Log(initial[i] / (double)initial_sum);
            }

            for (int i = 0; i < N; ++i)
            {
                double transition_sum = (double)transition_sum_vec[i];
                for (int j = 0; j < N; ++j)
                {
                    mModel.LogTransitionMatrix[i, j] = System.Math.Log(transition_matrix[i, j] / transition_sum);
                }
            }

            double logLikelihood = double.NegativeInfinity;

            for (int i = 0; i < observations_db.Length; i++)
            {
                logLikelihood = LogHelper.LogSum(logLikelihood, mModel.Evaluate(observations_db[i]));
            }

            return(logLikelihood);
        }
        /// <summary>
        /// Viterbi decoding without scaling: finds the most likely hidden state path for the given observations
        /// </summary>
        /// <param name="A">Transition matrix</param>
        /// <param name="B">Emission matrix: B[i, observation[t]] is the probability of observing observation[t] in state i</param>
        /// <param name="pi">Initial state vector</param>
        /// <param name="observations">Observed time series</param>
        /// <param name="logLikelihood">The log joint probability of the returned path and the observations</param>
        /// <returns>The most likely state path</returns>
        public static int[] Forward(double[,] A, double[,] B, double[] pi, int[] observations, out double logLikelihood)
        {
            int T = observations.Length;
            int N = pi.Length;

            DiagnosticsHelper.Assert(A.GetLength(0) == N);
            DiagnosticsHelper.Assert(A.GetLength(1) == N);
            DiagnosticsHelper.Assert(B.GetLength(0) == N);

            int[,] V = new int[T, N];

            double[,] fwd = new double[T, N];

            for (int i = 0; i < N; ++i)
            {
                fwd[0, i] = pi[i] * B[i, observations[0]];
            }

            double maxWeight = 0;
            int    maxState  = 0;

            for (int t = 1; t < T; ++t)
            {
                for (int i = 0; i < N; ++i)
                {
                    maxWeight = fwd[t - 1, 0] * A[0, i];
                    maxState  = 0;

                    double weight = 0;
                    for (int j = 1; j < N; ++j)
                    {
                        weight = fwd[t - 1, j] * A[j, i];
                        if (maxWeight < weight)
                        {
                            maxWeight = weight;
                            maxState  = j;
                        }
                    }

                    fwd[t, i] = maxWeight * B[i, observations[t]];
                    V[t, i]   = maxState;
                }
            }

            maxState  = 0;
            maxWeight = fwd[T - 1, 0];
            for (int i = 0; i < N; ++i)
            {
                if (fwd[T - 1, i] > maxWeight)
                {
                    maxWeight = fwd[T - 1, i];
                    maxState  = i;
                }
            }

            int[] path = new int[T];
            path[T - 1] = maxState;
            for (int t = T - 2; t >= 0; --t)
            {
                path[t] = V[t + 1, path[t + 1]];
            }

            logLikelihood = System.Math.Log(maxWeight);

            return(path);
        }
        public double Run(int[][] observations_db, int[][] path_db)
        {
            int K = observations_db.Length;

            DiagnosticsHelper.Assert(path_db.Length == K);

            int N = mModel.StateCount;
            int M = mModel.SymbolCount;

            int[] initial = new int[N];
            int[,] transition_matrix = new int[N, N];
            int[,] emission_matrix   = new int[N, M];

            for (int k = 0; k < K; ++k)
            {
                initial[path_db[k][0]]++;
            }

            int T = 0;

            for (int k = 0; k < K; ++k)
            {
                int[] path         = path_db[k];
                int[] observations = observations_db[k];

                T = path.Length;
                for (int t = 0; t < T - 1; ++t)
                {
                    transition_matrix[path[t], path[t + 1]]++;
                }

                for (int t = 0; t < T; ++t)
                {
                    emission_matrix[path[t], observations[t]]++;
                }
            }

            if (mUseLaplaceRule)
            {
                for (int i = 0; i < N; ++i)
                {
                    initial[i]++;

                    for (int j = 0; j < N; ++j)
                    {
                        transition_matrix[i, j]++;
                    }

                    for (int j = 0; j < M; ++j)
                    {
                        emission_matrix[i, j]++;
                    }
                }
            }

            int initial_sum = initial.Sum();

            int[] transition_sum_vec = Sum(transition_matrix, 1);
            int[] emission_sum_vec   = Sum(emission_matrix, 1);

            for (int i = 0; i < N; ++i)
            {
                mModel.LogProbabilityVector[i] = System.Math.Log(initial[i] / (double)initial_sum);
            }

            for (int i = 0; i < N; ++i)
            {
                double transition_sum = (double)transition_sum_vec[i];
                for (int j = 0; j < N; ++j)
                {
                    mModel.LogTransitionMatrix[i, j] = System.Math.Log(transition_matrix[i, j] / transition_sum);
                }
            }

            for (int i = 0; i < N; ++i)
            {
                double emission_sum = (double)emission_sum_vec[i];
                for (int m = 0; m < M; ++m)
                {
                    mModel.LogEmissionMatrix[i, m] = System.Math.Log(emission_matrix[i, m] / emission_sum);
                }
            }

            double logLikelihood = double.NegativeInfinity;

            for (int i = 0; i < observations_db.Length; i++)
            {
                logLikelihood = LogHelper.LogSum(logLikelihood, mModel.Evaluate(observations_db[i]));
            }

            return(logLikelihood);
        }
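        // Hedged sketch (not part of the original source): the normalisation used above, shown in
        // isolation. A row of transition (or emission) counts becomes a row of log probabilities,
        // with optional add-one (Laplace) smoothing as toggled by mUseLaplaceRule in the original.
        private static double[] LogNormalizeCountsSketch(int[] counts, bool useLaplaceRule)
        {
            double[] logProbs = new double[counts.Length];

            double total = 0.0;
            for (int j = 0; j < counts.Length; ++j)
            {
                total += counts[j] + (useLaplaceRule ? 1 : 0);
            }

            for (int j = 0; j < counts.Length; ++j)
            {
                logProbs[j] = System.Math.Log((counts[j] + (useLaplaceRule ? 1 : 0)) / total);
            }
            return logProbs;
        }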
        /// <summary>
        /// Baum-Welch (EM) learning for univariate continuous observations
        /// </summary>
        /// <param name="observations_db">Database of observation sequences</param>
        /// <param name="weights">Optional per-sequence weights; pass null to give every sequence unit weight</param>
        /// <returns>The log-likelihood value used by the convergence test, taken from the final iteration</returns>
        public double Run(double[][] observations_db, double[] weights)
        {
            DiagnosticsHelper.Assert(mModel.Dimension == 1);

            int K = observations_db.Length;

            mLogWeights = new double[K]; // entries default to 0 = log(1), i.e. each sequence gets unit weight
            if (weights != null)
            {
                for (int k = 0; k < K; ++k)
                {
                    mLogWeights[k] = System.Math.Log(weights[k]);
                }
            }

            double[] observations_db_1d = MathHelper.Concatenate <double>(observations_db);
            double[] Bweights           = new double[observations_db_1d.Length];

            int    N   = mModel.StateCount;
            double lnK = System.Math.Log(K);

            double[,] logA = mModel.LogTransitionMatrix;
            DistributionModel[] probB = mModel.EmissionModels;
            double[]            logPi = mModel.LogProbabilityVector;

            int M = mModel.SymbolCount;

            mLogGamma = new double[K][, ];
            mLogKsi   = new double[K][][, ];

            for (int k = 0; k < K; ++k)
            {
                int T = observations_db[k].Length;
                mLogGamma[k] = new double[T, N];
                mLogKsi[k]   = new double[T][, ];

                for (int t = 0; t < T; ++t)
                {
                    mLogKsi[k][t] = new double[N, N];
                }
            }

            int maxT = observations_db.Max(x => x.Length);

            double[,] lnfwd = new double[maxT, N];
            double[,] lnbwd = new double[maxT, N];

            // Initialize the model log-likelihoods
            double newLogLikelihood = Double.NegativeInfinity;
            double oldLogLikelihood = Double.NegativeInfinity;

            int    iteration          = 0;
            double deltaLogLikelihood = 0;
            bool   should_continue    = true;

            do // Until convergence or max iterations is reached
            {
                oldLogLikelihood = newLogLikelihood;
                newLogLikelihood = Double.NegativeInfinity;

                for (int k = 0; k < K; ++k)
                {
                    double[] observations = observations_db[k];
                    double[,] logGamma = mLogGamma[k];
                    double[][,] logKsi = mLogKsi[k];
                    double w = mLogWeights[k];
                    int    T = observations.Length;

                    ForwardBackwardAlgorithm.LogForward(logA, probB, logPi, observations, lnfwd);
                    ForwardBackwardAlgorithm.LogBackward(logA, probB, logPi, observations, lnbwd);

                    // Compute Gamma values
                    for (int t = 0; t < T; ++t)
                    {
                        double lnsum = double.NegativeInfinity;
                        for (int i = 0; i < N; ++i)
                        {
                            logGamma[t, i] = lnfwd[t, i] + lnbwd[t, i] + w;
                            lnsum          = LogHelper.LogSum(lnsum, logGamma[t, i]);
                        }
                        if (lnsum != Double.NegativeInfinity)
                        {
                            for (int i = 0; i < N; ++i)
                            {
                                logGamma[t, i] = logGamma[t, i] - lnsum;
                            }
                        }
                    }

                    // Compute Ksi values
                    for (int t = 0; t < T - 1; ++t)
                    {
                        double lnsum = double.NegativeInfinity;
                        double x     = observations[t + 1];

                        for (int i = 0; i < N; ++i)
                        {
                            for (int j = 0; j < N; ++j)
                            {
                                logKsi[t][i, j] = lnfwd[t, i] + logA[i, j] + lnbwd[t + 1, j] + MathHelper.LogProbabilityFunction(probB[j], x) + w;
                                lnsum           = LogHelper.LogSum(lnsum, logKsi[t][i, j]);
                            }
                        }

                        if (lnsum != double.NegativeInfinity)
                        {
                            for (int i = 0; i < N; ++i)
                            {
                                for (int j = 0; j < N; ++j)
                                {
                                    logKsi[t][i, j] = logKsi[t][i, j] - lnsum;
                                }
                            }
                        }
                    }

                    // Accumulate this sequence's log-likelihood so that the convergence test below
                    // reflects the whole observation database rather than only the last sequence.
                    double sequenceLogLikelihood = Double.NegativeInfinity;
                    for (int i = 0; i < N; ++i)
                    {
                        sequenceLogLikelihood = LogHelper.LogSum(sequenceLogLikelihood, lnfwd[T - 1, i]);
                    }
                    newLogLikelihood = LogHelper.LogSum(newLogLikelihood, sequenceLogLikelihood);
                }

                newLogLikelihood /= K;

                deltaLogLikelihood = newLogLikelihood - oldLogLikelihood;

                iteration++;

                if (ShouldTerminate(deltaLogLikelihood, iteration))
                {
                    should_continue = false;
                }
                else
                {
                    // update pi
                    for (int i = 0; i < N; ++i)
                    {
                        double lnsum = double.NegativeInfinity;
                        for (int k = 0; k < K; ++k)
                        {
                            lnsum = LogHelper.LogSum(lnsum, mLogGamma[k][0, i]);
                        }
                        logPi[i] = lnsum - lnK;
                    }

                    // update A
                    for (int i = 0; i < N; ++i)
                    {
                        for (int j = 0; j < N; ++j)
                        {
                            double lndenom = double.NegativeInfinity;
                            double lnnum   = double.NegativeInfinity;

                            for (int k = 0; k < K; ++k)
                            {
                                int T = observations_db[k].Length;

                                for (int t = 0; t < T - 1; ++t)
                                {
                                    lnnum   = LogHelper.LogSum(lnnum, mLogKsi[k][t][i, j]);
                                    lndenom = LogHelper.LogSum(lndenom, mLogGamma[k][t, i]);
                                }
                            }

                            logA[i, j] = (lnnum == lndenom) ? 0 : lnnum - lndenom;
                        }
                    }


                    // update B
                    for (int i = 0; i < N; ++i)
                    {
                        double lnsum = double.NegativeInfinity;

                        for (int k = 0, w = 0; k < K; ++k)
                        {
                            double[] observations = observations_db[k];
                            int      T            = observations.Length;

                            for (int t = 0; t < T; ++t, ++w)
                            {
                                Bweights[w] = mLogGamma[k][t, i];
                                lnsum       = LogHelper.LogSum(lnsum, Bweights[w]);
                            }
                        }

                        if (lnsum != double.NegativeInfinity)
                        {
                            for (int w = 0; w < Bweights.Length; ++w)
                            {
                                Bweights[w] = Bweights[w] - lnsum;
                            }
                        }

                        for (int w = 0; w < Bweights.Length; ++w)
                        {
                            Bweights[w] = System.Math.Exp(Bweights[w]);
                        }

                        probB[i].Process(observations_db_1d, Bweights);
                    }
                }
            } while (should_continue);

            return(newLogLikelihood);
        }
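        // Hedged sketch (not part of the original source): the convergence test ShouldTerminate used
        // above is not shown in this snippet. A typical implementation stops when the change in
        // log-likelihood falls below a tolerance or an iteration cap is reached; the tolerance and
        // cap parameters here are illustrative only.
        private static bool ShouldTerminateSketch(double deltaLogLikelihood, int iteration, double tolerance, int maxIterations)
        {
            if (double.IsNaN(deltaLogLikelihood))
            {
                return true;
            }
            if (System.Math.Abs(deltaLogLikelihood) < tolerance)
            {
                return true;
            }
            return maxIterations > 0 && iteration >= maxIterations;
        }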