Example #1
        /// <summary>
        ///   Optimizes the defined function.
        /// </summary>
        ///
        /// <param name="values">The initial guess values for the parameters.</param>
        ///
        /// <returns>The value of the function at the minimum found.</returns>
        ///
        public double Minimize(double[] values)
        {
            solution = values;
            convergence.Clear();

            do
            {
                runEpoch();
            } while (!convergence.HasConverged);

            return convergence.NewValue;
        }
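
A minimal usage sketch for this method, assuming it belongs to an iterative optimizer built around a convergence criterion; the `IterativeOptimizer` type, its constructor, and the objective function below are hypothetical illustrations, not part of the listing above:

        // Hedged sketch: only Minimize(double[]) mirrors the listing above;
        // the optimizer type and the objective function are hypothetical.
        var optimizer = new IterativeOptimizer(
            function: x => Math.Pow(x[0] - 3.0, 2) + x[1] * x[1]);

        double[] initialGuess = { 0.0, 0.0 };

        // Runs epochs until the convergence criterion reports HasConverged,
        // then returns the final function value.
        double minimum = optimizer.Minimize(initialGuess);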
Example #2
        /// <summary>
        ///   Runs the learning algorithm.
        /// </summary>
        /// 
        /// <remarks>
        ///   Learning problem: given some training observation sequences O = {o1, o2, ..., oK}
        ///   and the general structure of the HMM (the number of hidden and visible states),
        ///   determine the HMM parameters M = (A, B, pi) that best fit the training data.
        /// </remarks>
        /// 
        /// <returns>The log-likelihood of the model after training.</returns>
        /// 
        public double Run(params int[][] observations)
        {
            var model = mle.Model;
            convergence.Clear();

            double logLikelihood = Double.NegativeInfinity;
            for (int i = 0; i < observations.Length; i++)
                logLikelihood = Special.LogSum(logLikelihood, model.Evaluate(observations[i]));

            double newLogLikelihood = Double.NegativeInfinity;

            do // Until convergence or max iterations is reached
            {
                logLikelihood = newLogLikelihood;

                // Compute the Viterbi path for all sequences
                int[][] paths = new int[observations.Length][];
                for (int i = 0; i < observations.Length; i++)
                    paths[i] = model.Decode(observations[i]);

                // Compute Maximum Likelihood Estimation 
                mle.Run(observations, paths);

                // Compute log-likelihood
                newLogLikelihood = Double.NegativeInfinity;
                for (int i = 0; i < observations.Length; i++)
                    newLogLikelihood = Special.LogSum(newLogLikelihood, model.Evaluate(observations[i]));

                // Check convergence
                convergence.NewValue = newLogLikelihood;

            } while (!convergence.HasConverged);

            return newLogLikelihood;
        }
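
This snippet has the shape of a Viterbi (segmental k-means) learning pass for a discrete hidden Markov model. Under that reading, a usage sketch might look as follows; the `HiddenMarkovModel` and `ViterbiLearning` types and the `Tolerance`/`Iterations` properties follow the Accord.NET naming this code resembles, but are assumptions here:

        // Hedged sketch, assuming an Accord.NET-style API.
        int[][] sequences =
        {
            new[] { 0, 1, 1, 1, 0 },
            new[] { 0, 0, 1, 1 },
            new[] { 0, 1, 1, 0 },
        };

        // Two hidden states over a binary observation alphabet.
        var model = new HiddenMarkovModel(2, 2);

        var teacher = new ViterbiLearning(model)
        {
            Tolerance = 0.001,  // stop when the log-likelihood change is small
            Iterations = 100    // hard cap on learning epochs
        };

        // Returns the final log-likelihood, as in the Run method above.
        double logLikelihood = teacher.Run(sequences);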
Example #3
        /// <summary>
        ///   Runs the learning algorithm.
        /// </summary>
        ///
        /// <remarks>
        ///   Learning problem: given some training observation sequences O = {o1, o2, ..., oK}
        ///   and the general structure of the HMM (the number of hidden and visible states),
        ///   determine the HMM parameters M = (A, B, pi) that best fit the training data.
        /// </remarks>
        ///
        /// <returns>The log-likelihood of the model after training.</returns>
        ///
        public double Run(params Array[] observations)
        {
            var model = mle.Model;

            convergence.Clear();

            // Convert the generic representation to a vector of multivariate sequences
            double[][][] vectorObservations = new double[observations.Length][][];
            for (int i = 0; i < observations.Length; i++)
            {
                vectorObservations[i] = convert(observations[i], model.Dimension);
            }

            double logLikelihood = Double.NegativeInfinity;

            for (int i = 0; i < observations.Length; i++)
            {
                logLikelihood = Special.LogSum(logLikelihood, model.Evaluate(vectorObservations[i]));
            }

            double newLogLikelihood = Double.NegativeInfinity;

            do // Until convergence or max iterations is reached
            {
                logLikelihood = newLogLikelihood;

                // Compute the Viterbi path for all sequences
                int[][] paths = new int[observations.Length][];
                for (int i = 0; i < observations.Length; i++)
                {
                    paths[i] = model.Decode(vectorObservations[i]);
                }

                // Compute Maximum Likelihood Estimation
                mle.Run(vectorObservations, paths);

                // Compute log-likelihood
                newLogLikelihood = Double.NegativeInfinity;
                for (int i = 0; i < observations.Length; i++)
                {
                    newLogLikelihood = Special.LogSum(newLogLikelihood, model.Evaluate(vectorObservations[i]));
                }

                // Check convergence
                convergence.NewValue = newLogLikelihood;
            } while (!convergence.HasConverged);

            return newLogLikelihood;
        }
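
For this multivariate overload, a corresponding sketch; the generic `HiddenMarkovModel<TDistribution>` and `ViterbiLearning<TDistribution>` types and the `MultivariateNormalDistribution` density again follow the Accord.NET naming this code resembles, and are assumptions here:

        // Hedged sketch for the Array[] overload: each sequence is an array
        // of observation vectors whose length matches model.Dimension.
        double[][][] sequences =
        {
            new[] { new[] { 0.1, 0.2 }, new[] { 0.3, 0.1 } },
            new[] { new[] { 0.2, 0.2 }, new[] { 0.1, 0.4 }, new[] { 0.3, 0.3 } },
        };

        // Two hidden states emitting two-dimensional Gaussian observations.
        var density = new MultivariateNormalDistribution(2);
        var model = new HiddenMarkovModel<MultivariateNormalDistribution>(2, density);

        var teacher = new ViterbiLearning<MultivariateNormalDistribution>(model)
        {
            Tolerance = 0.001,
            Iterations = 100
        };

        // Each double[][] sequence binds to the params Array[] parameter.
        double logLikelihood = teacher.Run(sequences);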