Example #1
    public static void CreateModelFromFrames(string readPath, string writePath)
    {
        // Load the recorded frames and convert them into observation sequences
        SequenceList seq = Utils.FramesToSequenceList(Utils.LoadListListFrame(readPath));

        // One multivariate normal emission distribution, sized to the observation vector length
        MultivariateNormalDistribution mnd = new MultivariateNormalDistribution(seq.GetArray()[0][0].Length);

        // Hidden Markov model with a 5-state forward (left-to-right) topology
        HiddenMarkovModel<MultivariateNormalDistribution> hmm =
            new HiddenMarkovModel<MultivariateNormalDistribution>(new Forward(5), mnd);

        // Baum-Welch estimates the transition and emission parameters from the sequences
        var teacher = new BaumWelchLearning<MultivariateNormalDistribution>(hmm);

        teacher.Tolerance      = 0.0001;      // train until log-likelihood changes less than 0.0001
        teacher.Iterations     = 0;           // and use as many iterations as needed
        teacher.FittingOptions = new NormalOptions()
        {
            Diagonal       = true,            // only diagonal covariance matrices
            Regularization = 1e-5             // avoid non-positive definite errors
        };

        double logLikelihood = teacher.Run(seq.GetArray());

        Debug.Log(readPath + " - " + seq.sequences.Count + " - " + logLikelihood);

        hmm.Save(writePath);
    }
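
A minimal sketch of reading the saved model back and scoring new sequences with it, assuming the static HiddenMarkovModel<MultivariateNormalDistribution>.Load counterpart of Save and the Evaluate method are available in the same Accord.NET version used above; EvaluateFrames and its parameters are hypothetical names used for illustration.

    // Sketch only: assumes HiddenMarkovModel<T>.Load and Evaluate exist in the
    // Accord.NET version used above; the method and parameter names are hypothetical.
    public static void EvaluateFrames(string modelPath, string framesPath)
    {
        var hmm = HiddenMarkovModel<MultivariateNormalDistribution>.Load(modelPath);

        SequenceList seq = Utils.FramesToSequenceList(Utils.LoadListListFrame(framesPath));

        foreach (double[][] sequence in seq.GetArray())
        {
            // Log-likelihood of this observation sequence under the trained model
            double logLikelihood = hmm.Evaluate(sequence);
            Debug.Log(framesPath + " - " + logLikelihood);
        }
    }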
Example #2
        /// <summary>
        /// Train a hidden Markov model on the given data and save the resulting classifier.
        /// </summary>
        /// <param name="Data_Path">Path of the data on the disk.</param>
        /// <param name="Classifier_Path">Path where we want to save the classifier on the disk.</param>
        /// <param name="Classifier_Name">Name of the classifier we want to save.</param>
        public void HMM(String Data_Path, String Classifier_Path, String Classifier_Name)
        {
            double[][] input     = Serialize.DeSerializeObject<double[][]>(Data_Path);
            int[][]    sequences = new int[input.Length][];

            // Convert each two-element observation into a discrete symbol sequence
            // (the learner below expects integer symbols drawn from {0, 1})
            for (int i = 0; i < input.Length; i++)
            {
                int[] temp = new int[2];
                temp[0]      = (int)input[i][0];
                temp[1]      = (int)input[i][1];
                sequences[i] = temp;
            }

            // Create the learning algorithm
            var teacher = new BaumWelchLearning()
            {
                Topology        = new Ergodic(3), // Create a new Hidden Markov Model with 3 states for
                NumberOfSymbols = 2,              // an output alphabet of two characters (zero and one)
                Tolerance       = 0.0001,         // train until log-likelihood changes less than 0.0001
                Iterations      = 0               // and use as many iterations as needed
            };

            // Estimate the model
            HiddenMarkovModel hmm = teacher.Learn(sequences);

            hmm.Save(Path.Combine(Classifier_Path, Classifier_Name));

            // Example of scoring the training sequences with the estimated model:
            //for (int i = 0; i < sequences.Length; i++)
            //{
            //    double fl1 = hmm.LogLikelihood(sequences[i]);
            //    Console.WriteLine(fl1);
            //}
        }
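
A hedged sketch of loading the saved model back and scoring a single sequence, mirroring the commented-out loop above; it assumes the static HiddenMarkovModel.Load counterpart of Save in the same Accord.NET version, and ScoreSequence is a hypothetical method name.

        // Sketch only: assumes HiddenMarkovModel.Load is the counterpart of Save
        // in the Accord.NET version used above; ScoreSequence is a hypothetical name.
        public double ScoreSequence(String Classifier_Path, String Classifier_Name, int[] sequence)
        {
            HiddenMarkovModel hmm = HiddenMarkovModel.Load(Path.Combine(Classifier_Path, Classifier_Name));

            // Log-likelihood of the observation sequence under the saved model
            return hmm.LogLikelihood(sequence);
        }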
Example #3
        /*
        public static List<List<Frame>> JoinListListFrame(List<List<Frame>> listF1, List<List<Frame>> listF2)
        {
            List<List<Frame>> returnVal = new List<List<Frame>>();
            returnVal.AddRange(listF1);
            returnVal.AddRange(listF2);
            return returnVal;
        }
        */

        /// <summary>
        /// Persist a trained continuous-density HMM to disk.
        /// </summary>
        public static void SaveHMM(HiddenMarkovModel<MultivariateNormalDistribution> model, string path)
        {
            model.Save(path);
        }
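
For symmetry, a minimal LoadHMM counterpart, assuming the static HiddenMarkovModel<MultivariateNormalDistribution>.Load method exists in the same Accord.NET version as the Save call above.

        // Sketch only: assumes HiddenMarkovModel<T>.Load exists as the counterpart
        // of Save in the Accord.NET version used here.
        public static HiddenMarkovModel<MultivariateNormalDistribution> LoadHMM(string path)
        {
            return HiddenMarkovModel<MultivariateNormalDistribution>.Load(path);
        }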