Example #1
        public static void Main(string[] args)
        {
            var data = new Sequences("nbt.1882-S6.txt");

            // Now we create a hidden Markov model with arbitrary probabilities
            var hmm = new HiddenMarkovModel(states: data.StateNum, symbols: data.SymbolNum);

            // Create a Baum-Welch learning algorithm to teach it
            var teacher = new BaumWelchLearning(hmm);

            // and call its Run method to start learning
            var    trainSamples = data.PartOfSequences(0, 2);
            double error        = teacher.Run(trainSamples);

            var testSamples = data.PartOfSequences(1, 2);

            // Let's now check the probability of some training sequences:
            double prob1 = Math.Exp(hmm.Evaluate(trainSamples[0]));
            double prob2 = Math.Exp(hmm.Evaluate(trainSamples[1]));
            double prob3 = Math.Exp(hmm.Evaluate(trainSamples[2]));

            // And compare them against sequences from the held-out test set:
            double prob4 = Math.Exp(hmm.Evaluate(testSamples[0]));
            double prob5 = Math.Exp(hmm.Evaluate(testSamples[1]));
        }
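A hedged aside (not part of the original sample): averaging the per-sequence log-likelihoods over each split is a quick sanity check that the model fits its training half better than the held-out half. The helper below relies only on the hmm.Evaluate call already used above.

        private static double AverageLogLikelihood(HiddenMarkovModel hmm, int[][] samples)
        {
            // Mean log-likelihood of the samples under the trained model
            double sum = 0;
            foreach (int[] sequence in samples)
                sum += hmm.Evaluate(sequence);
            return sum / samples.Length;
        }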
Example #2
        private static HiddenMarkovModel trainHMM()
        {
            int states  = 3;
            int symbols = 3;

            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 0, 1, 1, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 2, 2 },
                new int[] { 0, 1, 1, 1, 2, 1 },
                new int[] { 0, 1, 1, 2, 2 },
                new int[] { 0, 0, 1, 1, 1, 2, 1 },
                new int[] { 0, 0, 0, 1, 1, 1, 2, 1 },
                new int[] { 0, 1, 1, 2, 2, 2 },
            };

            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(states), symbols);

            var teacher = new BaumWelchLearning(hmm)
            {
                Iterations = 100, Tolerance = 0
            };

            double ll = teacher.Run(sequences);

            return(hmm);
        }
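A brief usage sketch for trainHMM (assumed, not from the original source): the returned model should assign a much higher probability to sequences following the 0 → 1 → 2 progression seen in training than to ones that do not.

        private static void demoTrainHMM()
        {
            HiddenMarkovModel hmm = trainHMM();

            // A sequence matching the trained 0 -> 1 -> 2 pattern scores high...
            double pGood = Math.Exp(hmm.Evaluate(new int[] { 0, 1, 1, 2, 2 }));

            // ...while a reversed sequence should score near zero.
            double pBad = Math.Exp(hmm.Evaluate(new int[] { 2, 2, 1, 0 }));

            Console.WriteLine("{0} vs {1}", pGood, pBad);
        }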
Example #3
        /// <summary>
        /// Classify our data using a hidden Markov model classifier and save the model.
        /// </summary>
        /// <param name="Data_Path">Path of the data on the disk.</param>
        /// <param name="Classifier_Path">Path where we want to save the classifier on the disk.</param>
        /// <param name="Classifier_Name">Name of the classifier we want to save.</param>
        /// <returns></returns>
        public void HMM(String Data_Path, String Classifier_Path, String Classifier_Name)
        {
            double[][] input     = Serialize.DeSerializeObject <double[][]>(Data_Path);
            int[][]    sequences = new int[input.Length][];

            for (int i = 0; i < input.Length; i++)
            {
                int[] temp = new int[2];
                temp[0]      = (int)input[i][0];
                temp[1]      = (int)input[i][1];
                sequences[i] = temp;
            }

            // Create the learning algorithm
            var teacher = new BaumWelchLearning()
            {
                Topology        = new Ergodic(3), // Create a new Hidden Markov Model with 3 states for
                NumberOfSymbols = 2,              // an output alphabet of two characters (zero and one)
                Tolerance       = 0.0001,         // train until log-likelihood changes less than 0.0001
                Iterations      = 0               // and use as many iterations as needed
            };

            // Estimate the model
            HiddenMarkovModel hmm = teacher.Learn(sequences);

            hmm.Save(Path.Combine(Classifier_Path, Classifier_Name));


            //for (int i = 0; i < sequences.Length; i++)
            //{
            //    double fl1 = hmm.LogLikelihood(sequences[i]);
            //    Console.WriteLine(fl1);

            //}
        }
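A hedged counterpart sketch for reading the classifier saved above back from disk. It assumes the static HiddenMarkovModel.Load that mirrored Save in older Accord.NET releases; newer releases would use Accord.IO's Serializer.Load<HiddenMarkovModel>(path) instead.

        public HiddenMarkovModel LoadHMM(String Classifier_Path, String Classifier_Name)
        {
            // Reload the model persisted by HMM() above
            string path = Path.Combine(Classifier_Path, Classifier_Name);
            return HiddenMarkovModel.Load(path);
        }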
Example #4
        private Tradetype PredictNextTrade()
        {
            var res = Tradetype.Winning;

            if (_tradeReturns.IsReady)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 3);
                int[]             observationSequence = GetSequence();
                BaumWelchLearning teacher             = new BaumWelchLearning(hmm);

                // and call its Run method to start learning
                double error   = teacher.Run(observationSequence);
                int[]  predict = hmm.Predict(observationSequence, 1);

                if (predict[0] == 0)
                {
                    res = Tradetype.Losing;
                }
                else if (predict[0] == 1)
                {
                    res = Tradetype.Neutral;
                }
                else if (predict[0] == 2)
                {
                    res = Tradetype.Winning;
                }
            }

            return(res);
        }
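GetSequence() is not shown in this example; the following is a purely hypothetical sketch of what it could look like, assuming _tradeReturns is a window of recent trade returns and the symbols 0/1/2 encode losing/neutral/winning trades (names and thresholds are illustrative only).

        private int[] GetSequence()
        {
            // Hypothetical mapping: 0 = losing, 1 = neutral, 2 = winning
            var symbols = new List<int>();
            foreach (decimal ret in _tradeReturns)
            {
                if (ret < 0m) symbols.Add(0);
                else if (ret == 0m) symbols.Add(1);
                else symbols.Add(2);
            }
            return symbols.ToArray();
        }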
Example #5
        public void PredictTest3()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 4, 3, 5 },
                new[] { 1, 2, 5 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int[] input = { 1, 2 }; // base sequence for prediction

            double[] logLikelihoods;

            // Predict the next observation in sequence
            int prediction = hmm.Predict(input, out logLikelihoods);

            var probs = Matrix.Exp(logLikelihoods);

            // At this point, the prediction probabilities
            // should be spread roughly evenly over symbols 3, 4 and 5
            Assert.AreEqual(probs.Length, 6);
            Assert.AreEqual(probs[0], 0.00, 0.01);
            Assert.AreEqual(probs[1], 0.00, 0.01);
            Assert.AreEqual(probs[2], 0.00, 0.01);
            Assert.AreEqual(probs[3], 0.33, 0.05);
            Assert.AreEqual(probs[4], 0.33, 0.05);
            Assert.AreEqual(probs[5], 0.33, 0.05);


            double[][] probabilities2;

            // Predict the next 2 observations in the sequence
            int[] prediction2 = hmm.Predict(input, 2, out probabilities2);

            Assert.AreEqual(probabilities2.Length, 2);
            Assert.AreEqual(probabilities2[0].Length, 6);
            Assert.AreEqual(probabilities2[1].Length, 6);

            Assert.IsTrue(probabilities2[0].IsEqual(logLikelihoods));
        }
Example #6
        private TRADETYPE PredictNextTrade()
        {
            var res = TRADETYPE.WINNING;

            if (_tradeReturns.Count == 4)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 3);
                int[]             observationSequence = GetSequence(_tradeReturns);
                BaumWelchLearning teacher             = new BaumWelchLearning(hmm);

                // and call its Run method to start learning
                double error   = teacher.Run(observationSequence);
                int[]  predict = hmm.Predict(observationSequence, 1);
                if (predict[0] == 0)
                {
                    res = TRADETYPE.LOSING;
                }
                else if (predict[0] == 1)
                {
                    res = TRADETYPE.NEUTRAL;
                }
                else if (predict[0] == 2)
                {
                    res = TRADETYPE.WINNING;
                }
            }
            return(res);
        }
Example #7
        public static string TrainingMode = "bayes"; //switch to "markov" to try the Hidden Markov model

        public void TrainHiddenMarkovModel(List <string[]> trainingData)
        {
            Accord.Math.Random.Generator.Seed = 42;

            // Convert the training data into an array of node pairs
            var nodePairs = trainingData.ToArray();

            // Transform data to sequence of integer labels using a codification codebook:
            var codebook = new Codification("Nodes", nodePairs);

            // Create the training data for the models:
            var sequence = codebook.Transform("Nodes", nodePairs);

            // Specify a forward topology
            var topology = new Forward(4);
            var symbols  = codebook["Nodes"].NumberOfSymbols;

            // Create the hidden Markov model
            var hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            var teacher = new BaumWelchLearning(hmm);

            // Teach the model
            teacher.Learn(sequence);

            // Use the Serializer class to save model and codebook
            Serializer.Save(codebook, "thesaurus_codebook.accord");
            Serializer.Save(hmm, "thesaurus_HMModel.accord");
        }
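A hedged counterpart sketch: reloading the codebook and model persisted above via Accord.IO's Serializer.Load (the generic counterpart of the Serializer.Save calls in the method) and mapping generated symbols back to node labels. Translate here mirrors the usage in the Generate examples further down this page; newer releases expose Revert for the same purpose.

        public string[] GenerateFromSavedModel(int count)
        {
            // Reload what TrainHiddenMarkovModel persisted
            var codebook = Serializer.Load<Codification>("thesaurus_codebook.accord");
            var hmm      = Serializer.Load<HiddenMarkovModel>("thesaurus_HMModel.accord");

            // Sample new symbols and map them back to node labels
            int[] sample = hmm.Generate(count);
            return codebook.Translate("Nodes", sample);
        }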
Example #8
    public static void CreateModelFromFrames(string readPath, string writePath)
    {
        SequenceList seq = Utils.FramesToSequenceList(Utils.LoadListListFrame(readPath));

        HiddenMarkovModel <MultivariateNormalDistribution> hmm;
        MultivariateNormalDistribution mnd = new MultivariateNormalDistribution(seq.GetArray()[0][0].Length);

        hmm = new HiddenMarkovModel <MultivariateNormalDistribution>(new Forward(5), mnd);

        var teacher = new BaumWelchLearning <MultivariateNormalDistribution>(hmm);

        teacher.Tolerance      = 0.0001;
        teacher.Iterations     = 0;
        teacher.FittingOptions = new NormalOptions()
        {
            Diagonal       = true,            // only diagonal covariance matrices
            Regularization = 1e-5             // avoid non-positive definite errors
        };

        double logLikelihood = teacher.Run(seq.GetArray());

        Debug.Log(readPath + " - " + seq.sequences.Count + " - " + logLikelihood);

        hmm.Save(writePath);
    }
Example #9
        public void PredictTest2()
        {
            // Create continuous sequences. In the sequence below, there
            // seem to be two states, one for values equal to 1 and another
            // for values equal to 2.
            double[][] sequences = new double[][]
            {
                new double[] { 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2 }
            };

            // Specify an initial normal distribution for the samples.
            NormalDistribution density = new NormalDistribution();

            // Creates a continuous hidden Markov Model with two states organized in a forward
            //  topology and an underlying univariate Normal distribution as probability density.
            var model = new HiddenMarkovModel <NormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier until the
            // difference in the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning <NormalDistribution>(model)
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                // However, we will need to specify a regularization constant as the
                //  variance of each state will likely be zero (all values are equal)
                FittingOptions = new NormalOptions()
                {
                    Regularization = double.Epsilon
                }
            };

            // Fit the model
            double likelihood = teacher.Run(sequences);


            double a1 = model.Predict(new double[] { 1, 2, 1 });
            double a2 = model.Predict(new double[] { 1, 2, 1, 2 });

            Assert.AreEqual(2, a1, 1e-10);
            Assert.AreEqual(1, a2, 1e-10);
            Assert.IsFalse(Double.IsNaN(a1));
            Assert.IsFalse(Double.IsNaN(a2));

            double p1, p2;
            Mixture <NormalDistribution> d1, d2;
            double b1 = model.Predict(new double[] { 1, 2, 1 }, out p1, out d1);
            double b2 = model.Predict(new double[] { 1, 2, 1, 2 }, out p2, out d2);

            Assert.AreEqual(2, b1, 1e-10);
            Assert.AreEqual(1, b2, 1e-10);
            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            Assert.AreEqual(0, d1.Coefficients[0]);
            Assert.AreEqual(1, d1.Coefficients[1]);

            Assert.AreEqual(1, d2.Coefficients[0]);
            Assert.AreEqual(0, d2.Coefficients[1]);
        }
Example #10
        public static double BaumWelchLearning(double[][] data)
        {
            // Specify an initial normal distribution for the samples.
            NormalDistribution density = new NormalDistribution();

            // Creates a continuous hidden Markov Model with two states organized in a forward
            //  topology and an underlying univariate Normal distribution as probability density.
            var model = new HiddenMarkovModel <NormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier until the
            // difference in the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning <NormalDistribution>(model)
            {
                Tolerance  = 0.001,
                Iterations = 0,
            };

            // Fit the model
            double likelihood = teacher.Run(data);

            // See the log-probability of the sequences learned
            double a1 = model.Evaluate(new[] { 0.999999999999928, 0, 0.999999999999988, 0, 0.999999999999988 });    // -0.12799388666109757

            return(a1);
        }
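A hedged variant of the method above: if any state ends up fitting (nearly) constant values, the Normal variance collapses toward zero and evaluation can return NaN. The NormalOptions regularization used in PredictTest2 above guards against that.

        public static double BaumWelchLearningRegularized(double[][] data)
        {
            var model = new HiddenMarkovModel<NormalDistribution>(new Ergodic(2), new NormalDistribution());

            var teacher = new BaumWelchLearning<NormalDistribution>(model)
            {
                Tolerance  = 0.001,
                Iterations = 0,

                // Guard against zero variance when a state sees constant values
                // (same FittingOptions as in PredictTest2 above)
                FittingOptions = new NormalOptions() { Regularization = double.Epsilon }
            };

            return teacher.Run(data);
        }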
Example #11
        public void LearnTest3()
        {
            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new double[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a new Hidden Markov Model with 3 states
            var hmm = HiddenMarkovModel.CreateGeneric(3, 2);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning <GeneralDiscreteDistribution>(hmm)
            {
                Tolerance = 0.0001
            };
            double ll = teacher.Run(sequences);

            // Calculate the log-likelihood of the most likely (Viterbi)
            //  path for each of the given sequences under the model
            double l1; hmm.Decode(new double[] { 0, 1 }, out l1);                      // 0.4999
            double l2; hmm.Decode(new double[] { 0, 1, 1, 1 }, out l2);                // 0.1145

            double l3; hmm.Decode(new double[] { 1, 1 }, out l3);                      // 0.0000
            double l4; hmm.Decode(new double[] { 1, 0, 0, 0 }, out l4);                // 0.0000

            double l5; hmm.Decode(new double[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0002
            double l6; hmm.Decode(new double[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0002


            ll = System.Math.Exp(ll);
            l1 = System.Math.Exp(l1);
            l2 = System.Math.Exp(l2);
            l3 = System.Math.Exp(l3);
            l4 = System.Math.Exp(l4);
            l5 = System.Math.Exp(l5);
            l6 = System.Math.Exp(l6);

            Assert.AreEqual(0.95151018769760853, ll, 1e-4);
            Assert.AreEqual(0.4999419764097881, l1, 1e-4);
            Assert.AreEqual(0.1145702973735144, l2, 1e-4);
            Assert.AreEqual(0.0000529972606821, l3, 1e-4);
            Assert.AreEqual(0.0000000000000001, l4, 1e-4);
            Assert.AreEqual(0.0002674509390361, l5, 1e-4);
            Assert.AreEqual(0.0002674509390361, l6, 1e-4);

            Assert.IsTrue(l1 > l3 && l1 > l4);
            Assert.IsTrue(l2 > l3 && l2 > l4);

            Assert.AreEqual(1, hmm.Dimension);
        }
Example #12
        public void learn_predict()
        {
            #region doc_predict
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 3, 5, 7, 9, 11, 13 },
                new[] { 1, 3, 5, 7, 9, 11 },
                new[] { 1, 3, 5, 7, 9, 11, 13 },
                new[] { 1, 3, 3, 7, 7, 9, 11, 11, 13, 13 },
                new[] { 1, 3, 7, 9, 11, 13 },
            };

            // Create a Baum-Welch HMM algorithm:
            var teacher = new BaumWelchLearning()
            {
                // Let's create a left-to-right (forward)
                // Hidden Markov Model with 7 hidden states
                Topology = new Forward(7),

                // We'll try to fit the model to the data until the difference in
                // the average log-likelihood changes only by as little as 0.0001
                Tolerance  = 0.0001,
                Iterations = 0 // do not impose a limit on the number of iterations
            };

            // Use the algorithm to learn a new Markov model:
            HiddenMarkovModel hmm = teacher.Learn(sequences);

            // Now, we will try to predict the next 1 observation in a base symbol sequence
            int[] prediction = hmm.Predict(observations: new[] { 1, 3, 5, 7, 9 }, next: 1);

            // At this point, prediction should be int[] { 11 }
            int nextSymbol = prediction[0]; // should be 11.

            // We can try to predict further, but this might not work very
            // well due to the Markov assumption between the transition states:
            int[] nextSymbols = hmm.Predict(observations: new[] { 1, 3, 5, 7 }, next: 2);

            // At this point, nextSymbols should be int[] { 9, 11 }
            int nextSymbol1 = nextSymbols[0]; // 9
            int nextSymbol2 = nextSymbols[1]; // 11
            #endregion

            Assert.AreEqual(9, nextSymbol1);
            Assert.AreEqual(11, nextSymbol2);

            Assert.AreEqual(prediction.Length, 1);
            Assert.AreEqual(11, prediction[0]);

            Assert.AreEqual(2, nextSymbols.Length);
            Assert.AreEqual(new[] { 9, 11 }, nextSymbols);
        }
Example #13
        public static void Generate()
        {
            MathHelper.SetupGenerator(42);

            // Consider some phrases:
            //
            string[][] phrases =
            {
                new[] { "those",  "are",   "sample", "words", "from", "a", "dictionary" },
                new[] { "those",  "are",   "sample", "words" },
                new[] { "sample", "words", "are",    "words" },
                new[] { "those",  "words" },
                new[] { "those",  "are",   "words" },
                new[] { "words",  "from",  "a",      "dictionary" },
                new[] { "those",  "are",   "words",  "from",  "a",    "dictionary" }
            };

            // Let's begin by transforming them to sequence of
            // integer labels using a codification codebook:
            var codebook = new Codification(phrases);

            // Now we can create the training data for the models:
            int[][] sequence = codebook.Translate(phrases);

            // To create the models, we will specify a forward topology,
            // as the sequences have definite start and ending points.
            //
            var topology = new Forward(states: 4);
            int symbols  = codebook.SymbolCount; // We have 7 different words

            Console.WriteLine("Symbol Count: {0}", symbols);

            // Create the hidden Markov model
            HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            BaumWelchLearning teacher = new BaumWelchLearning(hmm);

            // Teach the model about the phrases
            double error = teacher.Run(sequence);

            // Now, we can ask the model to generate new samples
            // from the word distributions it has just learned:
            //
            int[] sample = hmm.Generate(3);

            // And the result will be: "those", "are", "words".
            string[] result = codebook.Translate(sample);

            foreach (string result_word in result)
            {
                Console.WriteLine(result_word);
            }
        }
Example #14
        public MarkovGenerator(string trainingFile)
        {
            List <Note> noteList = new Loader().LoadMidiFile(trainingFile);

            if (noteList == null)
            {
                return;
            }

            Note[] basis = noteList.ToArray();

            int minId = 1000;
            int maxId = -1000;

            for (int i = 0; i < basis.Length; i++)
            {
                if (basis[i].Id < minId)
                {
                    minId = basis[i].Id;
                }

                if (basis[i].Id > maxId)
                {
                    maxId = basis[i].Id;
                }
            }

            int range = maxId - minId;

            addToNote = -minId;

            int[][] sequences = new int[basis.Length / 64][];
            for (int i = 0; i < basis.Length / 64; i++)
            {
                sequences[i] = new int[64];

                for (int j = 0; j < 64; j++)
                {
                    // Take the j-th note of the i-th 64-note chunk
                    // (basis[i + j] would make consecutive chunks overlap)
                    Note basisNote          = basis[(i * 64) + j];
                    int  noteRepresentation = ((basisNote.Id + addToNote) * 5) + (int)Math.Log((int)basisNote.Length, 2.0);

                    sequences[i][j] = noteRepresentation;
                }
            }

            model = new HiddenMarkovModel(64, (range * 5) + 10);

            BaumWelchLearning bwTeacher = new BaumWelchLearning(model)
            {
                Iterations = 10
            };

            bwTeacher.Run(sequences);
        }
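A hedged sketch of how the trained model field might be used to emit new material. The decoding simply inverts the noteRepresentation encoding above ((id + addToNote) * 5 + log2(length)); the Note(id, length) constructor shown is hypothetical.

        public Note[] GenerateMelody(int length)
        {
            // Sample a fresh symbol sequence from the learned model
            int[] symbols = model.Generate(length);

            Note[] notes = new Note[symbols.Length];
            for (int i = 0; i < symbols.Length; i++)
            {
                int id        = (symbols[i] / 5) - addToNote; // invert (id + addToNote) * 5
                int lengthExp = symbols[i] % 5;               // invert the log2 length term
                notes[i] = new Note(id, 1 << lengthExp);      // hypothetical constructor
            }
            return notes;
        }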
Example #15
        static void runArbitraryDensityHiddenMarkovModelLearningExample()
        {
            // Create continuous sequences.
            //  In the sequences below, there seem to be two states, one for values between 0 and 1 and another for values between 5 and 7.
            //  The states seem to switch on every observation.
            double[][] observationSequences = new double[][]
            {
                new double[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
                new double[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
                new double[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
            };

            // Creates a continuous hidden Markov Model with two states organized in an ergodic topology
            // and an underlying univariate Normal distribution as probability density.
            var hmm = new HiddenMarkovModel <NormalDistribution>(topology: new Ergodic(states: 2), emissions: new NormalDistribution());

            // Configure the learning algorithms to train the sequence classifier
            // until the difference in the average log-likelihood changes only by as little as 0.0001.
            var trainer = new BaumWelchLearning <NormalDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            };

            // Fit the model.
            double averageLogLikelihood = trainer.Run(observationSequences);

            Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

            // The log-probability of the sequences learned.
            double logLik1 = hmm.Evaluate(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 });  // -0.12799388666109757.
            double logLik2 = hmm.Evaluate(new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 });  // 0.01171157434400194.

            // The log-probability of an unrelated sequence.
            double logLik3 = hmm.Evaluate(new[] { 1.1, 2.2, 1.3, 3.2, 4.2, 1.0 });  // -298.7465244473417.

            // Transform the log-likelihoods back with Exp (these are densities, so values may exceed 1).
            Console.WriteLine("probability = {0}", Math.Exp(logLik1));  // 0.879.
            Console.WriteLine("probability = {0}", Math.Exp(logLik2));  // 1.011.
            Console.WriteLine("probability = {0}", Math.Exp(logLik3));  // 0.000.

            // Ask the model to decode one of the sequences.
            // The state variable will contain: { 0, 1, 0, 1, 0, 1 }.
            double logLikelihood = 0.0;

            int[] path = hmm.Decode(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }, out logLikelihood);
            Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
            foreach (int state in path)
            {
                Console.Write("{0},", state);
            }
            Console.WriteLine("]");
        }
Example #16
        public void GenerateTest2()
        {
            #region doc_generate
            Accord.Math.Random.Generator.Seed = 42;

            // Let's say we have the following set of sequences
            string[][] phrases =
            {
                new[] { "those",  "are",   "sample", "words", "from", "a", "dictionary" },
                new[] { "those",  "are",   "sample", "words" },
                new[] { "sample", "words", "are",    "words" },
                new[] { "those",  "words" },
                new[] { "those",  "are",   "words" },
                new[] { "words",  "from",  "a",      "dictionary" },
                new[] { "those",  "are",   "words",  "from",  "a",    "dictionary" }
            };

            // Let's begin by transforming them to sequence of
            // integer labels using a codification codebook:
            var codebook = new Codification("Words", phrases);

            // Now we can create the training data for the models:
            int[][] sequence = codebook.Translate("Words", phrases);

            // To create the models, we will specify a forward topology,
            // as the sequences have definite start and ending points.
            //
            var topology = new Forward(states: 4);
            int symbols  = codebook["Words"].Symbols; // We have 7 different words

            // Create the hidden Markov model
            var hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            var teacher = new BaumWelchLearning(hmm);

            // Teach the model
            teacher.Learn(sequence);

            // Now, we can ask the model to generate new samples
            // from the word distributions it has just learned:
            //
            int[] sample = hmm.Generate(3);

            // And the result will be: "those", "are", "words".
            string[] result = codebook.Translate("Words", sample);
            #endregion

            Assert.AreEqual("those", result[0]);
            Assert.AreEqual("are", result[1]);
            Assert.AreEqual("words", result[2]);
        }
Example #17
        public void PredictTest3()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            double[][] sequences =
            {
                new double[] { 1, 2, 3, 4, 5 },
                new double[] { 1, 2, 4, 3, 5 },
                new double[] { 1, 2, 5 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            var hmm = HiddenMarkovModel.CreateGeneric(new Forward(4), 6);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning <GeneralDiscreteDistribution>(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            double[] input = { 1, 2 }; // base sequence for prediction


            // Predict the next observation in sequence
            Mixture <GeneralDiscreteDistribution> mixture = null;

            double prediction = hmm.Predict(input, out mixture);


            // At this point, the prediction distribution should be spread
            // roughly evenly over symbols 3, 4 and 5, with mean near 4
            Assert.AreEqual(4, mixture.Mean, 0.1);
            Assert.IsFalse(double.IsNaN(mixture.Mean));


            double[] input2 = { 1 };

            // The only possible value after 1 must be 2.
            prediction = hmm.Predict(input2, out mixture);

            Assert.AreEqual(2, prediction);
        }
Example #18
        public void BuildMarkovModel(List <List <int> > dataset)
        {
            Dictionary <string, int> interestingStates = GetInterestingStatesAsDictionary();

            //create new model
            _model = new HiddenMarkovModel(states: Timeline.Count, symbols: interestingStates.Count);

            //teach model
            BaumWelchLearning teacher = new BaumWelchLearning(_model);

            //convert the dataset into a jagged int array
            int[][] data = dataset.Select(a => a.ToArray()).ToArray();
            teacher.Run(data);
        }
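A hedged follow-up sketch: once BuildMarkovModel has run, _model can forecast the next interesting state with the same Predict(sequence, next) call used in the other examples on this page.

        public int PredictNextState(List<int> timeline)
        {
            // Forecast the single next symbol after the observed timeline
            int[] next = _model.Predict(timeline.ToArray(), 1);
            return next[0];
        }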
Example #19
        public static void BaumWelchLearning()
        {
            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a new Hidden Markov Model with 3 states for
            //  an output alphabet of two characters (zero and one)
            HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 2);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning(hmm)
            {
                Tolerance = 0.0001, Iterations = 0
            };
            double ll = teacher.Run(sequences);

            // Calculate the probability that the given
            //  sequences originated from the model
            double l1 = hmm.Evaluate(new int[] { 0, 1 });       // 0.999
            double l2 = hmm.Evaluate(new int[] { 0, 1, 1, 1 }); // 0.916

            Console.WriteLine("l1: {0}", System.Math.Exp(l1));
            Console.WriteLine("l2: {0}", System.Math.Exp(l2));

            // Sequences which do not start with zero have much lower probability.
            double l3 = hmm.Evaluate(new int[] { 1, 1 });       // 0.000
            double l4 = hmm.Evaluate(new int[] { 1, 0, 0, 0 }); // 0.000

            Console.WriteLine("l3: {0}", System.Math.Exp(l3));
            Console.WriteLine("l4: {0}", System.Math.Exp(l4));

            // Sequences which contain a few errors have higher probability
            //  than the ones which do not start with zero. This shows some
            //  of the temporal elasticity and error tolerance of the HMMs.
            double l5 = hmm.Evaluate(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }); // 0.034
            double l6 = hmm.Evaluate(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }); // 0.034

            Console.WriteLine("l5: {0}", System.Math.Exp(l5));
            Console.WriteLine("l6: {0}", System.Math.Exp(l6));
        }
Example #20
        private Tradetype PredictNextTrade()
        {
            var res = Tradetype.Winning;
            var observationSequence = GetSequence();

            if (observationSequence.All((o) => o == 0))
            {
                res = Tradetype.Losing;
            }
            else if (observationSequence.All((o) => o == 1))
            {
                res = Tradetype.Neutral;
            }
            else if (observationSequence.All((o) => o == 2))
            {
                res = Tradetype.Winning;
            }
            else if (observationSequence.Distinct().Count() < 3)
            {
                res = Tradetype.Neutral;
            }
            else if (_tradeReturns.Count == 4)
            {
                var teacher = new BaumWelchLearning()
                {
                    NumberOfStates  = 3,
                    NumberOfSymbols = observationSequence.Max() + 1
                };

                // and call its Run method to start learning
                var hmm     = teacher.Learn(new int[][] { observationSequence });
                var predict = hmm.Predict(observationSequence, 1);

                if (predict[0] == 0)
                {
                    res = Tradetype.Losing;
                }
                else if (predict[0] == 1)
                {
                    res = Tradetype.Neutral;
                }
                else if (predict[0] == 2)
                {
                    res = Tradetype.Winning;
                }
            }

            return(res);
        }
Example #21
        public static void BaumWelchLearning()
        {
            // We will try to create a Hidden Markov Model which
            //  can detect if a given sequence starts with a zero
            //  and has any number of ones after that.
            int[][] sequences = new int[][]
            {
                new int[] { 0, 1, 1, 1, 1, 0, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
                new int[] { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 },
            };

            // Creates a new Hidden Markov Model with 3 states for
            //  an output alphabet of two characters (zero and one)
            HiddenMarkovModel hmm = new HiddenMarkovModel(3, 2);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning(hmm)
            {
                Tolerance = 0.0001, Iterations = 0
            };

            double ll = teacher.Run(sequences);
            double l0 = Math.Exp(hmm.Evaluate(new int[] { 1, 0 }));
            // Calculate the probability that the given
            //  sequences originated from the model
            double l1 = Math.Exp(hmm.Evaluate(new int[] { 0, 1 }));       // 0.999
            double l2 = Math.Exp(hmm.Evaluate(new int[] { 0, 1, 1, 1 })); // 0.916

            // Sequences which do not start with zero have much lower probability.
            double l3 = Math.Exp(hmm.Evaluate(new int[] { 1, 1 }));       // 0.000
            double l4 = Math.Exp(hmm.Evaluate(new int[] { 1, 0, 0, 0 })); // 0.000

            // Sequences which contain a few errors have higher probability
            //  than the ones which do not start with zero. This shows some
            //  of the temporal elasticity and error tolerance of the HMMs.
            double l5 = Math.Exp(hmm.Evaluate(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 })); // 0.034
            double l6 = Math.Exp(hmm.Evaluate(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 })); // 0.034
        }
Example #22
        public void PredictTest2()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 3, 3, 5 },
                new[] { 1, 2, 3 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int length = 1;         // number of observations to predict

            int[] input = { 1, 2 }; // base sequence for prediction

            // Predict the next 1 observation in sequence
            int[] prediction = hmm.Predict(input, length);

            // At this point, prediction should be int[] { 3 }
            Assert.AreEqual(prediction.Length, 1);
            Assert.AreEqual(prediction[0], 3);
        }
Example #23
        private void buttonLearnHMM_Click(object sender, EventArgs e)
        {
            // TODO: change the numOfGestures value as needed
            //ITopology forward = new Forward(states: 6);
            //classifier = new HiddenMarkovClassifier(classes: 5,topology: forward,symbols: 20);
            //var teacher = new HiddenMarkovClassifierLearning(classifier,
            //    modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
            //    {
            //        Tolerance = 0.0001, // iterate until log-likelihood changes less than 0.001
            //        Iterations = 0     // don't place an upper limit on the number of iterations
            //    });

            //int[][] inputSequences = trainingSequences.Select(a => a.ToArray()).ToArray();
            //int[] outputLabels = trainingLabels.ToArray();

            //double error = teacher.Run(inputSequences,outputLabels);

            //////////////////////////////////////////////////

            for (int i = 0; i < numOfGestures; i++)
            {
                ts.Clear();
                for (int j = 0; j < trainingSequences.Count; j++)
                {
                    if (trainingLabels[j] == i)
                    {
                        ts.Add(new List <int>(trainingSequences[j]));
                    }
                }

                int[][]           inputSequences = ts.Select(a => a.ToArray()).ToArray();
                HiddenMarkovModel hmm            = new HiddenMarkovModel(6, 20);
                teacher = new BaumWelchLearning(hmm)
                {
                    Tolerance = 0.0001, Iterations = 0
                };
                teacher.Run(inputSequences);
                HMM.Add(new Tuple <int, HiddenMarkovModel>(i, hmm));
            }
        }
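A hedged sketch of the recognition step for the per-gesture models collected in HMM above: score a new sequence under every model and keep the gesture label with the highest log-likelihood.

        private int ClassifyGesture(int[] observationSequence)
        {
            int    bestGesture    = -1;
            double bestLikelihood = double.NegativeInfinity;

            // Pick the gesture whose model assigns the highest log-likelihood
            foreach (Tuple<int, HiddenMarkovModel> entry in HMM)
            {
                double logLikelihood = entry.Item2.Evaluate(observationSequence);
                if (logLikelihood > bestLikelihood)
                {
                    bestLikelihood = logLikelihood;
                    bestGesture    = entry.Item1;
                }
            }

            return bestGesture;
        }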
Example #24
        public void LearnTest4()
        {
            int[][] sequences = new int[][]
            {
                new int[] { 0, 3, 1 },
                new int[] { 0, 2 },
                new int[] { 1, 0, 3 },
                new int[] { 3, 4 },
                new int[] { 0, 1, 3, 5 },
                new int[] { 0, 3, 4 },
                new int[] { 0, 1, 3, 5 },
                new int[] { 0, 1, 3, 5 },
                new int[] { 0, 1, 3, 4, 5 },
            };

            HiddenMarkovModel hmm = new HiddenMarkovModel(3, 6);

            var teacher = new BaumWelchLearning(hmm)
            {
                Iterations = 100, Tolerance = 0
            };

            double ll = teacher.Run(sequences);

            double l0; hmm.Decode(sequences[0], out l0);
            double l1; hmm.Decode(sequences[1], out l1);
            double l2; hmm.Decode(sequences[2], out l2);

            double pl = System.Math.Exp(ll);
            double p0 = System.Math.Exp(l0);
            double p1 = System.Math.Exp(l1);
            double p2 = System.Math.Exp(l2);

            Assert.AreEqual(0.49788370872923726, pl, 1e-10);
            Assert.AreEqual(0.014012065043262294, p0, 1e-10);
            Assert.AreEqual(0.016930905415294094, p1, 1e-10);
            Assert.AreEqual(0.001936595918966074, p2, 1e-10);
        }
Example #25
        private static HiddenMarkovModel <MultivariateNormalDistribution> createModel()
        {
            double[][][] sequences =
            {
                new double[][]
                {
                    new double[] { 1, 2 },
                    new double[] { 6, 7 },
                    new double[] { 2, 3 },
                },
                new double[][]
                {
                    new double[] { 2, 2 },
                    new double[] { 9, 8 },
                    new double[] { 1, 0 },
                },
                new double[][]
                {
                    new double[] { 1, 3 },
                    new double[] { 8, 9 },
                    new double[] { 3, 3 },
                },
            };

            var density = new MultivariateNormalDistribution(dimension: 2);

            var model = new HiddenMarkovModel <MultivariateNormalDistribution>(new Forward(2), density);

            var teacher = new BaumWelchLearning <MultivariateNormalDistribution>(model)
            {
                Tolerance  = 0.0001,
                Iterations = 0,
            };

            double logLikelihood = teacher.Run(sequences);

            return(model);
        }
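A short usage sketch for the factory above (assumed, not from the original test): Evaluate on the multivariate model takes a sequence of 2-dimensional observations, just like the training data.

        private static void demoCreateModel()
        {
            var model = createModel();

            // Log-likelihood of a low-high-low sequence resembling the training set
            double logLikelihood = model.Evaluate(new double[][]
            {
                new double[] { 1, 2 },
                new double[] { 8, 8 },
                new double[] { 2, 2 },
            });

            Console.WriteLine(Math.Exp(logLikelihood));
        }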
Example #26
        /*public static void SaveSequenceList(SequenceList seqList, string path)
         * {
         *  Stream writeStream = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.None);
         *  seqList.Save(writeStream);
         *  writeStream.Close();
         * }
         *
         * public static SequenceList LoadSequenceList(string path)
         * {
         *  Stream readStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
         *  SequenceList seqList = SequenceList.Load(readStream);
         *  readStream.Close();
         *  return seqList;
         * }*/

        public static HiddenMarkovModel <MultivariateNormalDistribution> CreateModelFromFrames(List <List <Frame> > frames)
        {
            SequenceList sequences = Utils.FramesToSequenceList(frames);

            HiddenMarkovModel <MultivariateNormalDistribution> hmm;
            MultivariateNormalDistribution mnd = new MultivariateNormalDistribution(sequences.GetDimensions());

            hmm = new HiddenMarkovModel <MultivariateNormalDistribution>(new Forward(5), mnd);

            var teacher = new BaumWelchLearning <MultivariateNormalDistribution>(hmm);

            teacher.Tolerance      = 0.0001;
            teacher.Iterations     = 0;
            teacher.FittingOptions = new NormalOptions()
            {
                Diagonal       = true, // only diagonal covariance matrices
                Regularization = 1e-5  // avoid non-positive definite errors
            };

            teacher.Run(sequences.GetArray());

            return(hmm);
        }
Example #27
        public void LearnTest_EmptySequence()
        {
            int[][] sequences = new int[][]
            {
                new int[] { 0, 3, 1 },
                new int[] { 0, 2 },
                new int[] { 1, 0, 3 },
                new int[] { 3, 4 },
                new int[] { },
                new int[] { 0, 3, 4 },
                new int[] { 0, 1, 3, 5 },
                new int[] { 0, 1, 3, 5 },
                new int[] { 0, 1, 3, 4, 5 },
            };

            HiddenMarkovModel hmm = new HiddenMarkovModel(3, 6);

            var teacher = new BaumWelchLearning(hmm)
            {
                Iterations = 100, Tolerance = 0
            };

            bool thrown = false;

            try
            {
                double logLikelihood = teacher.Run(sequences);
            }
            catch (ArgumentException ex)
            {
                Assert.AreEqual("observations", ex.ParamName);
                thrown = true;
            }

            Assert.IsTrue(thrown);
        }
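As the test above demonstrates, Baum-Welch rejects empty observation sequences with an ArgumentException. A minimal guard one might apply before training (assuming using System.Linq):

        private static double LearnSkippingEmpty(BaumWelchLearning teacher, int[][] sequences)
        {
            // Drop empty sequences before handing the data to Baum-Welch
            int[][] nonEmpty = sequences.Where(s => s.Length > 0).ToArray();
            return teacher.Run(nonEmpty);
        }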
Example #28
        public static void BaumWelchLearning()
        {
            //var pS = new double[] { 1.0 / 3, 1.0 / 3, 1.0 / 3 };
            //var p4N = new double[] { 1.0 / 4, 1.0 / 4, 1.0 / 4, 1.0 / 4 };
            //var p6N = new double[] { 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6, 1.0 / 6 };
            //var p8N = new double[] { 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8, 1.0 / 8 };
            //var pN = new double[][] { p4N, p6N, p8N };

            int[][] sequences = new int[][]
            {
                new  int[] { 1, 1 },
                new  int[] { 2, 6 },
                new  int[] { 2, 3 }
            };

            HiddenMarkovModel hmm = new HiddenMarkovModel(3, 8);
            var teacher           = new BaumWelchLearning(hmm)
            {
                Tolerance = 0.0001, Iterations = 0
            };

            var    m2 = teacher.Learn(sequences);
            double l0 = Math.Exp(m2.Evaluate(new int[] { 1, 6, 3 }));
        }
Example #29
        public static HMMGroup CreateHiddenMarkovModel(IEnumerable <string> fileNames, int?length = null, SplitAlgorithm algo = SplitAlgorithm.Simple, bool ignoreCase = false)
        {
            string[][] ngrams = null;
            switch (algo)
            {
            case SplitAlgorithm.Simple:
                ngrams = GetNgramsSimple(fileNames, length ?? 8);
                break;

            case SplitAlgorithm.Pairs:
                ngrams = GetNgramsPairs(fileNames, length ?? 8);
                break;

            case SplitAlgorithm.ByWord:
                ngrams = GetNgrams(fileNames, length, true, ignoreCase);
                break;
            }

            var codebook = new Codification("data", ngrams);
            var sequence = codebook.ParallelTransform("data", ngrams);

            ngrams = null;

            var topology = new Forward(states: 4);
            int symbols  = codebook["data"].NumberOfSymbols;
            var hmm      = new HiddenMarkovModel(topology, symbols);
            var teacher  = new BaumWelchLearning(hmm);

            teacher.Learn(sequence);
            return(new HMMGroup
            {
                Model = hmm,
                Codebook = codebook,
                Length = length
            });
        }
Example #30
        public void PredictTest()
        {
            int[][] sequences = new int[][]
            {
                new int[] { 0, 3, 1, 2 },
            };


            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 4);

            var teacher = new BaumWelchLearning(hmm)
            {
                Tolerance = 1e-10, Iterations = 0
            };
            double ll = teacher.Run(sequences);

            double l11, l12, l13, l14;

            int p1 = hmm.Predict(new int[] { 0 }, 1, out l11)[0];
            int p2 = hmm.Predict(new int[] { 0, 3 }, 1, out l12)[0];
            int p3 = hmm.Predict(new int[] { 0, 3, 1 }, 1, out l13)[0];
            int p4 = hmm.Predict(new int[] { 0, 3, 1, 2 }, 1, out l14)[0];

            Assert.AreEqual(3, p1);
            Assert.AreEqual(1, p2);
            Assert.AreEqual(2, p3);
            Assert.AreEqual(2, p4);

            double l21 = hmm.Evaluate(new int[] { 0, 3 });
            double l22 = hmm.Evaluate(new int[] { 0, 3, 1 });
            double l23 = hmm.Evaluate(new int[] { 0, 3, 1, 2 });
            double l24 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(l11, l21, 1e-10);
            Assert.AreEqual(l12, l22, 1e-10);
            Assert.AreEqual(l13, l23, 1e-10);
            Assert.AreEqual(l14, l24, 1e-10);

            Assert.IsFalse(double.IsNaN(l11));
            Assert.IsFalse(double.IsNaN(l12));
            Assert.IsFalse(double.IsNaN(l13));
            Assert.IsFalse(double.IsNaN(l14));

            Assert.IsFalse(double.IsNaN(l21));
            Assert.IsFalse(double.IsNaN(l22));
            Assert.IsFalse(double.IsNaN(l23));
            Assert.IsFalse(double.IsNaN(l24));

            double ln1;

            int[] pn = hmm.Predict(new int[] { 0 }, 4, out ln1);

            Assert.AreEqual(4, pn.Length);
            Assert.AreEqual(3, pn[0]);
            Assert.AreEqual(1, pn[1]);
            Assert.AreEqual(2, pn[2]);
            Assert.AreEqual(2, pn[3]);

            double ln2 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(ln1, ln2, 1e-10);
        }