Example #1
        public void PredictTest2()
        {
            // Create continuous sequences. In the sequence below, there
            // seems to be two states, one for values equal to 1 and another
            // for values equal to 2.
            double[][] sequences = new double[][]
            {
                new double[] { 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2, 1, 2 }
            };

            // Specify an initial Normal distribution for the samples.
            NormalDistribution density = new NormalDistribution();

            // Creates a continuous hidden Markov Model with two states organized in an ergodic
            //  topology and an underlying univariate Normal distribution as probability density.
            var model = new HiddenMarkovModel<NormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier until the
            // difference in the average log-likelihood changes only by as little as 0.0001
            var teacher = new BaumWelchLearning<NormalDistribution>(model)
            {
                Tolerance  = 0.0001,
                Iterations = 0,

                // However, we will need to specify a regularization constant as the
                //  variance of each state will likely be zero (all values are equal)
                FittingOptions = new NormalOptions()
                {
                    Regularization = double.Epsilon
                }
            };

            // Fit the model
            double likelihood = teacher.Run(sequences);


            double a1 = model.Predict(new double[] { 1, 2, 1 });
            double a2 = model.Predict(new double[] { 1, 2, 1, 2 });

            Assert.AreEqual(2, a1, 1e-10);
            Assert.AreEqual(1, a2, 1e-10);
            Assert.IsFalse(Double.IsNaN(a1));
            Assert.IsFalse(Double.IsNaN(a2));

            double p1, p2;
            Mixture<NormalDistribution> d1, d2;
            double b1 = model.Predict(new double[] { 1, 2, 1 }, out p1, out d1);
            double b2 = model.Predict(new double[] { 1, 2, 1, 2 }, out p2, out d2);

            Assert.AreEqual(2, b1, 1e-10);
            Assert.AreEqual(1, b2, 1e-10);
            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            Assert.AreEqual(0, d1.Coefficients[0]);
            Assert.AreEqual(1, d1.Coefficients[1]);

            Assert.AreEqual(1, d2.Coefficients[0]);
            Assert.AreEqual(0, d2.Coefficients[1]);
        }
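After fitting, each of the two hidden states should have specialized on one of the two observed values. Below is a minimal sketch of how this could be checked, continuing from the example above and assuming the generic model exposes its per-state densities through an `Emissions` property (verify against your Accord.NET version):

            // Sketch only: inspect the learned emission densities (assumes an
            // Emissions property on HiddenMarkovModel<NormalDistribution>).
            NormalDistribution state0 = model.Emissions[0];
            NormalDistribution state1 = model.Emissions[1];

            // One state should be centered near 1 and the other near 2
            Console.WriteLine("State 0 mean: {0}", state0.Mean);
            Console.WriteLine("State 1 mean: {0}", state1.Mean);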
Example #2
        public void PredictTest3()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 4, 3, 5 },
                new[] { 1, 2, 5 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int[] input = { 1, 2 }; // base sequence for prediction

            double[] logLikelihoods;

            // Predict the next observation in sequence
            int prediction = hmm.Predict(input, out logLikelihoods);

            var probs = Matrix.Exp(logLikelihoods);

            // At this point, the prediction probabilities should be roughly
            // evenly distributed among the symbols 3, 4 and 5
            Assert.AreEqual(probs.Length, 6);
            Assert.AreEqual(probs[0], 0.00, 0.01);
            Assert.AreEqual(probs[1], 0.00, 0.01);
            Assert.AreEqual(probs[2], 0.00, 0.01);
            Assert.AreEqual(probs[3], 0.33, 0.05);
            Assert.AreEqual(probs[4], 0.33, 0.05);
            Assert.AreEqual(probs[5], 0.33, 0.05);


            double[][] probabilities2;

            // Predict the next 2 observations in the sequence
            int[] prediction2 = hmm.Predict(input, 2, out probabilities2);

            Assert.AreEqual(probabilities2.Length, 2);
            Assert.AreEqual(probabilities2[0].Length, 6);
            Assert.AreEqual(probabilities2[1].Length, 6);

            Assert.IsTrue(probabilities2[0].IsEqual(logLikelihoods));
        }
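Since the single-step overload also returns the per-symbol log-likelihoods, the predicted symbol can be recovered by hand from `probs`. A minimal sketch, continuing from the example above (the argmax loop is plain C#, not an Accord.NET call):

            // Sketch: pick the symbol with the highest predicted probability
            int best = 0;
            for (int i = 1; i < probs.Length; i++)
            {
                if (probs[i] > probs[best])
                    best = i;
            }

            // 'best' should be one of the symbols 3, 4 or 5, which the model
            // considers (roughly) equally likely after observing { 1, 2 }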
Example #3
        public void learn_predict()
        {
            #region doc_predict
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 3, 5, 7, 9, 11, 13 },
                new[] { 1, 3, 5, 7, 9, 11 },
                new[] { 1, 3, 5, 7, 9, 11, 13 },
                new[] { 1, 3, 3, 7, 7, 9, 11, 11, 13, 13 },
                new[] { 1, 3, 7, 9, 11, 13 },
            };

            // Create a Baum-Welch HMM algorithm:
            var teacher = new BaumWelchLearning()
            {
                // Let's create a left-to-right (forward)
                // Hidden Markov Model with 7 hidden states
                Topology = new Forward(7),

                // We'll try to fit the model to the data until the difference in
                // the average log-likelihood changes only by as little as 0.0001
                Tolerance  = 0.0001,
                Iterations = 0 // do not impose a limit on the number of iterations
            };

            // Use the algorithm to learn a new Markov model:
            HiddenMarkovModel hmm = teacher.Learn(sequences);

            // Now, we will try to predict the next 1 observation in a base symbol sequence
            int[] prediction = hmm.Predict(observations: new[] { 1, 3, 5, 7, 9 }, next: 1);

            // At this point, prediction should be int[] { 11 }
            int nextSymbol = prediction[0]; // should be 11.

            // We can try to predict further, but this might not work very
            // well due to the Markov assumption between the transition states:
            int[] nextSymbols = hmm.Predict(observations: new[] { 1, 3, 5, 7 }, next: 2);

            // At this point, nextSymbols should be int[] { 9, 11 }
            int nextSymbol1 = nextSymbols[0]; // 9
            int nextSymbol2 = nextSymbols[1]; // 11
            #endregion

            Assert.AreEqual(9, nextSymbol1);
            Assert.AreEqual(11, nextSymbol2);

            Assert.AreEqual(prediction.Length, 1);
            Assert.AreEqual(11, prediction[0]);

            Assert.AreEqual(2, nextSymbols.Length);
            Assert.AreEqual(new[] { 9, 11 }, nextSymbols);
        }
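To see why the model prefers 11 after { 1, 3, 5, 7, 9 }, one can compare full-sequence likelihoods directly. A rough sketch, continuing from the example above and assuming the classic `Evaluate` method (which returns a log-likelihood) is still available on the learned model:

            // Sketch: compare the likelihood of the predicted continuation
            // against an alternative continuation
            double withEleven   = hmm.Evaluate(new[] { 1, 3, 5, 7, 9, 11 });
            double withThirteen = hmm.Evaluate(new[] { 1, 3, 5, 7, 9, 13 });

            // The continuation chosen by Predict should not score worse
            bool consistent = withEleven >= withThirteen;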
Example #4
        private Tradetype PredictNextTrade()
        {
            var res = Tradetype.Winning;

            if (_tradeReturns.IsReady)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 3);
                int[]             observationSequence = GetSequence();
                BaumWelchLearning teacher             = new BaumWelchLearning(hmm);

                // Call the teacher's Run method to fit the model to the observed sequence
                double error   = teacher.Run(observationSequence);
                int[]  predict = hmm.Predict(observationSequence, 1);

                if (predict[0] == 0)
                {
                    res = Tradetype.Losing;
                }
                else if (predict[0] == 1)
                {
                    res = Tradetype.Neutral;
                }
                else if (predict[0] == 2)
                {
                    res = Tradetype.Winning;
                }
            }

            return res;
        }
Example #5
        private TRADETYPE PredictNextTrade()
        {
            var res = TRADETYPE.WINNING;

            if (_tradeReturns.Count == 4)
            {
                HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 3);
                int[]             observationSequence = GetSequence(_tradeReturns);
                BaumWelchLearning teacher             = new BaumWelchLearning(hmm);

                // Call the teacher's Run method to fit the model to the observed sequence
                double error   = teacher.Run(observationSequence);
                int[]  predict = hmm.Predict(observationSequence, 1);
                if (predict[0] == 0)
                {
                    res = TRADETYPE.LOSING;
                }
                else if (predict[0] == 1)
                {
                    res = TRADETYPE.NEUTRAL;
                }
                else if (predict[0] == 2)
                {
                    res = TRADETYPE.WINNING;
                }
            }
            return res;
        }
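Both `PredictNextTrade` variants above rely on a `GetSequence` helper that is not shown. Below is a purely hypothetical sketch of what such a helper might look like: it maps each recorded trade return to one of the three symbols the 3-symbol model expects (0 = losing, 1 = neutral, 2 = winning). The thresholds, the `System.Linq` usage, and the assumption that `_tradeReturns` is an enumerable of doubles are illustrative, not part of the original code:

        // Hypothetical helper (requires using System.Linq and System.Collections.Generic):
        // converts raw trade returns into the 0/1/2 symbol alphabet used by the HMM above.
        private int[] GetSequence(IEnumerable<double> tradeReturns)
        {
            // 0 = losing, 1 = neutral, 2 = winning; the thresholds are placeholders
            return tradeReturns
                .Select(r => r < 0 ? 0 : (r > 0 ? 2 : 1))
                .ToArray();
        }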
Example #6
        public void PredictTest2()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences =
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 3, 3, 5 },
                new[] { 1, 2, 3 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six characters.
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the difference in
            //  the average log-likelihood changes only by as little as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int length = 1;         // number of observations to predict

            int[] input = { 1, 2 }; // base sequence for prediction

            // Predict the next 1 observation in sequence
            int[] prediction = hmm.Predict(input, length);

            // At this point, prediction should be int[] { 3 }
            Assert.AreEqual(prediction.Length, 1);
            Assert.AreEqual(prediction[0], 3);
        }
Example #7
        public void PredictTest()
        {
            int[][] sequences = new int[][]
            {
                new int[] { 0, 3, 1, 2 },
            };


            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 4);

            var teacher = new BaumWelchLearning(hmm)
            {
                Tolerance  = 1e-10,
                Iterations = 0
            };
            double ll = teacher.Run(sequences);

            double l11, l12, l13, l14;

            int p1 = hmm.Predict(new int[] { 0 }, 1, out l11)[0];
            int p2 = hmm.Predict(new int[] { 0, 3 }, 1, out l12)[0];
            int p3 = hmm.Predict(new int[] { 0, 3, 1 }, 1, out l13)[0];
            int p4 = hmm.Predict(new int[] { 0, 3, 1, 2 }, 1, out l14)[0];

            Assert.AreEqual(3, p1);
            Assert.AreEqual(1, p2);
            Assert.AreEqual(2, p3);
            Assert.AreEqual(2, p4);

            double l21 = hmm.Evaluate(new int[] { 0, 3 });
            double l22 = hmm.Evaluate(new int[] { 0, 3, 1 });
            double l23 = hmm.Evaluate(new int[] { 0, 3, 1, 2 });
            double l24 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(l11, l21, 1e-10);
            Assert.AreEqual(l12, l22, 1e-10);
            Assert.AreEqual(l13, l23, 1e-10);
            Assert.AreEqual(l14, l24, 1e-10);

            Assert.IsFalse(double.IsNaN(l11));
            Assert.IsFalse(double.IsNaN(l12));
            Assert.IsFalse(double.IsNaN(l13));
            Assert.IsFalse(double.IsNaN(l14));

            Assert.IsFalse(double.IsNaN(l21));
            Assert.IsFalse(double.IsNaN(l22));
            Assert.IsFalse(double.IsNaN(l23));
            Assert.IsFalse(double.IsNaN(l24));

            double ln1;

            int[] pn = hmm.Predict(new int[] { 0 }, 4, out ln1);

            Assert.AreEqual(4, pn.Length);
            Assert.AreEqual(3, pn[0]);
            Assert.AreEqual(1, pn[1]);
            Assert.AreEqual(2, pn[2]);
            Assert.AreEqual(2, pn[3]);

            double ln2 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(ln1, ln2, 1e-10);
        }
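The four-step prediction at the end behaves like repeated single-step predictions in which each predicted symbol is appended to the input before the next call. A rough sketch of that equivalence, continuing from the example above (the array concatenation uses plain C# rather than any Accord.NET helper):

            // Sketch: rebuild the 4-step prediction one symbol at a time
            int[] current = new int[] { 0 };
            for (int step = 0; step < 4; step++)
            {
                int next = hmm.Predict(current, 1)[0];        // predict a single next symbol

                int[] extended = new int[current.Length + 1]; // append it to the sequence
                current.CopyTo(extended, 0);
                extended[extended.Length - 1] = next;
                current = extended;
            }

            // 'current' should now read { 0, 3, 1, 2, 2 }, matching pn above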