Predict() public method

Predicts the next observation occurring after a given observation sequence.
public Predict ( int[] observations, double[] &probabilities ) : int
observations int[] A sequence of observations. The prediction is made for the observation following the last one in this sequence.
probabilities double[] The log-likelihood of each possible symbol occurring as the next observation.
Return int
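
A minimal usage sketch (not one of the examples below; it assumes a HiddenMarkovModel named hmm that has already been trained, as in the examples that follow):

    double[] logLikelihoods;

    // Predict the most likely next observation, obtaining the
    // log-likelihood of each candidate symbol along the way:
    int next = hmm.Predict(new int[] { 1, 2 }, out logLikelihoods);

    // Log-likelihoods can be converted back into probabilities:
    double[] probabilities = Matrix.Exp(logLikelihoods);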
Example #1
        public HMMGenerator(PatchNames instrument)
        {
            this.book = new Codebook<Note>();
            this.instrument = instrument;

            Accord.Math.Tools.SetupGenerator(10);

            // Consider some phrases:
            //
            string[][] phrases =
            {
            "The Big Brown Fox Jumps Over the Ugly Dog".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "This is too hot to handle".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "I am flying away like a gold eagle".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "Onamae wa nan desu ka".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "And then she asked, why is it so small?".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "Great stuff John! Now you will surely be promoted".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            "Jayne was taken aback when she found out her son was gay".Split(new char[]{' '},  StringSplitOptions.RemoveEmptyEntries),
            };

            // Let's begin by transforming them to sequence of
            // integer labels using a codification codebook:
            var codebook = new Codification("Words", phrases);

            // Now we can create the training data for the models:
            int[][] sequence = codebook.Translate("Words", phrases);

            // To create the models, we will specify a forward topology,
            // as the sequences have definite start and ending points.
            //
            var topology = new Forward(states: codebook["Words"].Symbols);
            int symbols = codebook["Words"].Symbols; // number of distinct words in the codebook

            // Create the hidden Markov model
            HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

            // Create the learning algorithm
            var teacher = new ViterbiLearning(hmm);

            // Teach the model about the phrases
            double error = teacher.Run(sequence);

            // Now, we can ask the model to generate new samples
            // from the word distributions it has just learned:
            //
            List<int> sample = new List<int>();
            int count = 10;
            sample.Add(hmm.Generate(1)[0]);
            while(sample.Count < count)
            {
                var k = hmm.Predict(sample.ToArray(), 1);
                sample.AddRange(k);
            }

            // Finally, translate the generated labels back into words:
            string[] result = codebook.Translate("Words", sample.ToArray());
        }
        public void PredictTest3()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences = 
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 4, 3, 5 },
                new[] { 1, 2, 5 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six symbols (0 to 5).
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the change in
            //  the average log-likelihood is as small as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int[] input = { 1, 2 }; // base sequence for prediction

            double[] logLikelihoods;

            // Predict the next observation in sequence
            int prediction = hmm.Predict(input, out logLikelihoods);

            var probs = Matrix.Exp(logLikelihoods);

            // At this point, the prediction probabilities
            // should be roughly equal for symbols 3, 4 and 5
            Assert.AreEqual(probs.Length, 6);
            Assert.AreEqual(probs[0], 0.00, 0.01);
            Assert.AreEqual(probs[1], 0.00, 0.01);
            Assert.AreEqual(probs[2], 0.00, 0.01);
            Assert.AreEqual(probs[3], 0.33, 0.05);
            Assert.AreEqual(probs[4], 0.33, 0.05);
            Assert.AreEqual(probs[5], 0.33, 0.05);


            double[][] probabilities2;

            // Predict the next 2 observations in the sequence
            int[] prediction2 = hmm.Predict(input, 2, out probabilities2);

            Assert.AreEqual(probabilities2.Length, 2);
            Assert.AreEqual(probabilities2[0].Length, 6);
            Assert.AreEqual(probabilities2[1].Length, 6);

            Assert.IsTrue(probabilities2[0].IsEqual(logLikelihoods));
        }
        public void PredictTest2()
        {
            // We will try to create a Hidden Markov Model which
            // can recognize (and predict) the following sequences:
            int[][] sequences = 
            {
                new[] { 1, 2, 3, 4, 5 },
                new[] { 1, 2, 3, 3, 5 },
                new[] { 1, 2, 3 },
            };

            // Creates a new left-to-right (forward) Hidden Markov Model
            //  with 4 states for an output alphabet of six symbols (0 to 5).
            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 6);

            // Try to fit the model to the data until the change in
            //  the average log-likelihood is as small as 0.0001
            BaumWelchLearning teacher = new BaumWelchLearning(hmm)
            {
                Tolerance = 0.0001,
                Iterations = 0
            };

            // Run the learning algorithm on the model
            double logLikelihood = teacher.Run(sequences);

            // Now, we will try to predict the next
            //   observations after a base sequence

            int length = 1; // number of observations to predict
            int[] input = { 1, 2 }; // base sequence for prediction

            // Predict the next observation in the sequence
            int[] prediction = hmm.Predict(input, length);

            // At this point, prediction should be int[] { 3 }
            Assert.AreEqual(prediction.Length, 1);
            Assert.AreEqual(prediction[0], 3);
        }
        public void PredictTest()
        {
            int[][] sequences = new int[][] 
            {
                new int[] { 0, 3, 1, 2 },
            };


            HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(4), 4);

            var teacher = new BaumWelchLearning(hmm) { Tolerance = 1e-10, Iterations = 0 };
            double ll = teacher.Run(sequences);

            double l11, l12, l13, l14;

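            // Predict the next symbol after each growing prefix of the training sequence: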
            int p1 = hmm.Predict(new int[] { 0 }, 1, out l11)[0];
            int p2 = hmm.Predict(new int[] { 0, 3 }, 1, out l12)[0];
            int p3 = hmm.Predict(new int[] { 0, 3, 1 }, 1, out l13)[0];
            int p4 = hmm.Predict(new int[] { 0, 3, 1, 2 }, 1, out l14)[0];

            Assert.AreEqual(3, p1);
            Assert.AreEqual(1, p2);
            Assert.AreEqual(2, p3);
            Assert.AreEqual(2, p4);

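            // The log-likelihood returned by Predict should match the
            // evaluation of the sequence extended with the predicted symbol: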
            double l21 = hmm.Evaluate(new int[] { 0, 3 });
            double l22 = hmm.Evaluate(new int[] { 0, 3, 1 });
            double l23 = hmm.Evaluate(new int[] { 0, 3, 1, 2 });
            double l24 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(l11, l21, 1e-10);
            Assert.AreEqual(l12, l22, 1e-10);
            Assert.AreEqual(l13, l23, 1e-10);
            Assert.AreEqual(l14, l24, 1e-10);

            Assert.IsFalse(double.IsNaN(l11));
            Assert.IsFalse(double.IsNaN(l12));
            Assert.IsFalse(double.IsNaN(l13));
            Assert.IsFalse(double.IsNaN(l14));

            Assert.IsFalse(double.IsNaN(l21));
            Assert.IsFalse(double.IsNaN(l22));
            Assert.IsFalse(double.IsNaN(l23));
            Assert.IsFalse(double.IsNaN(l24));

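            // Predict the next 4 observations in a single call: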
            double ln1;
            int[] pn = hmm.Predict(new int[] { 0 }, 4, out ln1);

            Assert.AreEqual(4, pn.Length);
            Assert.AreEqual(3, pn[0]);
            Assert.AreEqual(1, pn[1]);
            Assert.AreEqual(2, pn[2]);
            Assert.AreEqual(2, pn[3]);

            double ln2 = hmm.Evaluate(new int[] { 0, 3, 1, 2, 2 });

            Assert.AreEqual(ln1, ln2, 1e-10);
        }
        public void PredictTest2()
        {
            // Create continuous sequences. In the sequence below, there
            // seems to be two states, one for values equal to 1 and another
            // for values equal to 2.
            double[][] sequences = new double[][] 
            {
                new double[] { 1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2,1,2 }             
            };

            // Specify an initial normal distribution for the samples.
            NormalDistribution density = new NormalDistribution();

            // Creates a continuous hidden Markov Model with two states organized in an
            //  ergodic topology and an underlying univariate Normal distribution as density.
            var model = new HiddenMarkovModel<NormalDistribution>(new Ergodic(2), density);

            // Configure the learning algorithm to train the model until the
            // change in the average log-likelihood is as small as 0.0001
            var teacher = new BaumWelchLearning<NormalDistribution>(model)
            {
                Tolerance = 0.0001,
                Iterations = 0,

                // However, we will need to specify a regularization constant as the
                //  variance of each state will likely be zero (all values are equal)
                FittingOptions = new NormalOptions() { Regularization = double.Epsilon }
            };

            // Fit the model
            double likelihood = teacher.Run(sequences);


            double a1 = model.Predict(new double[] { 1, 2, 1 });
            double a2 = model.Predict(new double[] { 1, 2, 1, 2 });

            Assert.AreEqual(2, a1, 1e-10);
            Assert.AreEqual(1, a2, 1e-10);
            Assert.IsFalse(Double.IsNaN(a1));
            Assert.IsFalse(Double.IsNaN(a2));

            double p1, p2;
            Mixture<NormalDistribution> d1, d2;
            double b1 = model.Predict(new double[] { 1, 2, 1 }, out p1, out d1);
            double b2 = model.Predict(new double[] { 1, 2, 1, 2 }, out p2, out d2);

            Assert.AreEqual(2, b1, 1e-10);
            Assert.AreEqual(1, b2, 1e-10);
            Assert.IsFalse(Double.IsNaN(b1));
            Assert.IsFalse(Double.IsNaN(b2));

            Assert.AreEqual(0, d1.Coefficients[0]);
            Assert.AreEqual(1, d1.Coefficients[1]);

            Assert.AreEqual(1, d2.Coefficients[0]);
            Assert.AreEqual(0, d2.Coefficients[1]);
        }
Example #6
        private TRADETYPE PredictNextTrade()
        {
            var res = TRADETYPE.WINNING;
            if (_tradeReturns.Count == 4)
            {
                // Create a discrete model with three states and three
                // output symbols (losing, neutral and winning trades)
                HiddenMarkovModel hmm = new HiddenMarkovModel(states: 3, symbols: 3);
                int[] observationSequence = GetSequence(_tradeReturns);
                BaumWelchLearning teacher = new BaumWelchLearning(hmm);

                // and call its Run method to start learning
                double error = teacher.Run(observationSequence);

                // Predict the single observation that should follow the sequence
                int[] predict = hmm.Predict(observationSequence, 1);
                if (predict[0] == 0)
                    res = TRADETYPE.LOSING;
                else if (predict[0] == 1)
                    res = TRADETYPE.NEUTRAL;
                else if (predict[0] == 2)
                    res = TRADETYPE.WINNING;
            }
            return res;
        }