public static HiddenMarkovClassifier<NormalDistribution> CreateModel1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a univariate sequence and the same sequence backwards.
            double[][] sequences = new double[][] 
            {
                new double[] { 0,1,2,3,4 }, // This is the first  sequence with label = 0
                new double[] { 4,3,2,1,0 }, // This is the second sequence with label = 1
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            return classifier;
        }
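A minimal usage sketch (an addition, not part of the original sample), assuming the same Compute overload that the tests further down this page use:

        // Query the trained classifier with one of the training sequences
        var model = CreateModel1();

        double likelihood;
        int label = model.Compute(new double[] { 0, 1, 2, 3, 4 }, out likelihood);
        // label is expected to be 0, since this matches the first training sequence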
        public int Recognize(ISoundSignalReader signal, HiddenMarkovClassifier hmm, out string name,
            SignalVisitor voiceVisitor = null)
        {
            var featureUtility = new FeatureUtility(_engineParameters);
            signal.Reset();
            var features = featureUtility.ExtractFeatures(signal, voiceVisitor).First();
            var observations = _codeBook.Quantize(features.Select(item => new Point(item)).ToArray());

            double[] responsibilities;
            var ret = hmm.Compute(observations, out responsibilities);

            var models = hmm.Models;
            var bestLikelihood = Double.MinValue;
            name = string.Empty;

            // Report the tag of the highest-likelihood individual model as the recognized name
            foreach (var model in models)
            {
                var val = model.Evaluate(observations);
                if (val > bestLikelihood)
                {
                    bestLikelihood = val;
                    name = model.Tag.ToString();
                }
            }

            return ret;
        }
Example #3
        /// <summary>
        ///   Creates the ensemble
        /// </summary>
        private void btnCreate_Click(object sender, EventArgs e)
        {
            DataTable source = dgvSequenceSource.DataSource as DataTable;

            if (source == null)
            {
                MessageBox.Show("Please load some data by clicking 'Open' under the 'File' menu first. " +
                    "A sample dataset can be found in the folder 'Resources' contained in the same " +
                    "directory as this application.");
                return;
            }

            DataTable k = source.DefaultView.ToTable(true, "Label", "States");

            // Get the number of different classes in the data
            int classes = k.Rows.Count;

            string[] categories = new string[classes];
            int[] states = new int[classes];
            for (int i = 0; i < classes; i++)
            {
                // Gets the label name
                categories[i] = k.Rows[i]["Label"] as string;

                // Gets the number of states to recognize each label
                states[i] = int.Parse(k.Rows[i]["States"] as string);
            }


            hmmc = new HiddenMarkovClassifier(classes, states, 3, categories);

            dgvModels.DataSource = hmmc.Models;
        }
        public void LearnTest()
        {
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,1,1,0 },   // Class 0
                new int[] { 0,0,1,0 },   // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,0 },     // Class 0

                new int[] { 1,0,0,1 },   // Class 1
                new int[] { 1,1,0,1 },   // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
                new int[] { 1,0,1 },     // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // The sequences are drawn from an alphabet of two symbols (0 or 1)
            int symbols = 2;

            // Nested models will have two states each
            int[] states = new int[] { 2, 2 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double likelihood = teacher.Run(inputs, outputs);


            // Will assert the models have learned the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual = classifier.Compute(inputs[i], out likelihood);
                Assert.AreEqual(expected, actual);
            }
        }
        public void LearnTest1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a univariate sequence and the same sequence backwards.
            double[][] sequences = new double[][] 
            {
                new double[] { 0,1,2,3,4 }, // This is the first  sequence with label = 0
                new double[] { 4,3,2,1,0 }, // This is the second sequence with label = 1
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            NormalDistribution density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood1, likelihood2;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Compute(sequences[0], out likelihood1);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Compute(sequences[1], out likelihood2);

            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);


            Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
            Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);
            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(likelihood1));
            Assert.IsFalse(double.IsNaN(likelihood2));
        }
        public static HiddenMarkovClassifier<Independent> CreateModel1()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0 },
                    new double[] { 1 },
                    new double[] { 2 },
                    new double[] { 3 },
                    new double[] { 4 },
                }, 

                new double[][]
                {
                     // This is the second sequence with label = 1
                    new double[] { 4 },
                    new double[] { 3 },
                    new double[] { 2 },
                    new double[] { 1 },
                    new double[] { 0 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            //  and an underlying Independent density wrapping a single Normal component.
            NormalDistribution component = new NormalDistribution();
            Independent density = new Independent(component);
            var classifier = new HiddenMarkovClassifier<Independent>(2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            Assert.AreEqual(-13.271981026832929d, logLikelihood);

            return classifier;
        }
        public static HiddenMarkovClassifier CreateModel1()
        {
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,1,1,0 },   // Class 0
                new int[] { 0,0,1,0 },   // Class 0
                new int[] { 0,1,1,1,0 }, // Class 0
                new int[] { 0,1,0 },     // Class 0

                new int[] { 1,0,0,1 },   // Class 1
                new int[] { 1,1,0,1 },   // Class 1
                new int[] { 1,0,0,0,1 }, // Class 1
                new int[] { 1,0,1 },     // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // The sequences are drawn from an alphabet of two symbols (0 or 1)
            int symbols = 2;

            // Nested models will have two states each
            int[] states = new int[] { 2, 2 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            double likelihood = teacher.Run(inputs, outputs);

            return classifier;
        }
        public static void LearnAndPredictContinuous()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a univariate sequence and the same sequence backwards.
            double[][] sequences = new double[][]
            {
                new double[] { 0, 1, 2, 3, 4 }, // This is the first  sequence with label = 0
                new double[] { 4, 3, 2, 1, 0 }, // This is the second sequence with label = 1
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };

            // Creates a new Continuous-density Hidden Markov Model Sequence Classifier
            //  containing 2 hidden Markov Models with 2 states and an underlying Normal
            //  distribution as the continuous probability density.
            NormalDistribution density = new NormalDistribution();
            var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            // Train the sequence classifier using the algorithm
            teacher.Run(sequences, labels);


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood;

            // Try to classify the first sequence (output should be 0)
            int c1 = classifier.Compute(sequences[0], out likelihood);

            Console.WriteLine("c1: {0}", c1);

            // Try to classify the second sequence (output should be 1)
            int c2 = classifier.Compute(sequences[1], out likelihood);

            Console.WriteLine("c2: {0}", c2);
        }
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
        {
            HiddenMarkovClassifier <NormalDistribution> model = CreateModel1();

            var target = new MarkovContinuousFunction(model);

            var features = target.Features;

            double[] weights = target.Weights;

            Assert.AreEqual(26, features.Length);
            Assert.AreEqual(26, weights.Length);

            int k = 0;

            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                {
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].States; j++)
                    {
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);
                    }
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].Dimension; j++)
                    {
                        double mean = model[c].Emissions[i].Mean;
                        double var  = model[c].Emissions[i].Variance;

                        double l2ps = System.Math.Log(2 * System.Math.PI * var);

                        Assert.AreEqual(-0.5 * (l2ps + (mean * mean) / var), weights[k++]);
                        Assert.AreEqual(mean / var, weights[k++]);
                        Assert.AreEqual(-1.0 / (2 * var), weights[k++]);
                    }
                }
            }
        }
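The asserted count of 26 follows from how the potential function lays out its weights per class; the arithmetic below is an added note, not original test code:

        // 1 class prior + 2 initial states + 2*2 transitions
        // + 2 states * 1 dimension * 3 Gaussian terms (occupancy, x, x^2) = 13 per class
        int states = 2, dimension = 1;
        int perClass = 1 + states + states * states + states * dimension * 3; // 13
        int total = 2 * perClass; // 26, matching the assertions above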
Example #10
        /// <summary>
        ///   Creates a new <see cref="RunningMarkovClassifier"/>.
        /// </summary>
        ///
        /// <param name="model">The hidden Markov classifier model.</param>
        ///
        public RunningMarkovClassifier(HiddenMarkovClassifier model)
        {
            this.Classifier = model;

            this.Responses = new double[model.Classes];
            this.models    = new RunningMarkovStatistics[model.Classes];
            for (int i = 0; i < model.Classes; i++)
            {
                this.models[i] = new RunningMarkovStatistics(model[i]);
            }

            if (model.Threshold != null)
            {
                this.threshold = new RunningMarkovStatistics(model.Threshold);
            }
        }
Example #11
        public void RunTest()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model  = new HiddenConditionalRandomField <int>(function);
            var target = new HiddenQuasiNewtonLearning <int>(model);

            double[] actual   = new double[inputs.Length];
            double[] expected = new double[inputs.Length];

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            double ll0 = model.LogLikelihood(inputs, outputs);

            double error = target.Run(inputs, outputs);

            double ll1 = model.LogLikelihood(inputs, outputs);

            for (int i = 0; i < inputs.Length; i++)
            {
                actual[i]   = model.Compute(inputs[i]);
                expected[i] = outputs[i];
            }

            Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
            Assert.AreEqual(0.0, error, 1e-10);
            Assert.AreEqual(error, -ll1);
            Assert.IsFalse(Double.IsNaN(ll0));
            Assert.IsFalse(Double.IsNaN(error));

            for (int i = 0; i < inputs.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i]);
            }

            Assert.IsTrue(ll1 > ll0);
        }
        public HiddenMarkovClassifierLearning(HiddenMarkovClassifier classifier, HiddenMarkovModelLearningAlgorithmEntity algorithm = null)
        {
            mClassifier = classifier;

            mAlgorithmEntity = algorithm;

            if (mAlgorithmEntity == null)
            {
                mAlgorithmEntity = model_index => new BaumWelchLearning(classifier.Models[model_index])
                {
                    Tolerance  = 0.001,
                    Iterations = 0
                };
            }
        }
        public void ForwardTest2()
        {
            HiddenMarkovClassifier <Independent> hmm = IndependentMarkovClassifierPotentialFunctionTest
                                                       .CreateModel3();

            var function = new MarkovMultivariateFunction(hmm, includePriors: false);

            double[][][] observations = IndependentMarkovClassifierPotentialFunctionTest.sequences2;
            int[]        labels       = IndependentMarkovClassifierPotentialFunctionTest.labels2;

            foreach (double[][] x in observations)
            {
                foreach (int y in labels)
                {
                    double[] scaling1;
                    double   logLikelihood1;

                    double[,] actual = Accord.Statistics.Models.Fields.
                                       ForwardBackwardAlgorithm.Forward(function.Factors[y], x, y, out scaling1, out logLikelihood1);

                    double[] scaling2;
                    double   logLikelihood2;
                    double[,] expected = Accord.Statistics.Models.Markov.
                                         ForwardBackwardAlgorithm.Forward(hmm.Models[y], x, out scaling2, out logLikelihood2);

                    for (int i = 0; i < actual.GetLength(0); i++)
                    {
                        for (int j = 0; j < actual.GetLength(1); j++)
                        {
                            Assert.AreEqual(expected[i, j], actual[i, j], 1e-10);
                            Assert.IsFalse(Double.IsNaN(actual[i, j]));
                        }
                    }

                    Assert.AreEqual(logLikelihood1, logLikelihood2, 1e-10);

                    for (int i = 0; i < scaling1.Length; i++)
                    {
                        Assert.AreEqual(scaling1[i], scaling2[i], 1e-8);
                        Assert.IsFalse(Double.IsNaN(scaling1[i]));
                        Assert.IsFalse(Double.IsNaN(scaling2[i]));
                    }
                }
            }
        }
Example #14
        public void TrainModel(int states = 5, int iterations = 0, double tolerance = 0.01, bool rejection = false)
        {
            var samples = DataStore.Samples;
            var labels  = DataStore.Labels;

            double[][][] inputs  = new double[samples.Count][][];
            int[]        outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i]  = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            // The samples hold multivariate (double[][]) sequences, so a multivariate Normal
            // density over two-dimensional observations is used here (this assumes the _hmm
            // field is declared with matching generic parameters).
            _hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(labels.Count,
                new Forward(states), new MultivariateNormalDistribution(2), labels.ToArray());


            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(_hmm)
            {
                // Train each model until the log-likelihood changes less than the given tolerance
                Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(_hmm.Models[modelIndex])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    // Regularization keeps covariance estimates from degenerating
                    FittingOptions = new NormalOptions { Regularization = 1e-5 }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;


            // Run the learning algorithm
            teacher.Learn(inputs, outputs);


            // Classify all training instances
            foreach (var sample in samples)
            {
                sample.RecognizedAs = _hmm.Decide(sample.Input);
            }
        }
Example #15
File: Hmm.cs Project: ejulio/signa
        public void Aprender(IDadosSinaisDinamicos dados)
        {
            var quantidadeCaracteristicas = dados.CaracteristicasSinais[0][0].Length;
            hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
                classes: dados.QuantidadeClasses,
                topology: new Forward(QuantidadeEstados),
                initial: new MultivariateNormalDistribution(quantidadeCaracteristicas)
                );

            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,
                modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,
                    FittingOptions = new NormalOptions { Regularization = 1e-5}
                });

            teacher.Run(dados.CaracteristicasSinais, dados.IdentificadoresSinais);
        }
        public void ComputeTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            double actual;
            double expected;

            int[] x = { 0, 1 };

            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    double xa = model.Priors[c];
                    double xb = Math.Exp(model[c].Probabilities[i]);
                    double xc = Math.Exp(model[c].Emissions[i, x[0]]);
                    expected = xa * xb * xc;
                    actual   = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            double xb = Math.Exp(model[c].Transitions[i, j]);
                            double xc = Math.Exp(model[c].Emissions[j, x[t]]);
                            expected = xb * xc;
                            actual   = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }
            }
        }
        public void ComputeTest4()
        {
            int[]        labels;
            double[][][] words;
            HiddenMarkovClassifier <Independent <NormalDistribution> > model =
                CreateModel4(out words, out labels, false);

            var target = new MarkovMultivariateFunction(model);

            var hcrf = new HiddenConditionalRandomField <double[]>(target);


            Assert.AreEqual(3, model.Priors.Length);
            Assert.AreEqual(1 / 3.0, model.Priors[0]);
            Assert.AreEqual(1 / 3.0, model.Priors[1]);
            Assert.AreEqual(1 / 3.0, model.Priors[2]);

            check4(words, model, target, hcrf);
        }
        public void GradientTest_MarkovMultivariate()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model  = new HiddenConditionalRandomField <double[]>(function);
            var target = new ForwardBackwardGradient <double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length)
            {
                StepSize = 1e-5
            };

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
        private void LoadFromHMMFile_Click(object sender, EventArgs e)
        {
            OpenFileDialog dlg = new OpenFileDialog();

            dlg.Filter           = "Gestures (*.hmm)|*.hmm";
            dlg.Title            = "Load Gestures";
            dlg.RestoreDirectory = false;
            dlg.Multiselect      = true;

            if (dlg.ShowDialog(this) == DialogResult.OK)
            {
                _hmmc = HiddenMarkovClassifier.Load(dlg.FileName);
                _hmms = _hmmc.Models;
                if (_hmmc != null)
                {
                    lblResult.Text = "Success!!";
                }
                ReloadViewForm();
            }
        }
        public void LogForwardTest3()
        {
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier <MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs =
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var observations = inputs[0];

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                                            ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

            double logLikelihood;

            double[,] actual = Accord.Statistics.Models.Fields.
                               ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            double p = 0;

            for (int i = 0; i < hmm[0].States; i++)
            {
                p += Math.Exp(actual[observations.Length - 1, i]);
            }

            Assert.AreEqual(Math.Exp(logLikelihood), p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
        public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
        {
            HiddenMarkovClassifier model = CreateModel1();

            MarkovDiscreteFunction target = new MarkovDiscreteFunction(model);

            var features = target.Features;

            double[] weights = target.Weights;

            Assert.AreEqual(22, features.Length);
            Assert.AreEqual(22, weights.Length);

            int k = 0;

            for (int c = 0; c < model.Classes; c++)
            {
                Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

                for (int i = 0; i < model[c].States; i++)
                {
                    Assert.AreEqual(model[c].Probabilities[i], weights[k++]);
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].States; j++)
                    {
                        Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);
                    }
                }

                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model.Symbols; j++)
                    {
                        Assert.AreEqual(model[c].Emissions[i, j], weights[k++]);
                    }
                }
            }
        }
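The count of 22 asserted above can be reproduced the same way; in the discrete case the emission block holds one weight per state/symbol pair (again an added note, not original test code):

        // 1 class prior + 2 initial states + 2*2 transitions + 2 states * 2 symbols = 11 per class
        int states = 2, symbols = 2;
        int perClass = 1 + states + states * states + states * symbols; // 11
        int total = 2 * perClass; // 22, matching the assertions above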
Example #22
        private void button1_Click(object sender, EventArgs e)
        {
            var classes = 4;
            var states  = new[] { 1, 2, 2, 3 };
            var cat     = new[] { "ខ្ញុំ", "ទៅ", "ខ្លួន", "ក" };

            //var cat = new[] { "A", "B" };

            _hmmc = new HiddenMarkovClassifier(classes, states, 4, cat);

            // Train the ensemble
            var sequences = new[]
            {
                new[] { 1, 1, 1 },
                new[] { 0, 2 },
                new[] { 0, 1, 2 },
                new[] { 1, 2 }
            };

            var labels = new[] { 0, 1, 2, 3 };

            var teacher = new HiddenMarkovClassifierLearning(_hmmc, i =>
                new BaumWelchLearning(_hmmc.Models[i])
                {
                    Iterations = 0,
                    Tolerance = 0.0001
                }
            );

            teacher.Run(sequences, labels);

            var m = _hmmc.Models;

            var    test = new[] { 1, 2 };
            double likelihood;
            var    label = _hmmc.Compute(test, out likelihood);

            MessageBox.Show(_hmmc.Models[label].Tag.ToString() + " P =" + likelihood);
        }
Example #23
        public void GradientTest2()
        {
            HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
            var function = new MarkovDiscreteFunction(hmm);

            var model  = new HiddenConditionalRandomField <int>(function);
            var target = new ForwardBackwardGradient <int>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters);

            double[] expected = diff.Compute(function.Weights);
            double[] actual   = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 1e-5);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
        public static HiddenMarkovClassifier<Independent> CreateModel3(out double[][][] sequences2, out int[] labels2)
        {
            sequences2 = new double[][][]
            {
                new double[][] 
                { 
                    // First sequence with label = 0
                    new double[] { 1, 1.12, 2.41, 1.17, 9.3 },
                    new double[] { 1, 2.54, 1.45, 0.16, 4.5 },
                    new double[] { 1, 3.46, 2.63, 1.15, 9.2 },
                    new double[] { 1, 4.73, 0.41, 1.54, 5.5 },
                    new double[] { 2, 5.81, 2.42, 1.13, 9.1 },
                }, 

                new double[][] 
                { 
                    // Second sequence with label = 0
                    new double[] { 0, 1.49, 2.48, 1.18, 9.37 },
                    new double[] { 1, 2.18, 1.44, 2.19, 1.56 },
                    new double[] { 1, 3.77, 2.62, 1.10, 9.25 },
                    new double[] { 2, 4.76, 5.44, 3.58, 5.54 },
                    new double[] { 2, 5.85, 2.46, 1.16, 5.13 },
                    new double[] { 2, 4.84, 5.44, 3.54, 5.52 },
                    new double[] { 2, 5.83, 3.41, 1.22, 5.11 },
                }, 

                new double[][] 
                { 
                    // Third sequence with label = 0
                    new double[] { 2, 1.11, 2.41, 1.12, 2.31 },
                    new double[] { 1, 2.52, 3.73, 0.12, 4.50 },
                    new double[] { 1, 3.43, 2.61, 1.24, 9.29 },
                    new double[] { 1, 4.74, 2.42, 2.55, 6.57 },
                    new double[] { 2, 5.85, 2.43, 1.16, 9.16 },
                }, 

                new double[][]
                {
                    // First sequence with label = 1
                    new double[] { 0,  1.26, 5.44, 1.56, 9.55 },
                    new double[] { 2,  2.67, 5.45, 4.27, 1.54 },
                    new double[] { 1,  1.28, 3.46, 2.18, 4.13 },
                    new double[] { 1,  5.89, 2.57, 1.79, 5.02 },
                    new double[] { 0,  1.40, 2.48, 2.10, 6.41 },
                },

                new double[][]
                {
                    // Second sequence with label = 1
                    new double[] { 2,  3.21, 2.49, 1.54, 9.17 },
                    new double[] { 2,  2.62, 5.40, 4.25, 1.54 },
                    new double[] { 1,  1.53, 6.49, 2.17, 4.52 },
                    new double[] { 1,  2.84, 2.58, 1.73, 6.04 },
                    new double[] { 1,  1.45, 2.47, 2.28, 5.42 },
                    new double[] { 1,  1.46, 2.46, 2.35, 5.41 },
                },

                new double[][]
                {
                    // Third sequence with label = 1
                    new double[] { 1,  5.27, 5.45, 1.4, 9.5 },
                    new double[] { 2,  2.68, 2.54, 3.2, 2.2 },
                    new double[] { 1,  2.89, 3.83, 2.6, 4.1 },
                    new double[] { 1,  1.80, 1.32, 1.2, 4.2 },
                    new double[] { 0,  1.41, 2.41, 2.1, 6.4 },
                }
            };

            labels2 = new[] { 0, 0, 0, 1, 1, 1 };

            // Create a continuous-density hidden Markov model sequence classifier
            // to distinguish between the two classes of multivariate sequences above.
            var comp1 = new GeneralDiscreteDistribution(3);
            var comp2 = new NormalDistribution(1);
            var comp3 = new NormalDistribution(2);
            var comp4 = new NormalDistribution(3);
            var comp5 = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
            // each (Forward topology) and an underlying Independent density that combines
            // one discrete and four Normal components.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            return classifier;
        }
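A hedged usage sketch for the classifier returned above (the candidate sequence is made up for illustration); each observation pairs one discrete coordinate (0, 1 or 2) with four continuous features, matching the Independent density:

        double[][][] seqs; int[] labs;
        var classifier = CreateModel3(out seqs, out labs);

        // A short sequence shaped like the label = 0 training data (hypothetical values)
        double[][] candidate =
        {
            new double[] { 1, 1.1, 2.3, 1.2, 9.1 },
            new double[] { 1, 2.5, 1.5, 0.2, 4.6 },
        };

        double logLikelihood;
        int label = classifier.Compute(candidate, out logLikelihood);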
Example #26
        private void btnLearnHMM_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String> classes = database.Classes;

            double[][][] inputs = new double[samples.Count][][];
            int[] outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i] = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            int states = 5;
            int iterations = 0;
            double tolerance = 0.01;
            bool rejection = false;


            hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(classes.Count,
                new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());


            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,

                // Train each model using the selected convergence criteria
                i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            );

            teacher.Empirical = true;
            teacher.Rejection = rejection;


            // Run the learning algorithm
            double error = teacher.Run(inputs, outputs);


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = hmm.Compute(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                    Color.LightGreen : Color.White;
            }

            btnLearnHCRF.Enabled = true;
        }
        /// <summary>
        ///   Creates a new instance of the learning algorithm for a given
        ///   Markov sequence classifier.
        /// </summary>
        ///
        public HiddenMarkovClassifierLearning(HiddenMarkovClassifier<TDistribution, TObservation> classifier)
            : base(classifier)
        {
        }
        private static void check4(double[][][] words, HiddenMarkovClassifier<Independent> model, MarkovMultivariateFunction target, HiddenConditionalRandomField<double[]> hcrf)
        {
            double actual;
            double expected;

            foreach (var x in words)
            {
                for (int c = 0; c < model.Classes; c++)
                {
                    for (int i = 0; i < model[c].States; i++)
                    {
                        // Check initial state transitions
                        double xa = model.Priors[c];
                        double xb = Math.Exp(model[c].Probabilities[i]);
                        double xc = model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                        expected = xa * xb * xc;
                        actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));
                        Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
                        Assert.IsFalse(double.IsNaN(actual));
                    }

                    for (int t = 1; t < x.Length; t++)
                    {
                        // Check normal state transitions
                        for (int i = 0; i < model[c].States; i++)
                        {
                            for (int j = 0; j < model[c].States; j++)
                            {
                                double xb = Math.Exp(model[c].Transitions[i, j]);
                                double xc = model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                                expected = xb * xc;
                                actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));
                                Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
                                Assert.IsFalse(double.IsNaN(actual));
                            }
                        }
                    }

                    actual = Math.Exp(model.LogLikelihood(x, c));
                    expected = Math.Exp(hcrf.LogLikelihood(x, c));
                    Assert.AreEqual(expected, actual, 1e-10);
                    Assert.IsFalse(double.IsNaN(actual));

                    actual = model.Compute(x);
                    expected = hcrf.Compute(x);
                    Assert.AreEqual(expected, actual);
                    Assert.IsFalse(double.IsNaN(actual));
                }
            }
        }
Example #29
        private void openDataDialog_FileOk(object sender, System.ComponentModel.CancelEventArgs e)
        {
            hmm = null;
            hcrf = null;

            using (var stream = openDataDialog.OpenFile())
                database.Load(stream);

            btnLearnHMM.Enabled = true;
            btnLearnHCRF.Enabled = false;

            panelClassification.Visible = false;
            panelUserLabeling.Visible = false;
        }
        /// <summary>
        ///   Constructs a new potential function modeling Hidden Markov Models.
        /// </summary>
        ///
        /// <param name="classifier">The classifier model.</param>
        /// <param name="includePriors">True to include class features (priors), false otherwise.</param>
        ///
        public MarkovDiscreteFunction(HiddenMarkovClassifier <GeneralDiscreteDistribution, int> classifier, bool includePriors = true)
        {
            this.Symbols = classifier.Models[0].Emissions[0].Length;
            this.Outputs = classifier.Classes;

            int factorIndex    = 0;
            var factorParams   = new List <double>();
            var factorFeatures = new List <IFeature <int> >();

            this.Factors = new FactorPotential <int> [Outputs];

            int[] classOffset = new int[classifier.Classes];
            int[] edgeOffset  = new int[classifier.Classes];
            int[] stateOffset = new int[classifier.Classes];
            int[] classCount  = new int[classifier.Classes];
            int[] edgeCount   = new int[classifier.Classes];
            int[] stateCount  = new int[classifier.Classes];


            // Create features for initial class probabilities
            for (int c = 0; c < classifier.Classes; c++)
            {
                var stateParams   = new List <double>();
                var stateFeatures = new List <IFeature <int> >();

                var edgeParams   = new List <double>();
                var edgeFeatures = new List <IFeature <int> >();

                var classParams   = new List <double>();
                var classFeatures = new List <IFeature <int> >();

                var model = classifier[c];

                if (includePriors)
                {
                    // Create features for class labels
                    classParams.Add(Math.Log(classifier.Priors[c]));
                    classFeatures.Add(new OutputFeature <int>(this, c, c));
                }

                // Create features for initial state probabilities
                for (int i = 0; i < model.States; i++)
                {
                    edgeParams.Add(model.LogInitial[i]);
                    edgeFeatures.Add(new InitialFeature <int>(this, c, i));
                }

                // Create features for state transition probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int j = 0; j < model.States; j++)
                    {
                        edgeParams.Add(model.LogTransitions[i][j]);
                        edgeFeatures.Add(new TransitionFeature <int>(this, c, i, j));
                    }
                }

                // Create features for symbol emission probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int k = 0; k < model.Emissions[i].Length; k++)
                    {
                        stateParams.Add(model.Emissions[i][k]);
                        stateFeatures.Add(new EmissionFeature(this, c, i, k));
                    }
                }


                classOffset[c] = factorIndex;
                edgeOffset[c]  = factorIndex + classParams.Count;
                stateOffset[c] = factorIndex + classParams.Count + edgeParams.Count;

                classCount[c] = classParams.Count;
                edgeCount[c]  = edgeParams.Count;
                stateCount[c] = stateParams.Count;


                // 1. classes
                factorFeatures.AddRange(classFeatures);
                factorParams.AddRange(classParams);

                // 2. edges
                factorFeatures.AddRange(edgeFeatures);
                factorParams.AddRange(edgeParams);

                // 3. states
                factorFeatures.AddRange(stateFeatures);
                factorParams.AddRange(stateParams);

                factorIndex += classParams.Count + stateParams.Count + edgeParams.Count;
            }

            Accord.Diagnostics.Debug.Assert(factorIndex == factorParams.Count);
            Accord.Diagnostics.Debug.Assert(factorIndex == factorFeatures.Count);

            this.Weights  = factorParams.ToArray();
            this.Features = factorFeatures.ToArray();


            for (int c = 0; c < classifier.Models.Length; c++)
            {
                Factors[c] = new MarkovDiscreteFactor(this, classifier.Models[c].States, c, Symbols,
                                                      classIndex: classOffset[c], classCount: classCount[c],  // 1. classes
                                                      edgeIndex: edgeOffset[c], edgeCount: edgeCount[c],      // 2. edges
                                                      stateIndex: stateOffset[c], stateCount: stateCount[c]); // 3. states
            }
        }
        /// <summary>
        ///   Constructs a new potential function modeling Hidden Markov Models.
        /// </summary>
        ///
        /// <param name="classifier">A hidden Markov sequence classifier.</param>
        /// <param name="includePriors">True to include class features (priors), false otherwise.</param>
        ///
        public MarkovMultivariateFunction(
            HiddenMarkovClassifier <MultivariateNormalDistribution> classifier, bool includePriors = true)
        {
            this.Outputs    = classifier.Classes;
            this.Dimensions = classifier.Models[0].Dimension;

            int factorIndex    = 0;
            var factorParams   = new List <double>();
            var factorFeatures = new List <IFeature <double[]> >();

            this.Factors = new FactorPotential <double[]> [Outputs];

            int[] classOffset = new int[classifier.Classes];
            int[] edgeOffset  = new int[classifier.Classes];
            int[] stateOffset = new int[classifier.Classes];
            int[] classCount  = new int[classifier.Classes];
            int[] edgeCount   = new int[classifier.Classes];
            int[] stateCount  = new int[classifier.Classes];


            // Create features for initial class probabilities
            for (int c = 0; c < classifier.Classes; c++)
            {
                var stateParams   = new List <double>();
                var stateFeatures = new List <IFeature <double[]> >();

                var edgeParams   = new List <double>();
                var edgeFeatures = new List <IFeature <double[]> >();

                var classParams   = new List <double>();
                var classFeatures = new List <IFeature <double[]> >();

                var model = classifier[c];

                if (includePriors)
                {
                    // Create features for class labels
                    classParams.Add(Math.Log(classifier.Priors[c]));
                    classFeatures.Add(new OutputFeature <double[]>(this, c, c));
                }

                // Create features for initial state probabilities
                for (int i = 0; i < model.States; i++)
                {
                    edgeParams.Add(model.Probabilities[i]);
                    edgeFeatures.Add(new InitialFeature <double[]>(this, c, i));
                }

                // Create features for state transition probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int j = 0; j < model.States; j++)
                    {
                        edgeParams.Add(model.Transitions[i, j]);
                        edgeFeatures.Add(new TransitionFeature <double[]>(this, c, i, j));
                    }
                }

                // Create features for emission probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int d = 0; d < model.Dimension; d++)
                    {
                        double mean = model.Emissions[i].Mean[d];
                        double var  = model.Emissions[i].Variance[d];

                        double u  = -0.5 * (Math.Log(2.0 * Math.PI * var) + (mean * mean) / var);
                        double m1 = mean / var;
                        double m2 = -1.0 / (2.0 * var);

                        // Occupancy
                        stateParams.Add(u);
                        stateFeatures.Add(new OccupancyFeature <double[]>(this, c, i));

                        // 1st Moment (x)
                        stateParams.Add(m1);
                        stateFeatures.Add(new MultivariateFirstMomentFeature(this, c, i, d));

                        // 2nd Moment (x²)
                        stateParams.Add(m2);
                        stateFeatures.Add(new MultivariateSecondMomentFeature(this, c, i, d));
                    }
                }

                classOffset[c] = factorIndex;
                edgeOffset[c]  = factorIndex + classParams.Count;
                stateOffset[c] = factorIndex + classParams.Count + edgeParams.Count;

                classCount[c] = classParams.Count;
                edgeCount[c]  = edgeParams.Count;
                stateCount[c] = stateParams.Count;


                // 1. classes
                factorFeatures.AddRange(classFeatures);
                factorParams.AddRange(classParams);

                // 2. edges
                factorFeatures.AddRange(edgeFeatures);
                factorParams.AddRange(edgeParams);

                // 3. states
                factorFeatures.AddRange(stateFeatures);
                factorParams.AddRange(stateParams);

                factorIndex += classParams.Count + stateParams.Count + edgeParams.Count;
            }

            System.Diagnostics.Debug.Assert(factorIndex == factorParams.Count);
            System.Diagnostics.Debug.Assert(factorIndex == factorFeatures.Count);

            this.Weights  = factorParams.ToArray();
            this.Features = factorFeatures.ToArray();

            for (int c = 0; c < classifier.Models.Length; c++)
            {
                Factors[c] = new MarkovMultivariateNormalFactor(this, classifier.Models[c].States, c, Dimensions,
                                                                classIndex: classOffset[c], classCount: classCount[c],  // 1. classes
                                                                edgeIndex: edgeOffset[c], edgeCount: edgeCount[c],      // 2. edges
                                                                stateIndex: stateOffset[c], stateCount: stateCount[c]); // 3. states
            }
        }
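For reference, the emission weights (u, m1, m2) built above come from expanding the log of a univariate Gaussian into a polynomial in x, so that log N(x; mean, var) = u + m1*x + m2*x^2. The numeric check below is an added illustration, not part of the original constructor:

        double mean = 1.5, variance = 0.7, x = 0.3;
        double u  = -0.5 * (Math.Log(2.0 * Math.PI * variance) + (mean * mean) / variance);
        double m1 = mean / variance;
        double m2 = -1.0 / (2.0 * variance);

        double viaWeights = u + m1 * x + m2 * x * x;
        double direct = -0.5 * Math.Log(2.0 * Math.PI * variance)
                        - (x - mean) * (x - mean) / (2.0 * variance);
        // viaWeights and direct agree up to floating-point rounding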
Example #32
        /// <summary>
        ///   Creates a new instance of the learning algorithm for a given
        ///   Markov sequence classifier using the specified configuration
        ///   function.
        /// </summary>
        ///
        public HiddenMarkovClassifierLearning(HiddenMarkovClassifier<TDistribution> classifier,
            ClassifierLearningAlgorithmConfiguration algorithm)
            : base(classifier, algorithm)
        {
        }
        public void LearnTest2()
        {
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,0,1,2 },     // Class 0
                new int[] { 0,1,1,2 },     // Class 0
                new int[] { 0,0,0,1,2 },   // Class 0
                new int[] { 0,1,2,2,2 },   // Class 0

                new int[] { 2,2,1,0 },     // Class 1
                new int[] { 2,2,2,1,0 },   // Class 1
                new int[] { 2,2,2,1,0 },   // Class 1
                new int[] { 2,2,2,2,1 },   // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to 3 symbols (0,1,2)
            int symbols = 3;

            // Nested models will have 3 states each
            int[] states = new int[] { 3, 3 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 0
                }
            );

            // Enable support for sequence rejection
            teacher.Rejection = true;

            // Train the sequence classifier using the algorithm
            double likelihood = teacher.Run(inputs, outputs);

            HiddenMarkovModel threshold = classifier.Threshold;

            Assert.AreEqual(6, threshold.States);

            Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10);
            Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10);
            Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10);

            Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10);
            Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10);
            Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10);

            for (int i = 0; i < 3; i++)
                for (int j = 3; j < 6; j++)
                    Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);

            for (int i = 3; i < 6; i++)
                for (int j = 0; j < 3; j++)
                    Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);

            Assert.IsFalse(Matrix.HasNaN(threshold.Transitions));

            classifier.Sensitivity = 0.5;

            // Will assert the models have learned the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual = classifier.Compute(inputs[i], out likelihood);
                Assert.AreEqual(expected, actual);
            }


            int[] r0 = new int[] { 1, 1, 0, 0, 2 };


            double logRejection;
            int c = classifier.Compute(r0, out logRejection);

            Assert.AreEqual(-1, c);
            Assert.AreEqual(0.99906957195279988, logRejection);
            Assert.IsFalse(double.IsNaN(logRejection));

            logRejection = threshold.Evaluate(r0);
            Assert.AreEqual(-4.5653702970734793, logRejection, 1e-10);
            Assert.IsFalse(double.IsNaN(logRejection));

            threshold.Decode(r0, out logRejection);
            Assert.AreEqual(-8.21169955167614, logRejection, 1e-10);
            Assert.IsFalse(double.IsNaN(logRejection));

            foreach (var model in classifier.Models)
            {
                double[,] A = model.Transitions;

                for (int i = 0; i < A.GetLength(0); i++)
                {
                    double[] row = A.Exp().GetRow(i);
                    double sum = row.Sum();
                    Assert.AreEqual(1, sum, 1e-10);
                }
            }
            {
                double[,] A = classifier.Threshold.Transitions;

                for (int i = 0; i < A.GetLength(0); i++)
                {
                    double[] row = A.GetRow(i);
                    double sum = row.Exp().Sum();
                    Assert.AreEqual(1, sum, 1e-6);
                }
            }
        }
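        // With Rejection = true, the teacher also builds the composite threshold
        // model verified above, and Compute returns -1 whenever a sequence matches
        // that threshold model better than any trained class. A minimal usage
        // sketch (illustrative helper; the classifier is assumed trained as above):
        private static bool IsRejected(HiddenMarkovClassifier classifier, int[] candidate)
        {
            double confidence;
            int decision = classifier.Compute(candidate, out confidence);
            return decision == -1; // -1 signals rejection by the threshold model
        }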
        public void RecognizeAsync(ISoundSignalReader signal, HiddenMarkovClassifier hmm, Action<string> handleMessage,
            SignalVisitor voiceVisitor = null)
        {
            Action<List<double[]>> action = features =>
            {
                var observations = _codeBook.Quantize(features.Select(item => new Point(item)).ToArray());
                double[] responsibilities;
                var ret = hmm.Compute(observations, out responsibilities);

                // Track the best per-model likelihood (diagnostic only; the
                // reported name comes from the classifier's own decision).
                var models = hmm.Models;
                var likelihoodValue = Double.MinValue;

                foreach (var model in models)
                {
                    var val = model.Evaluate(observations);
                    if (val > likelihoodValue)
                    {
                        likelihoodValue = val;
                    }
                }

                handleMessage(hmm[ret].Tag.ToString());
            };

            var featureUtility = new FeatureUtility(_engineParameters);
            featureUtility.ExtractFeaturesAsync(signal, action, voiceVisitor);
        }
        public TrainResult TrainAll(Dictionary<string, IList<ISoundSignalReader>> signalsDictionary,
            SignalVisitor voiceVisitor = null)
        {
            var numberOfItems = 0;
            foreach (var item in signalsDictionary)
            {
                numberOfItems += item.Value.Count;
            }

            double[][][][] featuresInput = new double[signalsDictionary.Count][][][];

            int[] models = new int[numberOfItems];
            var allSignalIndex = 0;
            var modelIndex = 0;

            var featureUtility = new FeatureUtility(_engineParameters);

            foreach (var item in signalsDictionary)
            {
                var signals = item.Value; // signals
                var signalsCount = signals.Count();

                featuresInput[modelIndex] = new double[signalsCount][][];

                for (var signalIndex = 0; signalIndex < signalsCount; signalIndex++)
                {
                    var signal = signals[signalIndex];
                    List<Double[]> features = featureUtility.ExtractFeatures(signal, voiceVisitor).First();

                    featuresInput[modelIndex][signalIndex] = features.ToArray();
                    models[allSignalIndex] = modelIndex;
                    allSignalIndex++;
                }
                modelIndex++;
            }

            List<int[]> observables = new List<int[]>();

            for (int wordIndex = 0; wordIndex < featuresInput.Length; wordIndex++) // foreach word
            {
                for (var signalIndex = 0; signalIndex < featuresInput[wordIndex].Length; signalIndex++) // foreach word signal
                {
                    // convert features to points
                    var points = featuresInput[wordIndex][signalIndex].Select(item => new Point(item));

                    var codeItems = _codeBook.Quantize(points.ToArray());
                    observables.Add(codeItems);
                }
            }

            var hmm = new HiddenMarkovClassifier(signalsDictionary.Count, new Forward(_numberOfHiddenStates),
                _codeBook.Size, signalsDictionary.Keys.ToArray());

            const int iterations = 200;
            const double tolerance = 0;

            var teacher = new HiddenMarkovClassifierLearning(hmm,
                i => new ViterbiLearning(hmm.Models[i]) {Iterations = iterations, Tolerance = tolerance}
                );

            teacher.Run(observables.ToArray(), models);

            return new TrainResult {Catalog = _codeBook, Models = hmm.Models.ToArray()};
        }
        public static HiddenMarkovClassifier<Independent<NormalDistribution>> CreateModel4(out double[][][] words, out int[] labels, bool usePriors)
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][] wardrobe2 =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.2 },
            };

            words = new double[][][] { hello, car, wardrobe, wardrobe2 };

            labels = new [] { 0, 1, 2, 2 };

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );


            int numberOfWords  = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(classifier,

                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(classifier.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 100,

                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            );

            if (usePriors)
            {
                teacher.Empirical = true;
            }

            double logLikelihood = teacher.Run(words, labels);

            Assert.AreEqual(208.38345600145777d, logLikelihood);

            return classifier;
        }
        public static HiddenMarkovClassifier<Independent> CreateModel3(out double[][][] sequences2, out int[] labels2)
        {
            sequences2 = new double[][][]
            {
                new double[][]
                {
                    // This is the first  sequence with label = 0
                    new double[] { 1, 1.12, 2.41, 1.17, 9.3 },
                    new double[] { 1, 2.54, 1.45, 0.16, 4.5 },
                    new double[] { 1, 3.46, 2.63, 1.15, 9.2 },
                    new double[] { 1, 4.73, 0.41, 1.54, 5.5 },
                    new double[] { 2, 5.81, 2.42, 1.13, 9.1 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 0
                    new double[] { 0, 1.49, 2.48, 1.18, 9.37 },
                    new double[] { 1, 2.18, 1.44, 2.19, 1.56 },
                    new double[] { 1, 3.77, 2.62, 1.10, 9.25 },
                    new double[] { 2, 4.76, 5.44, 3.58, 5.54 },
                    new double[] { 2, 5.85, 2.46, 1.16, 5.13 },
                    new double[] { 2, 4.84, 5.44, 3.54, 5.52 },
                    new double[] { 2, 5.83, 3.41, 1.22, 5.11 },
                },

                new double[][]
                {
                    // This is the third sequence with label = 0
                    new double[] { 2, 1.11, 2.41, 1.12, 2.31 },
                    new double[] { 1, 2.52, 3.73, 0.12, 4.50 },
                    new double[] { 1, 3.43, 2.61, 1.24, 9.29 },
                    new double[] { 1, 4.74, 2.42, 2.55, 6.57 },
                    new double[] { 2, 5.85, 2.43, 1.16, 9.16 },
                },

                new double[][]
                {
                    // This is the first sequence with label = 1
                    new double[] { 0, 1.26, 5.44, 1.56, 9.55 },
                    new double[] { 2, 2.67, 5.45, 4.27, 1.54 },
                    new double[] { 1, 1.28, 3.46, 2.18, 4.13 },
                    new double[] { 1, 5.89, 2.57, 1.79, 5.02 },
                    new double[] { 0, 1.40, 2.48, 2.10, 6.41 },
                },

                new double[][]
                {
                    // This is the second sequence with label = 1
                    new double[] { 2, 3.21, 2.49, 1.54, 9.17 },
                    new double[] { 2, 2.62, 5.40, 4.25, 1.54 },
                    new double[] { 1, 1.53, 6.49, 2.17, 4.52 },
                    new double[] { 1, 2.84, 2.58, 1.73, 6.04 },
                    new double[] { 1, 1.45, 2.47, 2.28, 5.42 },
                    new double[] { 1, 1.46, 2.46, 2.35, 5.41 },
                },

                new double[][]
                {
                    // This is the third sequence with label = 1
                    new double[] { 1, 5.27, 5.45, 1.4, 9.5 },
                    new double[] { 2, 2.68, 2.54, 3.2, 2.2 },
                    new double[] { 1, 2.89, 3.83, 2.6, 4.1 },
                    new double[] { 1, 1.80, 1.32, 1.2, 4.2 },
                    new double[] { 0, 1.41, 2.41, 2.1, 6.4 },
                }
            };

            labels2 = new[] { 0, 0, 0, 1, 1, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to distinguish between the two classes of multivariate sequences above.
            var comp1   = new GeneralDiscreteDistribution(3);
            var comp2   = new NormalDistribution(1);
            var comp3   = new NormalDistribution(2);
            var comp4   = new NormalDistribution(3);
            var comp5   = new NormalDistribution(4);
            var density = new Independent(comp1, comp2, comp3, comp4, comp5);

            // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
            // and an underlying Independent density combining one discrete and four Normal distributions.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Forward(5), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance  = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences2, labels2);

            Assert.AreEqual(-3.0493028798326081d, logLikelihood, 1e-10);

            return classifier;
        }
        public static HiddenMarkovClassifier<Independent> CreateModel3()
        {
            // Same classifier as the overload above; delegate to it and
            // discard the generated training data.
            double[][][] sequences2;
            int[] labels2;
            return CreateModel3(out sequences2, out labels2);
        }
        public void SimpleGestureRecognitionTest()
        {
            // Let's say we would like to do a very simple mechanism for
            // gesture recognition. In this example, we will be trying to
            // create a classifier that can distinguish between the words
            // "hello", "car", and "wardrobe".

            // Let's say we decided to acquire some data, and we asked some
            // people to perform those words in front of a Kinect camera, and,
            // using Microsoft's SDK, we were able to capture the x and y
            // coordinates of each hand while the word was being performed.

            // Let's say we decided to represent our frames as:
            //
            //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
            //
            // Since we captured words, this means we captured sequences of
            // frames as we described above. Let's write some of those as
            // rough examples to explain how gesture recognition can be done:

            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            // Here, please note that a real-world example would involve *lots*
            // of samples for each word. Here, we are considering just one from
            // each class, which is clearly sub-optimal and should _never_ be done
            // in practice. For example purposes, however, please disregard this.

            // Those are the words we have in our vocabulary:
            //
            double[][][] words = { hello, car, wardrobe };

            // Now, let's associate integer labels with them. This is needed
            // for the case where there are multiple samples for each word.
            //
            int[] labels = { 0, 1, 2 };


            // We will create our classifiers assuming an independent
            // Gaussian distribution for each component in our feature
            // vectors (akin to a Naive Bayes assumption).

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );


            // Now, we can proceed and create our classifier.
            //
            int numberOfWords  = 3; // we are trying to distinguish between 3 words
            int numberOfStates = 5; // this value can be found by trial-and-error

            var hmm = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates), // word classifiers should use a forward topology
                initial: initial
            );

            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(hmm,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(hmm.Models[modelIndex])
                {
                    Tolerance  = 0.001,
                    Iterations = 100,

                    // This is necessary so the code doesn't blow up when it realizes
                    // there is only one sample per word class. But this could also
                    // be needed in normal situations as well.
                    //
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            );

            // Finally, we can run the learning algorithm!
            double logLikelihood = teacher.Run(words, labels);

            // At this point, the classifier should be successfully
            // able to distinguish between our three word classes:
            //
            int tc1 = hmm.Compute(hello);
            int tc2 = hmm.Compute(car);
            int tc3 = hmm.Compute(wardrobe);

            Assert.AreEqual(0, tc1);
            Assert.AreEqual(1, tc2);
            Assert.AreEqual(2, tc3);

            // Now, we can use the Markov classifier to initialize an HCRF
            var function = new MarkovMultivariateFunction(hmm);
            var hcrf     = new HiddenConditionalRandomField<double[]>(function);


            // We can check that both are equivalent, although they have
            // formulations that can be learned with different methods
            //
            for (int i = 0; i < words.Length; i++)
            {
                // Should be the same
                int expected = hmm.Compute(words[i]);
                int actual   = hcrf.Compute(words[i]);

                // Should be the same
                double h0 = hmm.LogLikelihood(words[i], 0);
                double c0 = hcrf.LogLikelihood(words[i], 0);

                double h1 = hmm.LogLikelihood(words[i], 1);
                double c1 = hcrf.LogLikelihood(words[i], 1);

                double h2 = hmm.LogLikelihood(words[i], 2);
                double c2 = hcrf.LogLikelihood(words[i], 2);

                Assert.AreEqual(expected, actual);
                Assert.AreEqual(h0, c0, 1e-10);
                Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
                Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));

                Assert.IsFalse(double.IsNaN(c0));
                Assert.IsFalse(double.IsNaN(c1));
                Assert.IsFalse(double.IsNaN(c2));
            }


            // Now we can learn the HCRF using one of the best learning
            // algorithms available, Resilient Backpropagation learning:

            // Create a learning algorithm
            var rprop = new HiddenResilientGradientLearning<double[]>(hcrf)
            {
                Iterations = 50,
                Tolerance  = 1e-5
            };

            // Run the algorithm and learn the models
            double error = rprop.Run(words, labels);

            // At this point, the HCRF should be successfully
            // able to distinguish between our three word classes:
            //
            int hc1 = hcrf.Compute(hello);
            int hc2 = hcrf.Compute(car);
            int hc3 = hcrf.Compute(wardrobe);

            Assert.AreEqual(0, hc1);
            Assert.AreEqual(1, hc2);
            Assert.AreEqual(2, hc3);
        }
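        // Once trained, either model can label previously unseen frame sequences.
        // A minimal sketch with a made-up gesture recording (the frame values and
        // the helper name are illustrative, not taken from the test data above):
        private static void classifyUnseenGesture(
            HiddenMarkovClassifier<Independent<NormalDistribution>> hmm,
            HiddenConditionalRandomField<double[]> hcrf)
        {
            double[][] unseen =
            {
                new double[] { 0.9, 0.1, 0.0, 0.1 }, // frames loosely resembling
                new double[] { 0.1, 0.9, 0.1, 0.1 }, // the "hello" gesture
                new double[] { 0.0, 0.1, 0.9, 0.0 },
            };

            int hmmLabel  = hmm.Compute(unseen);  // likely class 0 ("hello")
            int hcrfLabel = hcrf.Compute(unseen); // should agree with the HMM
        }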
        private static HiddenMarkovClassifier<NormalDistribution> createClassifier(
            out double[][] sequences, bool rejection = false)
        {
            sequences = new double[][] 
            {
                new double[] { 0,1,2,3,4 }, 
                new double[] { 4,3,2,1,0 }, 
            };

            int[] labels = { 0, 1 };

            NormalDistribution density = new NormalDistribution();
            HiddenMarkovClassifier<NormalDistribution> classifier =
                new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

            var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,

                modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            teacher.Rejection = rejection;
            teacher.Run(sequences, labels);

            return classifier;
        }
        public void SaveLoadTest()
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][][] words = { hello, car, wardrobe };

            int[] labels = { 0, 1, 2 };

            var initial = new Independent
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );

            int numberOfWords  = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
                modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
                {
                    Tolerance      = 0.001,
                    Iterations     = 100,
                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions()
                        {
                            Regularization = 1e-5
                        }
                    }
                }
            );

            double logLikelihood = teacher.Run(words, labels);

            var function = new MarkovMultivariateFunction(classifier);
            var hcrf     = new HiddenConditionalRandomField<double[]>(function);


            MemoryStream stream = new MemoryStream();

            hcrf.Save(stream);

            stream.Seek(0, SeekOrigin.Begin);

            var target = HiddenConditionalRandomField<double[]>.Load(stream);

            Assert.AreEqual(hcrf.Function.Factors.Length, target.Function.Factors.Length);
            for (int i = 0; i < hcrf.Function.Factors.Length; i++)
            {
                var e = hcrf.Function.Factors[i];
                var a = target.Function.Factors[i];
                Assert.AreEqual(e.Index, target.Function.Factors[i].Index);
                Assert.AreEqual(e.States, target.Function.Factors[i].States);

                Assert.AreEqual(e.EdgeParameters.Count, a.EdgeParameters.Count);
                Assert.AreEqual(e.EdgeParameters.Offset, a.EdgeParameters.Offset);
                Assert.AreEqual(e.FactorParameters.Count, a.FactorParameters.Count);
                Assert.AreEqual(e.FactorParameters.Offset, a.FactorParameters.Offset);

                Assert.AreEqual(e.OutputParameters.Count, a.OutputParameters.Count);
                Assert.AreEqual(e.OutputParameters.Offset, a.OutputParameters.Offset);
                Assert.AreEqual(e.StateParameters.Count, a.StateParameters.Count);
                Assert.AreEqual(e.StateParameters.Offset, a.StateParameters.Offset);

                Assert.AreEqual(target.Function, a.Owner);
                Assert.AreEqual(hcrf.Function, e.Owner);
            }

            Assert.AreEqual(hcrf.Function.Features.Length, target.Function.Features.Length);
            for (int i = 0; i < hcrf.Function.Factors.Length; i++)
            {
                Assert.AreEqual(hcrf.Function.Features[i].GetType(), target.Function.Features[i].GetType());
            }

            Assert.AreEqual(hcrf.Function.Outputs, target.Function.Outputs);

            for (int i = 0; i < hcrf.Function.Weights.Length; i++)
            {
                Assert.AreEqual(hcrf.Function.Weights[i], target.Function.Weights[i]);
            }
        }
        public static HiddenMarkovClassifier<Independent> CreateModel2(out double[][][] sequences, out int[] labels)
        {
            sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1.1 },
                    new double[] { 1, 2.5 },
                    new double[] { 1, 3.4 },
                    new double[] { 1, 4.7 },
                    new double[] { 2, 5.8 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 2,  3.2 },
                    new double[] { 2,  2.6 },
                    new double[] { 1,  1.2 },
                    new double[] { 1,  0.8 },
                    new double[] { 0,  1.1 },
                }
            };

            labels = new[] { 0, 1 };

            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            var comp1 = new GeneralDiscreteDistribution(3);
            var comp2 = new NormalDistribution(1);
            var density = new Independent(comp1, comp2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying Independent density combining a discrete and a Normal distribution.
            var classifier = new HiddenMarkovClassifier<Independent>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<Independent>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<Independent>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            return classifier;
        }
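        // A minimal usage sketch for the factory above: classify the same two
        // training sequences it returns (the helper name is illustrative):
        private static void checkModel2()
        {
            double[][][] sequences;
            int[] labels;
            var classifier = CreateModel2(out sequences, out labels);

            double likelihood;
            int c0 = classifier.Compute(sequences[0], out likelihood); // expected: 0
            int c1 = classifier.Compute(sequences[1], out likelihood); // expected: 1
        }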
        /// <summary>
        ///   Constructs a new potential function modeling Hidden Markov Models.
        /// </summary>
        ///
        /// <param name="classifier">A hidden Markov sequence classifier.</param>
        /// <param name="includePriors">True to include class features (priors), false otherwise.</param>
        ///
        public MarkovMultivariateFunction(HiddenMarkovClassifier <Independent> classifier,
                                          bool includePriors = true)
        {
            this.Outputs = classifier.Classes;

            int factorIndex    = 0;
            var factorParams   = new List <double>();
            var factorFeatures = new List <IFeature <double[]> >();

            this.Factors = new FactorPotential <double[]> [Outputs];

            int[] classOffset = new int[classifier.Classes];
            int[] edgeOffset  = new int[classifier.Classes];
            int[] stateOffset = new int[classifier.Classes];
            int[] classCount  = new int[classifier.Classes];
            int[] edgeCount   = new int[classifier.Classes];
            int[] stateCount  = new int[classifier.Classes];

            int[][][] lookupTables = new int[classifier.Classes][][];


            // Create features for initial class probabilities
            for (int c = 0; c < classifier.Classes; c++)
            {
                var stateParams   = new List <double>();
                var stateFeatures = new List <IFeature <double[]> >();

                var edgeParams   = new List <double>();
                var edgeFeatures = new List <IFeature <double[]> >();

                var classParams   = new List <double>();
                var classFeatures = new List <IFeature <double[]> >();

                var model = classifier[c];

                int[][] lookupTable = new int[model.States][];
                for (int i = 0; i < lookupTable.Length; i++)
                {
                    lookupTable[i] = new int[model.Dimension];
                }

                if (includePriors)
                {
                    // Create features for class labels
                    classParams.Add(Math.Log(classifier.Priors[c]));
                    classFeatures.Add(new OutputFeature <double[]>(this, c, c));
                }

                // Create features for initial state probabilities
                for (int i = 0; i < model.States; i++)
                {
                    edgeParams.Add(model.Probabilities[i]);
                    edgeFeatures.Add(new InitialFeature <double[]>(this, c, i));
                }

                // Create features for state transition probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int j = 0; j < model.States; j++)
                    {
                        edgeParams.Add(model.Transitions[i, j]);
                        edgeFeatures.Add(new TransitionFeature <double[]>(this, c, i, j));
                    }
                }

                int position = 0;

                // Create features for emission probabilities
                for (int i = 0; i < model.States; i++)
                {
                    for (int d = 0; d < model.Emissions[i].Components.Length; d++)
                    {
                        IUnivariateDistribution distribution = model.Emissions[i].Components[d];

                        NormalDistribution normal = distribution as NormalDistribution;
                        if (normal != null)
                        {
                            double var  = normal.Variance;
                            double mean = normal.Mean;

                            // Occupancy
                            stateParams.Add(-0.5 * (Math.Log(2.0 * Math.PI * var) + (mean * mean) / var));
                            stateFeatures.Add(new OccupancyFeature <double[]>(this, c, i));
                            lookupTable[i][d] = position;
                            position++;

                            // 1st Moment (x)
                            stateParams.Add(mean / var);
                            stateFeatures.Add(new MultivariateFirstMomentFeature(this, c, i, d));
                            position++;

                            // 2nd Moment (x²)
                            stateParams.Add(-1.0 / (2.0 * var));
                            stateFeatures.Add(new MultivariateSecondMomentFeature(this, c, i, d));
                            position++;

                            continue;
                        }

                        var discrete = distribution as GeneralDiscreteDistribution;

                        if (discrete != null)
                        {
                            lookupTable[i][d] = position;

                            for (int k = 0; k < discrete.Frequencies.Length; k++)
                            {
                                stateParams.Add(Math.Log(discrete.Frequencies[k]));
                                stateFeatures.Add(new MultivariateEmissionFeature(this, c, i, k, d));
                                position++;
                            }

                            continue;
                        }
                    }
                }

                classOffset[c] = factorIndex;
                edgeOffset[c]  = factorIndex + classParams.Count;
                stateOffset[c] = factorIndex + classParams.Count + edgeParams.Count;

                classCount[c] = classParams.Count;
                edgeCount[c]  = edgeParams.Count;
                stateCount[c] = stateParams.Count;

                lookupTables[c] = lookupTable;


                // 1. classes
                factorFeatures.AddRange(classFeatures);
                factorParams.AddRange(classParams);

                // 2. edges
                factorFeatures.AddRange(edgeFeatures);
                factorParams.AddRange(edgeParams);

                // 3. states
                factorFeatures.AddRange(stateFeatures);
                factorParams.AddRange(stateParams);

                factorIndex += classParams.Count + stateParams.Count + edgeParams.Count;
            }

            System.Diagnostics.Debug.Assert(factorIndex == factorParams.Count);
            System.Diagnostics.Debug.Assert(factorIndex == factorFeatures.Count);

            this.Weights  = factorParams.ToArray();
            this.Features = factorFeatures.ToArray();


            for (int c = 0; c < classifier.Models.Length; c++)
            {
                Factors[c] = new MarkovIndependentFactor(this, classifier.Models[c].States, c, lookupTables[c],
                                                         classIndex: classOffset[c], classCount: classCount[c],  // 1. classes
                                                         edgeIndex: edgeOffset[c], edgeCount: edgeCount[c],      // 2. edges
                                                         stateIndex: stateOffset[c], stateCount: stateCount[c]); // 3. states
            }
        }
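        // The constructor above packs each class's parameters contiguously in the
        // order classes, edges, states; classOffset, edgeOffset and stateOffset
        // record where each block starts so every factor can slice its own weights.
        // A minimal sketch turning such a classifier into an equivalent HCRF
        // (the helper name is illustrative):
        private static HiddenConditionalRandomField<double[]> toHiddenCrf(
            HiddenMarkovClassifier<Independent> classifier)
        {
            var function = new MarkovMultivariateFunction(classifier);
            return new HiddenConditionalRandomField<double[]>(function);
        }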
        public void LearnTest7()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.

            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var initialDensity = new MultivariateNormalDistribution(2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate Normal distribution as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
                classes: 2, topology: new Forward(2), initial: initialDensity);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions()
                    {
                        Diagonal = true,      // only diagonal covariance matrices
                        Regularization = 1e-5 // avoid non-positive definite errors
                    }
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);


            // Calculate the probability that the given
            //  sequences originated from the model
            double likelihood, likelihood2;

            // Try to classify the 1st sequence (output should be 0)
            int c1 = classifier.Compute(sequences[0], out likelihood);

            // Try to classify the 2nd sequence (output should be 1)
            int c2 = classifier.Compute(sequences[1], out likelihood2);


            Assert.AreEqual(0, c1);
            Assert.AreEqual(1, c2);

            Assert.AreEqual(-24.560663315259973, logLikelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood, 1e-10);
            Assert.AreEqual(0.99999999998805045, likelihood2, 1e-10);

            Assert.IsFalse(double.IsNaN(logLikelihood));
            Assert.IsFalse(double.IsNaN(likelihood));
            Assert.IsFalse(double.IsNaN(likelihood2));
        }
        public static HiddenMarkovClassifier<MultivariateNormalDistribution> CreateModel3(
            int states = 4, bool priors = true)
        {

            MultivariateNormalDistribution density = new MultivariateNormalDistribution(2);

            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(6,
                new Forward(states), density);

            string[] labels = { "1", "2", "3", "4", "5", "6" };
            for (int i = 0; i < classifier.Models.Length; i++)
                classifier.Models[i].Tag = labels[i];

            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(classifier,

                // Train each model using the selected convergence criteria
                i => new BaumWelchLearning<MultivariateNormalDistribution>(classifier.Models[i])
                {
                    Tolerance = 0.1,
                    Iterations = 0,

                    FittingOptions = new NormalOptions() { Diagonal = true, Regularization = 1e-10 }
                }
            );

            teacher.Empirical = priors;

            // Run the learning algorithm
            teacher.Run(inputTest, outputTest);

            return classifier;
        }
        public static HiddenMarkovClassifier<Independent<NormalDistribution>> CreateModel4(out double[][][] words, out int[] labels, bool usePriors)
        {
            double[][] hello =
            {
                new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
                new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 1.0, 0.0 },
                new double[] { 0.0, 0.0, 0.1, 1.1 },
            };

            double[][] car =
            {
                new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
                new double[] { 0.0, 0.0, 0.1, 0.0 },
                new double[] { 1.0, 0.0, 0.0, 0.0 },
            };

            double[][] wardrobe =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.1 },
            };

            double[][] wardrobe2 =
            {
                new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
                new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word.
                new double[] { 0.0, 0.1, 1.0, 0.0 },
                new double[] { 0.1, 0.0, 1.0, 0.2 },
            };

            words = new double[][][] { hello, car, wardrobe, wardrobe2 };

            labels = new [] { 0, 1, 2, 2 };

            var initial = new Independent<NormalDistribution>
            (
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1),
                new NormalDistribution(0, 1)
            );


            int numberOfWords = 3;
            int numberOfStates = 5;

            var classifier = new HiddenMarkovClassifier<Independent<NormalDistribution>>
            (
                classes: numberOfWords,
                topology: new Forward(numberOfStates),
                initial: initial
            );

            var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(classifier,

                modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 100,

                    FittingOptions = new IndependentOptions()
                    {
                        InnerOption = new NormalOptions() { Regularization = 1e-5 }
                    }
                }
            );

            if (usePriors)
                teacher.Empirical = true;

            double logLikelihood = teacher.Run(words, labels);

            return classifier;
        }
 /// <summary>
 ///   Creates a new <see cref="HiddenConditionalRandomField{T}"/> from the given <paramref name="classifier"/>.
 /// </summary>
 ///
 /// <param name="classifier">The classifier.</param>
 ///
 /// <returns>
 ///   A <see cref="HiddenConditionalRandomField{T}"/> that implements
 ///   exactly the same model as the given <paramref name="classifier"/>.
 /// </returns>
 ///
 public static HiddenConditionalRandomField <int> FromHiddenMarkov(HiddenMarkovClassifier <GeneralDiscreteDistribution, int> classifier)
 {
     return new HiddenConditionalRandomField<int>(new MarkovDiscreteFunction(classifier));
 }
 public void StartClassifier()
 {
     classifer = new HiddenMarkovClassifier<MultivariateNormalDistribution>(models.ToArray());
 }
        public void LearnTest9()
        {
            double[][][] inputs = large_gestures;
            int[] outputs = large_outputs;

            int states = 5;
            int iterations = 100;
            double tolerance = 0.01;
            bool rejection = true;
            double sensitivity = 1E-85;

            int dimension = inputs[0][0].Length;

            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2,
                new Forward(states), new MultivariateNormalDistribution(dimension));

            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,

                // Train each model using the selected convergence criteria
                i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i])
                {
                    Tolerance = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            );

            teacher.Empirical = true;
            teacher.Rejection = rejection;

            // Run the learning algorithm
            double logLikelihood = teacher.Run(inputs, outputs);

            hmm.Sensitivity = sensitivity;

            for (int i = 0; i < large_gestures.Length; i++)
            {
                int actual = hmm.Compute(large_gestures[i]);
                int expected = large_outputs[i];
                Assert.AreEqual(expected,actual);
            }
        }
 /// <summary>
 ///   Creates a new <see cref="HiddenConditionalRandomField{T}"/> from the given <paramref name="classifier"/>.
 /// </summary>
 ///
 /// <param name="classifier">The classifier.</param>
 ///
 /// <returns>
 ///   A <see cref="HiddenConditionalRandomField{T}"/> that implements
 ///   exactly the same model as the given <paramref name="classifier"/>.
 /// </returns>
 ///
 public static HiddenConditionalRandomField <double[]> FromHiddenMarkov(HiddenMarkovClassifier <MultivariateNormalDistribution, double[]> classifier)
 {
     return new HiddenConditionalRandomField<double[]>(new MarkovMultivariateFunction(classifier));
 }
        static void runDiscreteDensityHiddenMarkovClassifierLearningExample()
        {
            // Observation sequences should only contain symbols that are greater than or equal to 0, and less than the number of symbols.
            int[][] observationSequences =
            {
                // First class of sequences: starts and ends with zeros, ones in the middle.
                new[] { 0, 1, 1, 1, 0 },
                new[] { 0, 0, 1, 1, 0, 0 },
                new[] { 0, 1, 1, 1, 1, 0 },

                // Second class of sequences: starts with twos and switches to ones until the end.
                new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
                new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

                // Third class of sequences: can start with any symbols, but ends with three.
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 0, 0, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
                new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
                new[] { 2, 2, 0, 3, 3, 3, 3 },
                new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
                new[] { 1, 1, 2, 3, 3, 3, 3 },
            };

            // Consider their respective class labels.
            // Class labels must be zero-based, consecutive integers.
            int[] classLabels =
            {
                0, 0, 0,  // Sequences 1-3 are from class 0.
                1, 1, 1,  // Sequences 4-6 are from class 1.
                2, 2, 2, 2, 2, 2, 2, 2  // Sequences 7-14 are from class 2.
            };

            // Use a single topology for all inner models.
            ITopology forward = new Forward(states: 3);

            // Create a hidden Markov classifier with the given topology.
            HiddenMarkovClassifier hmc = new HiddenMarkovClassifier(classes: 3, topology: forward, symbols: 4);

            // Create the learning algorithm to teach each of the inner models.
            var trainer = new HiddenMarkovClassifierLearning(
                hmc,
                // Specify individual training options for each inner model.
                modelIndex => new BaumWelchLearning(hmc.Models[modelIndex])
                {
                    Tolerance = 0.001,  // iterate until log-likelihood changes less than 0.001.
                    Iterations = 0  // don't place an upper limit on the number of iterations.
                }
            );

            // Call its Run method to start learning.
            double averageLogLikelihood = trainer.Run(observationSequences, classLabels);
            Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

            // Check the output classification label for some sequences.
            int y1 = hmc.Compute(new[] { 0, 1, 1, 1, 0 });  // output is y1 = 0.
            Console.WriteLine("output class = {0}", y1);
            int y2 = hmc.Compute(new[] { 0, 0, 1, 1, 0, 0 });  // output is y2 = 0.
            Console.WriteLine("output class = {0}", y2);

            int y3 = hmc.Compute(new[] { 2, 2, 2, 2, 1, 1 });  // output is y3 = 1.
            Console.WriteLine("output class = {0}", y3);
            int y4 = hmc.Compute(new[] { 2, 2, 1, 1 });  // output is y4 = 1.
            Console.WriteLine("output class = {0}", y4);

            int y5 = hmc.Compute(new[] { 0, 0, 1, 3, 3, 3 });  // output is y5 = 2.
            Console.WriteLine("output class = {0}", y4);
            int y6 = hmc.Compute(new[] { 2, 0, 2, 2, 3, 3 });  // output is y6 = 2.
            Console.WriteLine("output class = {0}", y6);
        }
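        // A small companion sketch (not part of the original example): Compute
        // also has an overload that reports the likelihood of its decision,
        // which is useful when the bare label is not enough.
        static void checkLikelihood(HiddenMarkovClassifier hmc)
        {
            double likelihood;
            int y = hmc.Compute(new[] { 0, 1, 1, 1, 0 }, out likelihood);
            Console.WriteLine("class = {0}, likelihood = {1}", y, likelihood);
        }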
        /// <summary>
        ///   Creates a new <see cref="HiddenConditionalRandomField{T}"/> from the given <paramref name="classifier"/>.
        /// </summary>
        ///
        /// <param name="classifier">The classifier.</param>
        ///
        /// <returns>
        ///   A <see cref="HiddenConditionalRandomField{T}"/> that implements
        ///   exactly the same model as the given <paramref name="classifier"/>.
        /// </returns>
        ///
        public static HiddenConditionalRandomField<double> FromHiddenMarkov(HiddenMarkovClassifier<NormalDistribution, double> classifier)
        {
            return new HiddenConditionalRandomField<double>(new MarkovContinuousFunction(classifier));
        }
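        // Sanity-check sketch (an assumption, mirroring the multivariate case
        // above): since the field implements exactly the same model, both
        // should assign the same label to any univariate sequence.
        public static bool AgreesWithHmm(
            HiddenMarkovClassifier<NormalDistribution, double> classifier,
            double[] observations)
        {
            var field = FromHiddenMarkov(classifier);
            return field.Compute(observations) == classifier.Decide(observations);
        }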
        public void LogForwardTest3()
        {
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var observations = inputs[0];

            double[,] expected = Matrix.Log(Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.Forward(function.Factors[0], observations, 0));

            double logLikelihood;
            double[,] actual = Accord.Statistics.Models.Fields.
                ForwardBackwardAlgorithm.LogForward(function.Factors[0], observations, 0, out logLikelihood);

            Assert.IsTrue(expected.IsEqual(actual, 1e-10));

            double p = 0;
            for (int i = 0; i < hmm[0].States; i++)
                p += Math.Exp(actual[observations.Length - 1, i]);

            Assert.AreEqual(Math.Exp(logLikelihood), p, 1e-8);
            Assert.IsFalse(double.IsNaN(p));
        }
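        // A short sketch of the identity the test above relies on: the linear-
        // and log-domain forward matrices agree element-wise after
        // exponentiation. LogForward is preferred in practice because the
        // linear-domain Forward recursion underflows on long sequences.
        static bool forwardMatricesAgree(double[,] logForward, double[,] forward)
        {
            for (int t = 0; t < forward.GetLength(0); t++)
                for (int i = 0; i < forward.GetLength(1); i++)
                    if (Math.Abs(Math.Exp(logForward[t, i]) - forward[t, i]) > 1e-10)
                        return false;
            return true;
        }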
        public static HiddenMarkovClassifier<MultivariateNormalDistribution> CreateModel2()
        {
            // Create a Continuous density Hidden Markov Model Sequence Classifier
            // to detect a multivariate sequence and the same sequence backwards.
            double[][][] sequences = new double[][][]
            {
                new double[][] 
                { 
                    // This is the first  sequence with label = 0
                    new double[] { 0, 1 },
                    new double[] { 1, 2 },
                    new double[] { 2, 3 },
                    new double[] { 3, 4 },
                    new double[] { 4, 5 },
                }, 

                new double[][]
                {
                        // This is the second sequence with label = 1
                    new double[] { 4,  3 },
                    new double[] { 3,  2 },
                    new double[] { 2,  1 },
                    new double[] { 1,  0 },
                    new double[] { 0, -1 },
                }
            };

            // Labels for the sequences
            int[] labels = { 0, 1 };


            var density = new MultivariateNormalDistribution(2);

            // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
            // and an underlying multivariate mixture of Normal distributions as density.
            var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
                2, new Ergodic(2), density);

            // Configure the learning algorithms to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
                classifier,

                // Train each model until the log-likelihood changes less than 0.0001
                modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(
                    classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0,

                    FittingOptions = new NormalOptions() { Diagonal = true }
                }
            );

            // Train the sequence classifier using the algorithm
            double logLikelihood = teacher.Run(sequences, labels);

            return classifier;
        }
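        // Usage sketch for the model above (the probe sequence is hypothetical):
        // ascending pairs should be labeled class 0, descending pairs class 1.
        static void testModel2(HiddenMarkovClassifier<MultivariateNormalDistribution> classifier)
        {
            double[][] ascending =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 2, 3 },
            };

            double likelihood;
            int label = classifier.Compute(ascending, out likelihood);
            Console.WriteLine("class = {0}, likelihood = {1}", label, likelihood);
        }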
        private static HiddenMarkovClassifier createClassifier(
            out int[][] sequences, bool rejection = false)
        {
            sequences = new int[][] 
            {
                new int[] { 0,1,2,3,4 }, 
                new int[] { 4,3,2,1,0 }, 
            };

            int[] labels = { 0, 1 };

            HiddenMarkovClassifier classifier =
                new HiddenMarkovClassifier(2, new Ergodic(2), symbols: 5);

            var teacher = new HiddenMarkovClassifierLearning(classifier,

                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance = 0.0001,
                    Iterations = 0
                }
            );

            teacher.Rejection = rejection;
            teacher.Run(sequences, labels);

            return classifier;
        }
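        // Usage sketch for the factory above (probe values are hypothetical):
        // with rejection enabled, sequences from neither class come back as -1.
        static void testRejection()
        {
            int[][] sequences;
            HiddenMarkovClassifier classifier = createClassifier(out sequences, rejection: true);

            int label = classifier.Compute(new int[] { 0, 4, 0, 4, 0 });
            Console.WriteLine(label == -1 ? "rejected" : "class " + label);
        }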
        public void LearnTest2()
        {
            // Declare some testing data
            int[][] inputs = new int[][]
            {
                new int[] { 0,0,1,2 },     // Class 0
                new int[] { 0,1,1,2 },     // Class 0
                new int[] { 0,0,0,1,2 },  // Class 0
                new int[] { 0,1,2,2,2 },   // Class 0

                new int[] { 2,2,1,0 },     // Class 1
                new int[] { 2,2,2,1,0 },   // Class 1
                new int[] { 2,2,2,1,0 },   // Class 1
                new int[] { 2,2,2,2,1 },   // Class 1
            };

            int[] outputs = new int[]
            {
                0,0,0,0, // First four sequences are of class 0
                1,1,1,1, // Last four sequences are of class 1
            };


            // We are trying to predict two different classes
            int classes = 2;

            // Each sequence may have up to 3 symbols (0,1,2)
            int symbols = 3;

            // Nested models will have 3 states each
            int[] states = new int[] { 3, 3 };

            // Creates a new Hidden Markov Model Classifier with the given parameters
            HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);


            // Create a new learning algorithm to train the sequence classifier
            var teacher = new HiddenMarkovClassifierLearning(classifier,

                // Train each model until the log-likelihood changes less than 0.001
                modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
                {
                    Tolerance = 0.001,
                    Iterations = 0
                }
            );

            // Enable support for sequence rejection
            teacher.Rejection = true;

            // Train the sequence classifier using the algorithm
            double likelihood = teacher.Run(inputs, outputs);


            // Will assert the models have learned the sequences correctly.
            for (int i = 0; i < inputs.Length; i++)
            {
                int expected = outputs[i];
                int actual = classifier.Compute(inputs[i], out likelihood);
                Assert.AreEqual(expected, actual);
            }

            HiddenMarkovModel threshold = classifier.Threshold;

            Assert.AreEqual(6, threshold.States);

            Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10);
            Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10);
            Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10);

            Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10);
            Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10);
            Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10);

            Assert.IsFalse(Matrix.HasNaN(threshold.Transitions));

            int[] r0 = new int[] { 1, 1, 0, 0, 2 };


            double logRejection;
            int c = classifier.Compute(r0, out logRejection);

            Assert.AreEqual(-1, c);
            Assert.AreEqual(0.99569011079012049, logRejection);
            Assert.IsFalse(double.IsNaN(logRejection));

            logRejection = threshold.Evaluate(r0);
            Assert.AreEqual(-6.7949285513628528, logRejection, 1e-10);
            Assert.IsFalse(double.IsNaN(logRejection));

            threshold.Decode(r0, out logRejection);
            Assert.AreEqual(-8.902077561009957, logRejection, 1e-10);
            Assert.IsFalse(double.IsNaN(logRejection));
        }
Example #59
        private void btnLearnHMM_Click(object sender, EventArgs e)
        {
            if (gridSamples.Rows.Count == 0)
            {
                MessageBox.Show("Please load or insert some data first.");
                return;
            }

            BindingList<Sequence> samples = database.Samples;
            BindingList<String>   classes = database.Classes;

            double[][][] inputs  = new double[samples.Count][][];
            int[]        outputs = new int[samples.Count];

            for (int i = 0; i < inputs.Length; i++)
            {
                inputs[i]  = samples[i].Input;
                outputs[i] = samples[i].Output;
            }

            int    states     = 5;
            int    iterations = 0;
            double tolerance  = 0.01;
            bool   rejection  = false;


            hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(classes.Count,
                new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());


            // Create the learning algorithm for the ensemble classifier
            var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
            {
                // Train each model using the selected convergence criteria
                Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
                {
                    Tolerance  = tolerance,
                    Iterations = iterations,

                    FittingOptions = new NormalOptions()
                    {
                        Regularization = 1e-5
                    }
                }
            };

            teacher.Empirical = true;
            teacher.Rejection = rejection;


            // Run the learning algorithm
            teacher.Learn(inputs, outputs);


            // Classify all training instances
            foreach (var sample in database.Samples)
            {
                sample.RecognizedAs = hmm.Decide(sample.Input);
            }

            foreach (DataGridViewRow row in gridSamples.Rows)
            {
                var sample = row.DataBoundItem as Sequence;
                row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
                                                 Color.LightGreen : Color.White;
            }

            btnLearnHCRF.Enabled = true;
            hcrf = null;
        }
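        // Persistence sketch (an assumption, not part of the original handler):
        // the trained classifier can be saved and restored with Accord's
        // Serializer so learning does not have to be repeated on every run.
        // The file name below is hypothetical.
        private void saveModel()
        {
            Accord.IO.Serializer.Save(hmm, "gestures.bin");
        }

        private void loadModel()
        {
            hmm = Accord.IO.Serializer.Load<
                HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>>("gestures.bin");
        }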
        public void GradientTest()
        {
            // Creates a sequence classifier containing 2 hidden Markov Models
            //  with 2 states and an underlying Normal distribution as density.
            MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
            var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

            double[][][] inputs =
            {
                new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
                new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
                new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
            };

            int[] outputs = 
            {
                0, 0, 1
            };

            var function = new MarkovMultivariateFunction(hmm);

            var model = new HiddenConditionalRandomField<double[]>(function);
            var target = new ForwardBackwardGradient<double[]>(model);

            FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

            diff.Function = parameters => func(model, parameters, inputs, outputs);

            double[] expected = diff.Compute(function.Weights);
            double[] actual = target.Gradient(function.Weights, inputs, outputs);


            for (int i = 0; i < actual.Length; i++)
            {
                Assert.AreEqual(expected[i], actual[i], 0.05);
                Assert.IsFalse(double.IsNaN(actual[i]));
                Assert.IsFalse(double.IsNaN(expected[i]));
            }
        }
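        // GradientTest above references a helper 'func' that is not shown in
        // this snippet. A minimal sketch, assuming it evaluates the objective
        // that ForwardBackwardGradient differentiates: the negative
        // log-likelihood of the data under the given parameter vector.
        private static double func(HiddenConditionalRandomField<double[]> model,
            double[] parameters, double[][][] inputs, int[] outputs)
        {
            // Load the candidate parameters into the model...
            model.Function.Weights = parameters;

            // ...and evaluate the objective being minimized.
            return -model.LogLikelihood(inputs, outputs);
        }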