/// <summary>
///   Creates a discrete-density hidden Markov model from an explicit
///   transition matrix, emission matrix and initial state probabilities.
/// </summary>
/// <param name="transitions">State transition matrix (states × states).</param>
/// <param name="emissions">Emission matrix; one row per state, one column per symbol.</param>
/// <param name="probabilities">Initial state probability vector.</param>
/// <param name="logarithm">True if the given matrices are already in log-space.</param>
/// <returns>A hidden Markov model with one <see cref="GeneralDiscreteDistribution"/> per state.</returns>
/// <exception cref="ArgumentNullException">Thrown when <paramref name="emissions"/> is null.</exception>
/// <exception cref="ArgumentException">Thrown when the emission matrix row count does not match the number of states.</exception>
public static HiddenMarkovModel<GeneralDiscreteDistribution, int> CreateDiscrete(double[,] transitions, double[,] emissions, double[] probabilities, bool logarithm = false)
{
    // Fail fast: reject a null emission matrix before spending any work
    // constructing the topology (the original checked only afterwards).
    if (emissions == null)
    {
        throw new ArgumentNullException(nameof(emissions));
    }

    ITopology topology = new Custom(transitions, probabilities, logarithm);

    if (emissions.GetLength(0) != topology.States)
    {
        throw new ArgumentException(
            "The emission matrix should have the same number of rows as the number of states in the model.",
            nameof(emissions));
    }

    // Initialize B with one discrete distribution per state, each built
    // from the corresponding row of the emission matrix.
    var B = new GeneralDiscreteDistribution[topology.States];
    for (int i = 0; i < B.Length; i++)
        B[i] = new GeneralDiscreteDistribution(Accord.Math.Matrix.GetRow(emissions, i));

    return new HiddenMarkovModel<GeneralDiscreteDistribution, int>(topology, B);
}
public void LearnTest6()
{
    // Continuous-density HMM classifier over two-dimensional observations:
    // one model for an ascending sequence (label 0) and one for the same
    // sequence traversed backwards (label 1).
    double[][][] inputs =
    {
        new double[][] // first sequence, label = 0
        {
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },
        new double[][] // second sequence, label = 1
        {
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Class labels, one per sequence.
    int[] outputs = { 0, 1 };

    var density = new MultivariateNormalDistribution(2);

    // An all-zero custom topology must be rejected by the constructor.
    try
    {
        new HiddenMarkovClassifier<MultivariateNormalDistribution>(
            2, new Custom(new double[2, 2], new double[2]), density);
        Assert.Fail();
    }
    catch (ArgumentException)
    {
    }

    // Build a valid topology first, then zero it out afterwards so the
    // classifier below is created from a degenerate configuration.
    var degenerate = new Custom(
        new[,] { { 1 / 2.0, 1 / 2.0 }, { 1 / 2.0, 1 / 2.0 } },
        new[] { 1.0, 0.0 });

    Array.Clear(degenerate.Initial, 0, degenerate.Initial.Length);
    Array.Clear(degenerate.Transitions, 0, degenerate.Transitions.Length);

    // Sequence classifier: 2 hidden Markov models with 2 states each and a
    // multivariate Normal emission density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        2, degenerate, density);

    // Per-model Baum-Welch learning with diagonal covariance fitting,
    // stopping when the log-likelihood changes by less than 0.0001.
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
        classifier,
        index => new BaumWelchLearning<MultivariateNormalDistribution>(
            classifier.Models[index])
        {
            Tolerance = 0.0001,
            Iterations = 0,
            FittingOptions = new NormalOptions() { Diagonal = true }
        }
    );

    // Train the classifier on both sequences.
    double logLikelihood = teacher.Run(inputs, outputs);

    // Classify each training sequence and capture its likelihood response.
    double response1, response2;
    int label1 = classifier.Compute(inputs[0], out response1);
    int label2 = classifier.Compute(inputs[1], out response2);

    // With a zeroed topology the learned model is degenerate: the overall
    // log-likelihood collapses to -infinity while the per-sequence
    // responses remain finite zeros (NOTE(review): asserting the same
    // expectations as the original test, including the -infinity check).
    Assert.AreEqual(double.NegativeInfinity, logLikelihood);
    Assert.AreEqual(0, response1);
    Assert.AreEqual(0, response2);
    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(response1));
    Assert.IsFalse(double.IsNaN(response2));
}