private void LoadAndTrain_Click(object sender, EventArgs e)
{
    List<int> outputLabels = new List<int>();
    List<int[]> inputSequences = new List<int[]>();

    OpenFileDialog dlg = new OpenFileDialog();
    dlg.Filter = "Gestures (*.xml)|*.xml";
    dlg.Title = "Load Gestures";
    dlg.RestoreDirectory = false;
    dlg.Multiselect = true;

    if (dlg.ShowDialog(this) != DialogResult.OK)
        return; // nothing selected; don't train on empty data

    lblResult.Text = "Training...";

    for (int i = 0; i < dlg.FileNames.Length; i++)
    {
        string name = dlg.FileNames[i];
        List<int[]> inputSequencesTemp = _rec.LoadDirectionalCodewordsFile(name);
        for (int j = 0; j < inputSequencesTemp.Count; j++)
        {
            inputSequences.Add(inputSequencesTemp[j]);
            outputLabels.Add(i); // one class per file
        }
    }

    ReloadViewForm();

    //ITopology forward = new Forward(4, 3);
    ITopology[] forwards = new Forward[4];
    forwards[0] = new Forward(5, 3);
    forwards[1] = new Forward(5, 3);
    forwards[2] = new Forward(5, 3);
    forwards[3] = new Forward(5, 3);

    _hmmc = new HiddenMarkovClassifier(4, forwards, 16);

    // And create an algorithm to teach each of the inner models
    var teacher = new HiddenMarkovClassifierLearning(_hmmc,
        // We can specify individual training options for each inner model:
        modelIndex => new BaumWelchLearning(_hmmc.Models[modelIndex])
        {
            Tolerance = 0.001, // iterate until log-likelihood changes less than 0.001
            Iterations = 0     // don't place an upper limit on the number of iterations
        });

    teacher.Run(inputSequences.ToArray(), outputLabels.ToArray());

    _hmmc.Threshold = teacher.Threshold();
    _hmmc.Sensitivity = 1;

    _hmms = _hmmc.Models;
    for (int i = 0; i < dlg.FileNames.Length; i++)
    {
        _hmms[i].Tag = Gesture.ParseName(dlg.FileNames[i]);
    }

    lblResult.Text = "Success!!";
}
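Once training has finished, the classifier above can be queried in the usual way. The following is a minimal sketch, not part of the original handler: it assumes a captured gesture has already been converted to directional codewords by the same pipeline as LoadDirectionalCodewordsFile, and it uses the Compute/Tag pattern that appears in the other examples on this page.

// Sketch only: classify one captured gesture with the trained _hmmc / _hmms fields.
private string Recognize(int[] directionalCodewords)
{
    double likelihood;
    int label = _hmmc.Compute(directionalCodewords, out likelihood);

    if (label == -1)
        return "Unknown gesture"; // rejected by the threshold model

    // The gesture name was stored in each model's Tag during training
    return (string)_hmms[label].Tag;
}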
public int Classify(double[][][] trainDataSet, int[] trainLabels, double[][] testData, String[] classes)
{
    int states = 5;
    int dimensionsOfFeatures = 12;
    int numberOfClasses = classes.Length;
    int iterations = 0;
    double tolerance = 0.01;

    HiddenMarkovClassifier<MultivariateNormalDistribution> hmm =
        new HiddenMarkovClassifier<MultivariateNormalDistribution>(numberOfClasses,
            new Forward(states), new MultivariateNormalDistribution(dimensionsOfFeatures), classes);

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,
        // Train each model using the selected convergence criteria
        i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,
            FittingOptions = new NormalOptions() { Regularization = 1e-5 }
        }
    );

    teacher.Empirical = true;
    teacher.Rejection = false;

    // Run the learning algorithm
    double error = teacher.Run(trainDataSet, trainLabels);

    int predictedResult = hmm.Compute(testData);
    return predictedResult;
}
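A hedged call-site sketch for the Classify method above; the data-loading helpers and the class names below are placeholders, not from the original project, and each feature vector must have 12 components to match dimensionsOfFeatures.

// Hypothetical usage (helper names are assumptions):
String[] classes = { "classA", "classB" };
double[][][] trainDataSet = LoadTrainingSequences(); // one double[12] per frame
int[] trainLabels = LoadTrainingLabels();
double[][] testData = LoadTestSequence();

int predicted = Classify(trainDataSet, trainLabels, testData, classes);
Console.WriteLine(classes[predicted]);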
public static HiddenMarkovClassifier<NormalDistribution> CreateModel1()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a univariate sequence and the same sequence backwards.
    double[][] sequences = new double[][]
    {
        new double[] { 0, 1, 2, 3, 4 }, // This is the first sequence with label = 0
        new double[] { 4, 3, 2, 1, 0 }, // This is the second sequence with label = 1
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Creates a sequence classifier containing 2 hidden Markov Models
    // with 2 states and an underlying Normal distribution as density.
    NormalDistribution density = new NormalDistribution();
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    return classifier;
}
private static HiddenMarkovClassifier<NormalDistribution> createClassifier(
    out double[][] sequences, bool rejection = false)
{
    sequences = new double[][]
    {
        new double[] { 0, 1, 2, 3, 4 },
        new double[] { 4, 3, 2, 1, 0 },
    };

    int[] labels = { 0, 1 };

    NormalDistribution density = new NormalDistribution();
    HiddenMarkovClassifier<NormalDistribution> classifier =
        new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    teacher.Rejection = rejection;
    teacher.Run(sequences, labels);

    return classifier;
}
private static HiddenMarkovClassifier createClassifier(
    out int[][] sequences, bool rejection = false)
{
    sequences = new int[][]
    {
        new int[] { 0, 1, 2, 3, 4 },
        new int[] { 4, 3, 2, 1, 0 },
    };

    int[] labels = { 0, 1 };

    HiddenMarkovClassifier classifier =
        new HiddenMarkovClassifier(2, new Ergodic(2), symbols: 5);

    var teacher = new HiddenMarkovClassifierLearning(classifier,
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    teacher.Rejection = rejection;
    teacher.Run(sequences, labels);

    return classifier;
}
public static HiddenMarkovClassifier<Independent> CreateModel3()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var comp3 = new NormalDistribution(2);
    var comp4 = new NormalDistribution(3);
    var comp5 = new NormalDistribution(4);
    var density = new Independent(comp1, comp2, comp3, comp4, comp5);

    // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
    // and an underlying joint (Independent) distribution as density.
    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Forward(5), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    // (sequences2 and labels2 are fields defined elsewhere in the fixture)
    double logLikelihood = teacher.Run(sequences2, labels2);

    return classifier;
}
// Start is called before the first frame update
void Start()
{
    Debug.Log("TESTING MACHINE LEARNING");

    // Declare some training data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 1, 1, 0 },       // Class 0
        new int[] { 0, 0, 1, 0 },       // Class 0
        new int[] { 0, 1, 1, 1, 0 },    // Class 0
        new int[] { 0, 1, 0 },          // Class 0

        new int[] { 1, 0, 0, 1 },       // Class 1
        new int[] { 1, 1, 0, 1 },       // Class 1
        new int[] { 1, 0, 0, 0, 1 },    // Class 1
        new int[] { 1, 0, 1 },          // Class 1

        new int[] { 0, 0, 0, 0, 1, 0 }, // Class 2
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Next four sequences are of class 1
        2,          // Last sequence is of class 2
    };

    // We are trying to predict three different classes
    int classes = 3;

    // Each sequence may have up to two symbols (0 or 1)
    int symbols = 2;

    // Nested models will have three states each
    int[] states = new int[] { 3, 3, 3 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            MaxIterations = 1000
        }
    );

    // Train the sequence classifier using the algorithm
    teacher.Learn(inputs, outputs);

    // Compute the classifier answers for the given inputs
    int[] answers = classifier.Decide(inputs);
    foreach (var item in answers)
    {
        Debug.Log(item);
    }
}
public void LearnTest()
{
    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 1, 1, 0 },    // Class 0
        new int[] { 0, 0, 1, 0 },    // Class 0
        new int[] { 0, 1, 1, 1, 0 }, // Class 0
        new int[] { 0, 1, 0 },       // Class 0

        new int[] { 1, 0, 0, 1 },    // Class 1
        new int[] { 1, 1, 0, 1 },    // Class 1
        new int[] { 1, 0, 0, 0, 1 }, // Class 1
        new int[] { 1, 0, 1 },       // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    // We are trying to predict two different classes
    int classes = 2;

    // Each sequence may have up to two symbols (0 or 1)
    int symbols = 2;

    // Nested models will have two states each
    int[] states = new int[] { 2, 2 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    double likelihood = teacher.Run(inputs, outputs);

    // Will assert the models have learned the sequences correctly.
    for (int i = 0; i < inputs.Length; i++)
    {
        int expected = outputs[i];
        int actual = classifier.Compute(inputs[i], out likelihood);
        Assert.AreEqual(expected, actual);
    }
}
public static HiddenMarkovClassifier<Independent> CreateModel2(out double[][][] sequences, out int[] labels)
{
    sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1.1 },
            new double[] { 1, 2.5 },
            new double[] { 1, 3.4 },
            new double[] { 1, 4.7 },
            new double[] { 2, 5.8 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 2, 3.2 },
            new double[] { 2, 2.6 },
            new double[] { 1, 1.2 },
            new double[] { 1, 0.8 },
            new double[] { 0, 1.1 },
        }
    };

    labels = new[] { 0, 1 };

    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var density = new Independent(comp1, comp2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying joint (Independent) distribution as density.
    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    Assert.AreEqual(double.NegativeInfinity, logLikelihood); // only one training sample per class

    return classifier;
}
public void Run()
{
    /* Initialize the model.
     * For background, see the Code Project tutorial:
     * http://www.codeproject.com/Articles/541428/Sequence-Classifiers-in-Csharp-Part-I-Hidden-Marko?msg=5219822#xx5219822xx
     *   states     - number of states for the forward topology
     *   iterations - maximum number of learning iterations
     *   tolerance  - convergence threshold
     */
    int states = 3;
    int iterations = 100;
    double tolerance = 0.01;
    bool rejection = false;
    string[] classes = ActivityIndex.Keys.ToArray();

    ITopology forward = new Forward(states: states);
    hmm = new HiddenMarkovClassifier(classes: 12, topology: forward, symbols: 5);

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning(hmm,
        // Train each model using the selected convergence criteria
        i => new BaumWelchLearning(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,
        }
    );

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm
    double error = teacher.Run(input, output);
    Console.WriteLine("Error: {0}", error);

    // Run the test set and compare predictions against the real values
    using (StreamWriter writer = new StreamWriter("compare.txt"))
    {
        for (int i = 0; i < outpuTest.Length; ++i)
        {
            int val = hmm.Compute(inpuTest[i]);
            if (val != outpuTest[i])
            {
                string labelTestRetrieved = ActivityIndex.FirstOrDefault(x => x.Value == val).Key;
                string labelTestActivity = ActivityIndex.FirstOrDefault(x => x.Value == outpuTest[i]).Key;
                writer.WriteLine(outputTestLabelFolder[i] + " - " +
                    "false, label retrieved: " + labelTestRetrieved +
                    " label activity: " + labelTestActivity);
            }
            else
            {
                string labelTestActivity = ActivityIndex.FirstOrDefault(x => x.Value == outpuTest[i]).Key;
                writer.WriteLine(outputTestLabelFolder[i] + " - " +
                    "true, label activity: " + labelTestActivity);
            }
        }
    }
}
public void LearnTest2()
{
    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 0, 1, 2 },    // Class 0
        new int[] { 0, 1, 1, 2 },    // Class 0
        new int[] { 0, 0, 0, 1, 2 }, // Class 0
        new int[] { 0, 1, 2, 2, 2 }, // Class 0

        new int[] { 2, 2, 1, 0 },    // Class 1
        new int[] { 2, 2, 2, 1, 0 }, // Class 1
        new int[] { 2, 2, 2, 1, 0 }, // Class 1
        new int[] { 2, 2, 2, 2, 1 }, // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    // We are trying to predict two different classes
    int classes = 2;

    // Each sequence may have up to 3 symbols (0,1,2)
    int symbols = 3;

    // Nested models will have 3 states each
    int[] states = new int[] { 3, 3 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 0
        }
    );

    // Enable support for sequence rejection
    teacher.Rejection = true;

    // Train the sequence classifier using the algorithm
    double likelihood = teacher.Run(inputs, outputs);

    //Assert.AreEqual(-0.84036002169161428, likelihood, 1e-15);

    likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
}
public void LearnTest1()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a univariate sequence and the same sequence backwards.
    double[][] sequences = new double[][]
    {
        new double[] { 0, 1, 2, 3, 4 }, // This is the first sequence with label = 0
        new double[] { 4, 3, 2, 1, 0 }, // This is the second sequence with label = 1
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Creates a sequence classifier containing 2 hidden Markov Models
    // with 2 states and an underlying Normal distribution as density.
    NormalDistribution density = new NormalDistribution();
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double likelihood1, likelihood2;

    // Try to classify the first sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out likelihood1);

    // Try to classify the second sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out likelihood2);

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);

    Assert.AreEqual(-13.271981026832929, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(likelihood1));
    Assert.IsFalse(double.IsNaN(likelihood2));
}
public static HiddenMarkovClassifier<Independent> CreateModel1()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Creates a sequence classifier containing 2 hidden Markov Models
    // with 2 states and an underlying Normal distribution as density.
    NormalDistribution component = new NormalDistribution();
    Independent density = new Independent(component);
    var classifier = new HiddenMarkovClassifier<Independent>(2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    Assert.AreEqual(-13.271981026832929d, logLikelihood, 1e-10);

    return classifier;
}
/// <summary>
///   Learns an HMM classifier for the samples in the given database using the
///   Baum-Welch unsupervised learning algorithm, then uses the learned model
///   to classify the training samples.
/// </summary>
/// <param name="database"></param>
/// <returns></returns>
static HiddenMarkovClassifier<MultivariateNormalDistribution> learnHMM(Database database)
{
    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    int states = 5;
    int iterations = 0;
    double tolerance = 0.1;
    bool rejection = true;

    HiddenMarkovClassifier<MultivariateNormalDistribution> hmm =
        new HiddenMarkovClassifier<MultivariateNormalDistribution>(classes.Count,
            new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,
        // Train each model using the selected convergence criteria
        i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,
            FittingOptions = new NormalOptions() { Regularization = 1e-5 }
        }
    );

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm
    double error = teacher.Run(inputs, outputs);

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = hmm.Compute(sample.Input);
    }

    return hmm;
}
public void LearnGesture(int valuesUsed, int statesUsed)
{
    double[][][] inputs = new double[storedGestures.Count][][];
    int[] outputs = new int[storedGestures.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        // Copy the first valuesUsed components of each recorded point
        double[][] atemp = new double[storedGestures[i].points.Length][];
        for (int j = 0; j < storedGestures[i].points.Length; j++)
        {
            double[] btemp = new double[valuesUsed];
            for (int k = 0; k < valuesUsed; k++)
            {
                btemp[k] = storedGestures[i].points[j][k];
            }
            atemp[j] = btemp;
        }

        inputs[i] = atemp;
        outputs[i] = storedGestures[i].index;
    }

    int classes = gestureIndex.Count; // one model per known gesture

    MultivariateNormalDistribution dist = new MultivariateNormalDistribution(valuesUsed);
    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
        classes, new Forward(statesUsed), dist);

    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
    {
        Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
        {
            Tolerance = 0.01,
            MaxIterations = 0,
            FittingOptions = new NormalOptions() { Regularization = 1e-5 }
        }
    };

    teacher.Empirical = true;
    teacher.Rejection = false;

    teacher.Learn(inputs, outputs);
    Debug.Log("Sequence Learned!");
}
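The method above only trains; a recognition counterpart is sketched below under the same generic types. The frame-trimming loop mirrors the training code, and Decide is the same call used in the btnLearnHMM_Click example elsewhere on this page. The method name is an assumption.

// Sketch only: decide the most likely gesture index for a new point sequence.
public int ClassifyGesture(double[][] points, int valuesUsed)
{
    double[][] sequence = new double[points.Length][];
    for (int j = 0; j < points.Length; j++)
    {
        sequence[j] = new double[valuesUsed];
        for (int k = 0; k < valuesUsed; k++)
            sequence[j][k] = points[j][k];
    }

    return hmm.Decide(sequence); // index into gestureIndex
}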
public static HiddenMarkovClassifier CreateModel1()
{
    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 1, 1, 0 },    // Class 0
        new int[] { 0, 0, 1, 0 },    // Class 0
        new int[] { 0, 1, 1, 1, 0 }, // Class 0
        new int[] { 0, 1, 0 },       // Class 0

        new int[] { 1, 0, 0, 1 },    // Class 1
        new int[] { 1, 1, 0, 1 },    // Class 1
        new int[] { 1, 0, 0, 0, 1 }, // Class 1
        new int[] { 1, 0, 1 },       // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    // We are trying to predict two different classes
    int classes = 2;

    // Each sequence may have up to two symbols (0 or 1)
    int symbols = 2;

    // Nested models will have two states each
    int[] states = new int[] { 2, 2 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    var classifier = new HiddenMarkovClassifier(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    double likelihood = teacher.Run(inputs, outputs);

    return classifier;
}
public static void LearnAndPredictContinuous()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a univariate sequence and the same sequence backwards.
    double[][] sequences = new double[][]
    {
        new double[] { 0, 1, 2, 3, 4 }, // This is the first sequence with label = 0
        new double[] { 4, 3, 2, 1, 0 }, // This is the second sequence with label = 1
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Creates a new Continuous-density Hidden Markov Model Sequence Classifier
    // containing 2 hidden Markov Models with 2 states and an underlying Normal
    // distribution as the continuous probability density.
    NormalDistribution density = new NormalDistribution();
    var classifier = new HiddenMarkovClassifier<NormalDistribution>(2, new Ergodic(2), density);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution>(classifier,
        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<NormalDistribution>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    );

    // Train the sequence classifier using the algorithm
    teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double likelihood;

    // Try to classify the first sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out likelihood);
    Console.WriteLine("c1: {0}", c1);

    // Try to classify the second sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out likelihood);
    Console.WriteLine("c2: {0}", c2);
}
private static void hmmc(int[][] inputs, int[] outputs)
{
    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<GeneralDiscreteDistribution, int>()
    {
        Learner = (i) => new BaumWelchLearning<GeneralDiscreteDistribution, int, GeneralDiscreteOptions>()
        {
            Tolerance = 0.001,
            MaxIterations = 0
        }
    };

    // Train the sequence classifier using the algorithm
    var hmmClassifier = teacher.Learn(inputs, outputs);

    // Compute the classifier answers for the given inputs
    int[] answers = hmmClassifier.Decide(inputs);
}
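For completeness, the classifier learned above can then label sequences it has not seen; a minimal sketch (the sample sequence is a placeholder):

// Classify a single unseen sequence (Decide is the same call used on the training inputs).
int[] unseen = new int[] { 0, 1, 1, 0 };
int predictedClass = hmmClassifier.Decide(unseen);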
public void TrainModel(int states = 5, int iterations = 0, double tolerance = 0.01, bool rejection = false)
{
    var samples = DataStore.Samples;
    var labels = DataStore.Labels;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    _hmm = new HiddenMarkovClassifier<NormalDistribution, double>(labels.Count,
        new Forward(states), new NormalDistribution(2), labels.ToArray());

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<NormalDistribution, double>(_hmm)
    {
        // Train each model until the log-likelihood changes less than 0.0001
        Learner = modelIndex => new BaumWelchLearning<NormalDistribution, double>(_hmm.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0
        }
    };

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm. Note: the double[][][] inputs built above match a
    // multivariate classifier (e.g. HiddenMarkovClassifier<MultivariateNormalDistribution,
    // double[]>); with the univariate types used here, Learn expects double[][].
    // double error = teacher.Learn(inputs, outputs);

    // Classify all training instances
    foreach (var sample in samples)
    {
        // sample.RecognizedAs = _hmm.Compute(sample.Input);
    }
}
public void Aprender(IDadosSinaisDinamicos dados)
{
    var quantidadeCaracteristicas = dados.CaracteristicasSinais[0][0].Length;

    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        classes: dados.QuantidadeClasses,
        topology: new Forward(QuantidadeEstados),
        initial: new MultivariateNormalDistribution(quantidadeCaracteristicas)
    );

    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,
        modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 100,
            FittingOptions = new NormalOptions { Regularization = 1e-5 }
        });

    teacher.Run(dados.CaracteristicasSinais, dados.IdentificadoresSinais);
}
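The class above only exposes training; a hedged sketch of the matching prediction step follows. The method name Reconhecer is an assumption, but Compute with an out-likelihood is the same call used by the other continuous-density examples on this page.

// Sketch only: predicted class id and likelihood for one sign's feature sequence.
public int Reconhecer(double[][] caracteristicasSinal, out double verossimilhanca)
{
    return hmm.Compute(caracteristicasSinal, out verossimilhanca);
}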
private void button1_Click(object sender, EventArgs e)
{
    var classes = 4;
    var states = new[] { 1, 2, 2, 3 };
    var cat = new[] { "ខ្ញុំ", "ទៅ", "ខ្លួន", "ក" };
    //var cat = new[] { "A", "B" };

    _hmmc = new HiddenMarkovClassifier(classes, states, 4, cat);

    // Train the ensemble
    var sequences = new[]
    {
        new[] { 1, 1, 1 },
        new[] { 0, 2 },
        new[] { 0, 1, 2 },
        new[] { 1, 2 }
    };
    var labels = new[] { 0, 1, 2, 3 };

    var teacher = new HiddenMarkovClassifierLearning(_hmmc,
        i => new BaumWelchLearning(_hmmc.Models[i])
        {
            Iterations = 0,
            Tolerance = 0.0001
        }
    );

    teacher.Run(sequences, labels);

    var m = _hmmc.Models;

    var test = new[] { 1, 2 };
    double likelihood;
    var label = _hmmc.Compute(test, out likelihood);

    MessageBox.Show(_hmmc.Models[label].Tag.ToString() + " P =" + likelihood);
}
public void LearnTest8()
{
    // Declare some testing data
    double[][] inputs = new double[][]
    {
        new double[] { 0, 0, 1, 2 },    // Class 0
        new double[] { 0, 1, 1, 2 },    // Class 0
        new double[] { 0, 0, 0, 1, 2 }, // Class 0
        new double[] { 0, 1, 2, 2, 2 }, // Class 0

        new double[] { 2, 2, 1, 0 },    // Class 1
        new double[] { 2, 2, 2, 1, 0 }, // Class 1
        new double[] { 2, 2, 2, 1, 0 }, // Class 1
        new double[] { 2, 2, 2, 2, 1 }, // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    // We are trying to predict two different classes
    int classes = 2;

    // Each sequence may have up to 3 symbols (0,1,2)
    int symbols = 3;

    // Nested models will have 3 states each
    int[] states = new int[] { 3, 3 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    var classifier = HiddenMarkovClassifier.CreateGeneric(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<GeneralDiscreteDistribution>(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning<GeneralDiscreteDistribution>(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 0
        }
    );

    // Enable support for sequence rejection
    teacher.Rejection = true;

    // Train the sequence classifier using the algorithm
    double likelihood = teacher.Run(inputs, outputs);

    Assert.AreEqual(-0.84036002169162149, likelihood);

    likelihood = testThresholdModel(inputs, outputs, classifier, likelihood);
}
public void SaveLoadTest()
{
    double[][] hello =
    {
        new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 0.1, 1.1 },
    };

    double[][] car =
    {
        new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
        new double[] { 0.0, 0.0, 0.1, 0.0 },
        new double[] { 1.0, 0.0, 0.0, 0.0 },
    };

    double[][] wardrobe =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.1 },
    };

    double[][][] words = { hello, car, wardrobe };
    int[] labels = { 0, 1, 2 };

    var initial = new Independent
    (
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1)
    );

    int numberOfWords = 3;
    int numberOfStates = 5;

    var classifier = new HiddenMarkovClassifier<Independent>
    (
        classes: numberOfWords,
        topology: new Forward(numberOfStates),
        initial: initial
    );

    var teacher = new HiddenMarkovClassifierLearning<Independent>(classifier,
        modelIndex => new BaumWelchLearning<Independent>(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 100,
            FittingOptions = new IndependentOptions()
            {
                InnerOption = new NormalOptions() { Regularization = 1e-5 }
            }
        }
    );

    double logLikelihood = teacher.Run(words, labels);

    var function = new MarkovMultivariateFunction(classifier);
    var hcrf = new HiddenConditionalRandomField<double[]>(function);

    MemoryStream stream = new MemoryStream();
    hcrf.Save(stream);
    stream.Seek(0, SeekOrigin.Begin);
    var target = HiddenConditionalRandomField<double[]>.Load(stream);

    Assert.AreEqual(hcrf.Function.Factors.Length, target.Function.Factors.Length);
    for (int i = 0; i < hcrf.Function.Factors.Length; i++)
    {
        var e = hcrf.Function.Factors[i];
        var a = target.Function.Factors[i];

        Assert.AreEqual(e.Index, target.Function.Factors[i].Index);
        Assert.AreEqual(e.States, target.Function.Factors[i].States);

        Assert.AreEqual(e.EdgeParameters.Count, a.EdgeParameters.Count);
        Assert.AreEqual(e.EdgeParameters.Offset, a.EdgeParameters.Offset);
        Assert.AreEqual(e.FactorParameters.Count, a.FactorParameters.Count);
        Assert.AreEqual(e.FactorParameters.Offset, a.FactorParameters.Offset);
        Assert.AreEqual(e.OutputParameters.Count, a.OutputParameters.Count);
        Assert.AreEqual(e.OutputParameters.Offset, a.OutputParameters.Offset);
        Assert.AreEqual(e.StateParameters.Count, a.StateParameters.Count);
        Assert.AreEqual(e.StateParameters.Offset, a.StateParameters.Offset);

        Assert.AreEqual(target.Function, a.Owner);
        Assert.AreEqual(hcrf.Function, e.Owner);
    }

    Assert.AreEqual(hcrf.Function.Features.Length, target.Function.Features.Length);
    for (int i = 0; i < hcrf.Function.Factors.Length; i++)
        Assert.AreEqual(hcrf.Function.Features[i].GetType(), target.Function.Features[i].GetType());

    Assert.AreEqual(hcrf.Function.Outputs, target.Function.Outputs);

    for (int i = 0; i < hcrf.Function.Weights.Length; i++)
        Assert.AreEqual(hcrf.Function.Weights[i], target.Function.Weights[i]);
}
public void SimpleGestureRecognitionTest()
{
    // Let's say we would like to do a very simple mechanism for
    // gesture recognition. In this example, we will be trying to
    // create a classifier that can distinguish between the words
    // "hello", "car", and "wardrobe".

    // Let's say we decided to acquire some data, and we asked some
    // people to perform those words in front of a Kinect camera, and,
    // using Microsoft's SDK, we were able to capture the x and y
    // coordinates of each hand while the word was being performed.

    // Let's say we decided to represent our frames as:
    //
    //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
    //
    // Since we captured words, this means we captured sequences of
    // frames as we described above. Let's write some of those as
    // rough examples to explain how gesture recognition can be done:

    double[][] hello =
    {
        new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 0.1, 1.1 },
    };

    double[][] car =
    {
        new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
        new double[] { 0.0, 0.0, 0.1, 0.0 },
        new double[] { 1.0, 0.0, 0.0, 0.0 },
    };

    double[][] wardrobe =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.1 },
    };

    // Here, please note that a real-world example would involve *lots*
    // of samples for each word. Here, we are considering just one from
    // each class, which is clearly sub-optimal and should _never_ be done
    // in practice. For example purposes, however, please disregard this.

    // Those are the words we have in our vocabulary:
    double[][][] words = { hello, car, wardrobe };

    // Now, let's associate integer labels with them. This is needed
    // for the case where there are multiple samples for each word.
    int[] labels = { 0, 1, 2 };

    // We will create our classifiers assuming an independent
    // Gaussian distribution for each component in our feature
    // vectors (similar to a Naive Bayes assumption).
    var initial = new Independent<NormalDistribution>
    (
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1)
    );

    // Now, we can proceed and create our classifier.
    int numberOfWords = 3;  // we are trying to distinguish between 3 words
    int numberOfStates = 5; // this value can be found by trial-and-error

    var hmm = new HiddenMarkovClassifier<Independent<NormalDistribution>>
    (
        classes: numberOfWords,
        topology: new Forward(numberOfStates), // word classifiers should use a forward topology
        initial: initial
    );

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(hmm,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(hmm.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 100,

            // This is necessary so the code doesn't blow up when it realizes
            // there is only one sample per word class. But this could also be
            // needed in normal situations as well.
            FittingOptions = new IndependentOptions()
            {
                InnerOption = new NormalOptions() { Regularization = 1e-5 }
            }
        }
    );

    // Finally, we can run the learning algorithm!
    double logLikelihood = teacher.Run(words, labels);

    // At this point, the classifier should be successfully
    // able to distinguish between our three word classes:
    int tc1 = hmm.Compute(hello);
    int tc2 = hmm.Compute(car);
    int tc3 = hmm.Compute(wardrobe);

    Assert.AreEqual(0, tc1);
    Assert.AreEqual(1, tc2);
    Assert.AreEqual(2, tc3);

    // Now, we can use the Markov classifier to initialize a HCRF
    var function = new MarkovMultivariateFunction(hmm);
    var hcrf = new HiddenConditionalRandomField<double[]>(function);

    // We can check that both are equivalent, although they have
    // formulations that can be learned with different methods
    for (int i = 0; i < words.Length; i++)
    {
        // Should be the same
        int expected = hmm.Compute(words[i]);
        int actual = hcrf.Compute(words[i]);

        // Should be the same
        double h0 = hmm.LogLikelihood(words[i], 0);
        double c0 = hcrf.LogLikelihood(words[i], 0);

        double h1 = hmm.LogLikelihood(words[i], 1);
        double c1 = hcrf.LogLikelihood(words[i], 1);

        double h2 = hmm.LogLikelihood(words[i], 2);
        double c2 = hcrf.LogLikelihood(words[i], 2);

        Assert.AreEqual(expected, actual);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
        Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));

        Assert.IsFalse(double.IsNaN(c0));
        Assert.IsFalse(double.IsNaN(c1));
        Assert.IsFalse(double.IsNaN(c2));
    }

    // Now we can learn the HCRF using one of the best learning
    // algorithms available, Resilient Backpropagation learning:

    // Create a learning algorithm
    var rprop = new HiddenResilientGradientLearning<double[]>(hcrf)
    {
        Iterations = 50,
        Tolerance = 1e-5
    };

    // Run the algorithm and learn the models
    double error = rprop.Run(words, labels);

    // At this point, the HCRF should be successfully
    // able to distinguish between our three word classes:
    int hc1 = hcrf.Compute(hello);
    int hc2 = hcrf.Compute(car);
    int hc3 = hcrf.Compute(wardrobe);

    Assert.AreEqual(0, hc1);
    Assert.AreEqual(1, hc2);
    Assert.AreEqual(2, hc3);
}
/// <summary>
///   Trains the hidden Markov classifier
/// </summary>
private void btnTrain_Click(object sender, EventArgs e)
{
    DataTable source = dgvSequenceSource.DataSource as DataTable;
    if (source == null || hmmc == null)
    {
        MessageBox.Show("Please create a sequence classifier first.");
        return;
    }

    int rows = source.Rows.Count;

    // Gets the input sequences
    int[][] sequences = new int[rows][];
    int[] labels = new int[rows];

    // For each row in the data grid view
    for (int i = 0; i < rows; i++)
    {
        // Get the row at the index
        DataRow row = source.Rows[i];

        // Get the label associated with this sequence
        string label = row["Label"] as string;

        // Extract the sequence and the expected label for it
        sequences[i] = decode(row["Sequences"] as string);
        labels[i] = hmmc.Models.Find(x => x.Tag as string == label)[0];
    }

    // Grab training parameters
    int iterations = (int)numIterations.Value;
    double limit = (double)numConvergence.Value;

    if (rbStopIterations.Checked)
    {
        limit = 0;
    }
    else
    {
        iterations = 0;
    }

    // Create a new hidden Markov model learning algorithm
    var teacher = new HiddenMarkovClassifierLearning(hmmc, i =>
    {
        return new BaumWelchLearning(hmmc.Models[i])
        {
            Iterations = iterations,
            Tolerance = limit
        };
    });

    // Learn the classifier
    teacher.Run(sequences, labels);

    // Update the GUI
    dgvModels_CurrentCellChanged(this, EventArgs.Empty);
}
private void btnLearnHMM_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    int states = 5;
    int iterations = 0;
    double tolerance = 0.01;
    bool rejection = false;

    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(classes.Count,
        new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
    {
        // Train each model using the selected convergence criteria
        Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,
            FittingOptions = new NormalOptions() { Regularization = 1e-5 }
        }
    };

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm
    teacher.Learn(inputs, outputs);

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = hmm.Decide(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }

    btnLearnHCRF.Enabled = true;
    hcrf = null;
}
public TrainResult TrainAll(Dictionary<string, IList<ISoundSignalReader>> signalsDictionary,
    SignalVisitor voiceVisitor = null)
{
    var numberOfItems = 0;
    foreach (var item in signalsDictionary)
    {
        numberOfItems += item.Value.Count;
    }

    double[][][][] featuresInput = new Double[signalsDictionary.Count][][][];
    int[] models = new int[numberOfItems];

    var allSignalIndex = 0;
    var modelIndex = 0;
    var featureUtility = new FeatureUtility(_engineParameters);

    foreach (var item in signalsDictionary)
    {
        var signals = item.Value;
        var signalsCount = signals.Count();
        featuresInput[modelIndex] = new double[signalsCount][][];

        for (var signalIndex = 0; signalIndex < signalsCount; signalIndex++)
        {
            var signal = signals[signalIndex];
            List<Double[]> features = featureUtility.ExtractFeatures(signal, voiceVisitor).First();
            featuresInput[modelIndex][signalIndex] = features.ToArray();
            models[allSignalIndex] = modelIndex;
            allSignalIndex++;
        }

        modelIndex++;
    }

    List<int[]> observables = new List<int[]>();
    for (int wordIndex = 0; wordIndex < featuresInput.Length; wordIndex++) // foreach word
    {
        for (var signalIndex = 0; signalIndex < featuresInput[wordIndex].Length; signalIndex++) // foreach word signal
        {
            // convert features to points, then vector-quantize them against the codebook
            var points = featuresInput[wordIndex][signalIndex].Select(item => new Point(item));
            var codeItems = _codeBook.Quantize(points.ToArray());
            observables.Add(codeItems);
        }
    }

    var hmm = new HiddenMarkovClassifier(signalsDictionary.Count,
        new Forward(_numberOfHiddenStates), _codeBook.Size, signalsDictionary.Keys.ToArray());

    const int iterations = 200;
    const double tolerance = 0;

    var teacher = new HiddenMarkovClassifierLearning(hmm,
        i => new ViterbiLearning(hmm.Models[i]) { Iterations = iterations, Tolerance = tolerance }
    );

    teacher.Run(observables.ToArray(), models);

    return new TrainResult { Catalog = _codeBook, Models = hmm.Models.ToArray() };
}
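Recognition would follow the same feature-extraction and vector-quantization path as training. The sketch below is an assumption (it keeps the trained classifier in a field named _hmm, which TrainAll above does not do); the ExtractFeatures, Point and Quantize calls mirror their usage in TrainAll, and the word labels were passed to the classifier's constructor as the models' Tags.

// Sketch only: classify one signal with the trained classifier and codebook.
public string Recognize(ISoundSignalReader signal)
{
    var featureUtility = new FeatureUtility(_engineParameters);
    List<double[]> features = featureUtility.ExtractFeatures(signal, null).First();

    var points = features.Select(f => new Point(f)).ToArray();
    int[] observation = _codeBook.Quantize(points);

    double likelihood;
    int word = _hmm.Compute(observation, out likelihood);
    return (string)_hmm.Models[word].Tag;
}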
public void LearnTest6()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    var density = new MultivariateNormalDistribution(2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate Normal distribution as density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        2, new Custom(new double[2, 2], new double[2]), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
            FittingOptions = new NormalOptions() { Diagonal = true }
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double response1, response2;

    // Try to classify the 1st sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out response1);

    // Try to classify the 2nd sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out response2);

    Assert.AreEqual(double.NegativeInfinity, logLikelihood);
    Assert.AreEqual(0, response1);
    Assert.AreEqual(0, response2);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(response1));
    Assert.IsFalse(double.IsNaN(response2));
}
public void LearnTest7()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    var initialDensity = new MultivariateNormalDistribution(2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate Normal distribution as density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        classes: 2, topology: new Forward(2), initial: initialDensity);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,

            FittingOptions = new NormalOptions()
            {
                Diagonal = true,      // only diagonal covariance matrices
                Regularization = 1e-5 // avoid non-positive definite errors
            }
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double likelihood, likelihood2;

    // Try to classify the 1st sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out likelihood);

    // Try to classify the 2nd sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out likelihood2);

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);

    Assert.AreEqual(-24.560663315259973, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999999998805045, likelihood, 1e-10);
    Assert.AreEqual(0.99999999998805045, likelihood2, 1e-10);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(likelihood));
    Assert.IsFalse(double.IsNaN(likelihood2));
}
public void LearnTest2()
{
    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 0, 1, 2 },    // Class 0
        new int[] { 0, 1, 1, 2 },    // Class 0
        new int[] { 0, 0, 0, 1, 2 }, // Class 0
        new int[] { 0, 1, 2, 2, 2 }, // Class 0

        new int[] { 2, 2, 1, 0 },    // Class 1
        new int[] { 2, 2, 2, 1, 0 }, // Class 1
        new int[] { 2, 2, 2, 1, 0 }, // Class 1
        new int[] { 2, 2, 2, 2, 1 }, // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    // We are trying to predict two different classes
    int classes = 2;

    // Each sequence may have up to 3 symbols (0,1,2)
    int symbols = 3;

    // Nested models will have 3 states each
    int[] states = new int[] { 3, 3 };

    // Creates a new Hidden Markov Model Classifier with the given parameters
    HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning(classifier,
        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 0
        }
    );

    // Enable support for sequence rejection
    teacher.Rejection = true;

    // Train the sequence classifier using the algorithm
    double likelihood = teacher.Run(inputs, outputs);

    HiddenMarkovModel threshold = classifier.Threshold;

    Assert.AreEqual(6, threshold.States);

    Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10);
    Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10);
    Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10);

    Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10);
    Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10);
    Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10);

    for (int i = 0; i < 3; i++)
        for (int j = 3; j < 6; j++)
            Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);

    for (int i = 3; i < 6; i++)
        for (int j = 0; j < 3; j++)
            Assert.AreEqual(Double.NegativeInfinity, threshold.Transitions[i, j]);

    Assert.IsFalse(Matrix.HasNaN(threshold.Transitions));

    classifier.Sensitivity = 0.5;

    // Will assert the models have learned the sequences correctly.
    for (int i = 0; i < inputs.Length; i++)
    {
        int expected = outputs[i];
        int actual = classifier.Compute(inputs[i], out likelihood);
        Assert.AreEqual(expected, actual);
    }

    int[] r0 = new int[] { 1, 1, 0, 0, 2 };

    double logRejection;
    int c = classifier.Compute(r0, out logRejection);

    Assert.AreEqual(-1, c);
    Assert.AreEqual(0.99906957195279988, logRejection);
    Assert.IsFalse(double.IsNaN(logRejection));

    logRejection = threshold.Evaluate(r0);
    Assert.AreEqual(-4.5653702970734793, logRejection, 1e-10);
    Assert.IsFalse(double.IsNaN(logRejection));

    threshold.Decode(r0, out logRejection);
    Assert.AreEqual(-8.21169955167614, logRejection, 1e-10);
    Assert.IsFalse(double.IsNaN(logRejection));

    foreach (var model in classifier.Models)
    {
        double[,] A = model.Transitions;

        for (int i = 0; i < A.GetLength(0); i++)
        {
            double[] row = A.Exp().GetRow(i);
            double sum = row.Sum();
            Assert.AreEqual(1, sum, 1e-10);
        }
    }

    {
        double[,] A = classifier.Threshold.Transitions;

        for (int i = 0; i < A.GetLength(0); i++)
        {
            double[] row = A.GetRow(i);
            double sum = row.Exp().Sum();
            Assert.AreEqual(1, sum, 1e-6);
        }
    }
}
public void LearnTest4() { // Create a Continuous density Hidden Markov Model Sequence Classifier // to detect a multivariate sequence and the same sequence backwards. double[][][] sequences = new double[][][] { new double[][] { // This is the first sequence with label = 0 new double[] { 0 }, new double[] { 1 }, new double[] { 2 }, new double[] { 3 }, new double[] { 4 }, }, new double[][] { // This is the second sequence with label = 1 new double[] { 4 }, new double[] { 3 }, new double[] { 2 }, new double[] { 1 }, new double[] { 0 }, } }; // Labels for the sequences int[] labels = { 0, 1 }; // Create a mixture of two 1-dimensional normal distributions (by default, // initialized with zero mean and unit covariance matrices). var density = new MultivariateMixture<MultivariateNormalDistribution>( new MultivariateNormalDistribution(1), new MultivariateNormalDistribution(1)); // Creates a sequence classifier containing 2 hidden Markov Models with 2 states // and an underlying multivariate mixture of Normal distributions as density. var classifier = new HiddenMarkovClassifier<MultivariateMixture<MultivariateNormalDistribution>>( 2, new Ergodic(2), density); // Configure the learning algorithms to train the sequence classifier var teacher = new HiddenMarkovClassifierLearning<MultivariateMixture<MultivariateNormalDistribution>>( classifier, // Train each model until the log-likelihood changes less than 0.0001 modelIndex => new BaumWelchLearning<MultivariateMixture<MultivariateNormalDistribution>>( classifier.Models[modelIndex]) { Tolerance = 0.0001, Iterations = 0, } ); // Train the sequence classifier using the algorithm double logLikelihood = teacher.Run(sequences, labels); // Calculate the probability that the given // sequences originated from the model double likelihood1, likelihood2; // Try to classify the 1st sequence (output should be 0) int c1 = classifier.Compute(sequences[0], out likelihood1); // Try to classify the 2nd sequence (output should be 1) int c2 = classifier.Compute(sequences[1], out likelihood2); Assert.AreEqual(0, c1); Assert.AreEqual(1, c2); Assert.AreEqual(-13.271981026832933, logLikelihood, 1e-10); Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10); Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10); Assert.IsFalse(double.IsNaN(logLikelihood)); Assert.IsFalse(double.IsNaN(likelihood1)); Assert.IsFalse(double.IsNaN(likelihood2)); }
static void runDiscreteDensityHiddenMarkovClassifierLearningExample()
{
    // Observation sequences should only contain symbols that are greater
    // than or equal to 0, and less than the number of symbols.
    int[][] observationSequences =
    {
        // First class of sequences: starts and ends with zeros, ones in the middle.
        new[] { 0, 1, 1, 1, 0 },
        new[] { 0, 0, 1, 1, 0, 0 },
        new[] { 0, 1, 1, 1, 1, 0 },

        // Second class of sequences: starts with twos and switches to ones until the end.
        new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

        // Third class of sequences: can start with any symbols, but ends with three.
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 0, 0, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 2, 2, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
    };

    // Consider their respective class labels.
    // Class labels have to be zero-based and successive integers.
    int[] classLabels =
    {
        0, 0, 0,                // Sequences 1-3 are from class 0.
        1, 1, 1,                // Sequences 4-6 are from class 1.
        2, 2, 2, 2, 2, 2, 2, 2  // Sequences 7-14 are from class 2.
    };

    // Use a single topology for all inner models.
    ITopology forward = new Forward(states: 3);

    // Create a hidden Markov classifier with the given topology.
    HiddenMarkovClassifier hmc = new HiddenMarkovClassifier(classes: 3, topology: forward, symbols: 4);

    // Create a learning algorithm to teach each of the inner models.
    var trainer = new HiddenMarkovClassifierLearning(
        hmc,
        // Specify individual training options for each inner model.
        modelIndex => new BaumWelchLearning(hmc.Models[modelIndex])
        {
            Tolerance = 0.001,  // iterate until log-likelihood changes less than 0.001.
            Iterations = 0      // don't place an upper limit on the number of iterations.
        }
    );

    // Call its Run method to start learning.
    double averageLogLikelihood = trainer.Run(observationSequences, classLabels);
    Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

    // Check the output classification label for some sequences.
    int y1 = hmc.Compute(new[] { 0, 1, 1, 1, 0 });  // output is y1 = 0.
    Console.WriteLine("output class = {0}", y1);
    int y2 = hmc.Compute(new[] { 0, 0, 1, 1, 0, 0 });  // output is y2 = 0.
    Console.WriteLine("output class = {0}", y2);

    int y3 = hmc.Compute(new[] { 2, 2, 2, 2, 1, 1 });  // output is y3 = 1.
    Console.WriteLine("output class = {0}", y3);
    int y4 = hmc.Compute(new[] { 2, 2, 1, 1 });  // output is y4 = 1.
    Console.WriteLine("output class = {0}", y4);

    int y5 = hmc.Compute(new[] { 0, 0, 1, 3, 3, 3 });  // output is y5 = 2.
    Console.WriteLine("output class = {0}", y5);
    int y6 = hmc.Compute(new[] { 2, 0, 2, 2, 3, 3 });  // output is y6 = 2.
    Console.WriteLine("output class = {0}", y6);
}
/// <summary> /// Trains the ensemble /// </summary> private void btnTrain_Click(object sender, EventArgs e) { DataTable source = dgvSequenceSource.DataSource as DataTable; if (source == null || hmmc == null) { MessageBox.Show("Please create a sequence classifier first."); return; } int rows = source.Rows.Count; // Gets the input sequences int[][] sequences = new int[rows][]; int[] labels = new int[rows]; for (int i = 0; i < rows; i++) { DataRow row = source.Rows[i]; string label = row["Label"] as string; for (int j = 0; j < hmmc.Models.Length; j++) { if (hmmc.Models[j].Tag.Equals(label)) { labels[i] = j; break; } } sequences[i] = decode(row["Sequences"] as string); } // Grab training parameters int iterations = (int)numIterations.Value; double limit = (double)numConvergence.Value; if (rbStopIterations.Checked) { limit = 0; } else { iterations = 0; } // Train the ensemble var teacher = new HiddenMarkovClassifierLearning(hmmc, i => new BaumWelchLearning(hmmc.Models[i]) { Iterations = iterations, Tolerance = limit } ); teacher.Run(sequences, labels); // Update the GUI dgvModels_CurrentCellChanged(this, EventArgs.Empty); }
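The handler above calls a decode() helper that is not shown in this excerpt. Below is a minimal hypothetical sketch of it, under the assumption that the "Sequences" column stores symbol indices as a semicolon-separated string such as "0;2;1;3" (the actual storage format is not given here):

// Hypothetical sketch of the decode() helper used by btnTrain_Click above;
// the real implementation is not shown. Assumes "0;2;1;3"-style storage.
private int[] decode(string sequence)
{
    string[] parts = sequence.Split(new[] { ';' }, StringSplitOptions.RemoveEmptyEntries);

    int[] symbols = new int[parts.Length];
    for (int i = 0; i < parts.Length; i++)
        symbols[i] = int.Parse(parts[i]);

    return symbols;
}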
public void LearnTest2() { // Declare some testing data int[][] inputs = new int[][] { new int[] { 0,0,1,2 }, // Class 0 new int[] { 0,1,1,2 }, // Class 0 new int[] { 0,0,0,1,2 }, // Class 0 new int[] { 0,1,2,2,2 }, // Class 0 new int[] { 2,2,1,0 }, // Class 1 new int[] { 2,2,2,1,0 }, // Class 1 new int[] { 2,2,2,1,0 }, // Class 1 new int[] { 2,2,2,2,1 }, // Class 1 }; int[] outputs = new int[] { 0,0,0,0, // First four sequences are of class 0 1,1,1,1, // Last four sequences are of class 1 }; // We are trying to predict two different classes int classes = 2; // Each sequence may have up to 3 symbols (0,1,2) int symbols = 3; // Nested models will have 3 states each int[] states = new int[] { 3, 3 }; // Creates a new Hidden Markov Model Classifier with the given parameters HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols); // Create a new learning algorithm to train the sequence classifier var teacher = new HiddenMarkovClassifierLearning(classifier, // Train each model until the log-likelihood changes less than 0.001 modelIndex => new BaumWelchLearning(classifier.Models[modelIndex]) { Tolerance = 0.001, Iterations = 0 } ); // Enable support for sequence rejection teacher.Rejection = true; // Train the sequence classifier using the algorithm double likelihood = teacher.Run(inputs, outputs); // Will assert the models have learned the sequences correctly. for (int i = 0; i < inputs.Length; i++) { int expected = outputs[i]; int actual = classifier.Compute(inputs[i], out likelihood); Assert.AreEqual(expected, actual); } HiddenMarkovModel threshold = classifier.Threshold; Assert.AreEqual(6, threshold.States); Assert.AreEqual(classifier.Models[0].Transitions[0, 0], threshold.Transitions[0, 0], 1e-10); Assert.AreEqual(classifier.Models[0].Transitions[1, 1], threshold.Transitions[1, 1], 1e-10); Assert.AreEqual(classifier.Models[0].Transitions[2, 2], threshold.Transitions[2, 2], 1e-10); Assert.AreEqual(classifier.Models[1].Transitions[0, 0], threshold.Transitions[3, 3], 1e-10); Assert.AreEqual(classifier.Models[1].Transitions[1, 1], threshold.Transitions[4, 4], 1e-10); Assert.AreEqual(classifier.Models[1].Transitions[2, 2], threshold.Transitions[5, 5], 1e-10); Assert.IsFalse(Matrix.HasNaN(threshold.Transitions)); int[] r0 = new int[] { 1, 1, 0, 0, 2 }; double logRejection; int c = classifier.Compute(r0, out logRejection); Assert.AreEqual(-1, c); Assert.AreEqual(0.99569011079012049, logRejection); Assert.IsFalse(double.IsNaN(logRejection)); logRejection = threshold.Evaluate(r0); Assert.AreEqual(-6.7949285513628528, logRejection, 1e-10); Assert.IsFalse(double.IsNaN(logRejection)); threshold.Decode(r0, out logRejection); Assert.AreEqual(-8.902077561009957, logRejection, 1e-10); Assert.IsFalse(double.IsNaN(logRejection)); }
public static HiddenMarkovClassifier<MultivariateNormalDistribution> CreateModel3(
    int states = 4, bool priors = true)
{
    MultivariateNormalDistribution density = new MultivariateNormalDistribution(2);

    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(6,
        new Forward(states), density);

    string[] labels = { "1", "2", "3", "4", "5", "6" };
    for (int i = 0; i < classifier.Models.Length; i++)
        classifier.Models[i].Tag = labels[i];

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(classifier,

        // Train each model using the selected convergence criteria
        i => new BaumWelchLearning<MultivariateNormalDistribution>(classifier.Models[i])
        {
            Tolerance = 0.1,
            Iterations = 0,

            FittingOptions = new NormalOptions()
            {
                Diagonal = true,
                Regularization = 1e-10
            }
        }
    );

    teacher.Empirical = priors;

    // Run the learning algorithm. The training set (inputTest and outputTest)
    // is defined at class level, outside this excerpt.
    teacher.Run(inputTest, outputTest);

    return classifier;
}
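A hypothetical usage sketch for the factory above; the short 2-dimensional sequence is made up for illustration and is not part of the original code:

// Hypothetical usage of CreateModel3. The factory trains on class-level
// fixtures, so we only evaluate a made-up sequence here.
var model = CreateModel3(states: 4, priors: true);

double[][] gesture =
{
    new double[] { 0.1, 0.2 },
    new double[] { 0.3, 0.4 },
    new double[] { 0.5, 0.6 },
};

double likelihood;
int label = model.Compute(gesture, out likelihood);
string name = (string)model.Models[label].Tag; // one of "1".."6"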
private void btnLearnHMM_Click(object sender, EventArgs e) { if (gridSamples.Rows.Count == 0) { MessageBox.Show("Please load or insert some data first."); return; } BindingList<Sequence> samples = database.Samples; BindingList<String> classes = database.Classes; double[][][] inputs = new double[samples.Count][][]; int[] outputs = new int[samples.Count]; for (int i = 0; i < inputs.Length; i++) { inputs[i] = samples[i].Input; outputs[i] = samples[i].Output; } int states = 5; int iterations = 0; double tolerance = 0.01; bool rejection = false; hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(classes.Count, new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray()); // Create the learning algorithm for the ensemble classifier var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm, // Train each model using the selected convergence criteria i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i]) { Tolerance = tolerance, Iterations = iterations, FittingOptions = new NormalOptions() { Regularization = 1e-5 } } ); teacher.Empirical = true; teacher.Rejection = rejection; // Run the learning algorithm double error = teacher.Run(inputs, outputs); // Classify all training instances foreach (var sample in database.Samples) { sample.RecognizedAs = hmm.Compute(sample.Input); } foreach (DataGridViewRow row in gridSamples.Rows) { var sample = row.DataBoundItem as Sequence; row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ? Color.LightGreen : Color.White; } btnLearnHCRF.Enabled = true; }
public void LearnTest5()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    var density = new MultivariateNormalDistribution(2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate Normal distribution as density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
    {
        // Train each model until the log-likelihood changes less than 0.0001
        Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,

            FittingOptions = new NormalOptions() { Diagonal = true }
        }
    };

    // Train the sequence classifier using the algorithm
    teacher.Learn(sequences, labels);
    double logLikelihood = teacher.LogLikelihood;

    // Calculate the probability that the given
    // sequences originated from the model
    double logLikelihood1, logLikelihood2;
    int c1, c2;

    // Try to classify the 1st sequence (output should be 0)
    logLikelihood1 = classifier.Probability(sequences[0], out c1);

    // Try to classify the 2nd sequence (output should be 1)
    logLikelihood2 = classifier.Probability(sequences[1], out c2);

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);

    Assert.AreEqual(-24.560599651649841, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999999998806466, logLikelihood1, 1e-10);
    Assert.AreEqual(0.99999999998806466, logLikelihood2, 1e-10);
}
public static HiddenMarkovClassifier<Independent> CreateModel3()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var comp3 = new NormalDistribution(2);
    var comp4 = new NormalDistribution(3);
    var comp5 = new NormalDistribution(4);
    var density = new Independent(comp1, comp2, comp3, comp4, comp5);

    // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
    // and an underlying Independent density combining one discrete and four Normal
    // distributions.
    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Forward(5), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm. The training data
    // (sequences2 and labels2) are fields of the containing test class; the
    // overload below shows how the same data set is constructed.
    double logLikelihood = teacher.Run(sequences2, labels2);

    return classifier;
}
public static HiddenMarkovClassifier<Independent> CreateModel3(out double[][][] sequences2, out int[] labels2)
{
    sequences2 = new double[][][]
    {
        new double[][]
        {
            // First sequence with label = 0
            new double[] { 1, 1.12, 2.41, 1.17, 9.3 },
            new double[] { 1, 2.54, 1.45, 0.16, 4.5 },
            new double[] { 1, 3.46, 2.63, 1.15, 9.2 },
            new double[] { 1, 4.73, 0.41, 1.54, 5.5 },
            new double[] { 2, 5.81, 2.42, 1.13, 9.1 },
        },

        new double[][]
        {
            // Second sequence with label = 0
            new double[] { 0, 1.49, 2.48, 1.18, 9.37 },
            new double[] { 1, 2.18, 1.44, 2.19, 1.56 },
            new double[] { 1, 3.77, 2.62, 1.10, 9.25 },
            new double[] { 2, 4.76, 5.44, 3.58, 5.54 },
            new double[] { 2, 5.85, 2.46, 1.16, 5.13 },
            new double[] { 2, 4.84, 5.44, 3.54, 5.52 },
            new double[] { 2, 5.83, 3.41, 1.22, 5.11 },
        },

        new double[][]
        {
            // Third sequence with label = 0
            new double[] { 2, 1.11, 2.41, 1.12, 2.31 },
            new double[] { 1, 2.52, 3.73, 0.12, 4.50 },
            new double[] { 1, 3.43, 2.61, 1.24, 9.29 },
            new double[] { 1, 4.74, 2.42, 2.55, 6.57 },
            new double[] { 2, 5.85, 2.43, 1.16, 9.16 },
        },

        new double[][]
        {
            // First sequence with label = 1
            new double[] { 0, 1.26, 5.44, 1.56, 9.55 },
            new double[] { 2, 2.67, 5.45, 4.27, 1.54 },
            new double[] { 1, 1.28, 3.46, 2.18, 4.13 },
            new double[] { 1, 5.89, 2.57, 1.79, 5.02 },
            new double[] { 0, 1.40, 2.48, 2.10, 6.41 },
        },

        new double[][]
        {
            // Second sequence with label = 1
            new double[] { 2, 3.21, 2.49, 1.54, 9.17 },
            new double[] { 2, 2.62, 5.40, 4.25, 1.54 },
            new double[] { 1, 1.53, 6.49, 2.17, 4.52 },
            new double[] { 1, 2.84, 2.58, 1.73, 6.04 },
            new double[] { 1, 1.45, 2.47, 2.28, 5.42 },
            new double[] { 1, 1.46, 2.46, 2.35, 5.41 },
        },

        new double[][]
        {
            // Third sequence with label = 1
            new double[] { 1, 5.27, 5.45, 1.4, 9.5 },
            new double[] { 2, 2.68, 2.54, 3.2, 2.2 },
            new double[] { 1, 2.89, 3.83, 2.6, 4.1 },
            new double[] { 1, 1.80, 1.32, 1.2, 4.2 },
            new double[] { 0, 1.41, 2.41, 2.1, 6.4 },
        }
    };

    labels2 = new[] { 0, 0, 0, 1, 1, 1 };

    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var comp3 = new NormalDistribution(2);
    var comp4 = new NormalDistribution(3);
    var comp5 = new NormalDistribution(4);
    var density = new Independent(comp1, comp2, comp3, comp4, comp5);

    // Creates a sequence classifier containing 2 hidden Markov Models with 5 states
    // and an underlying Independent density combining one discrete and four Normal
    // distributions.
    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Forward(5), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences2, labels2);

    Assert.AreEqual(-3.0493028798326081d, logLikelihood, 1e-10);

    return classifier;
}
public void learn_test()
{
    Accord.Math.Random.Generator.Seed = 0;

    #region doc_learn_1
    // Let's say we would like to do a very simple mechanism for gesture recognition.
    // In this example, we will be trying to create a classifier that can distinguish
    // between the words "hello", "car", and "wardrobe".

    // Let's say we decided to acquire some data, and we asked some people to perform
    // those words in front of a Kinect camera, and, using Microsoft's SDK, we were able
    // to capture the x and y coordinates of each hand while the word was being performed.

    // Let's say we decided to represent our frames as:
    //
    //    double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY }; // 4 dimensions
    //
    // Since we captured words, this means we captured sequences of frames as we described
    // above. Let's write some of those as rough examples to explain how gesture recognition
    // can be done:

    double[][] hello =
    {
        new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 0.1, 1.1 },
    };

    double[][] car =
    {
        new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
        new double[] { 0.0, 0.0, 0.1, 0.0 },
        new double[] { 1.0, 0.0, 0.0, 0.0 },
    };

    double[][] wardrobe =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.1 },
    };

    // Please note that a real-world example would involve *lots* of samples for each word.
    // Here, we are considering just one from each class, which is clearly sub-optimal and
    // should _never_ be done in practice. Please keep in mind that we are doing it this way
    // only to simplify this example of how to create and use HCRFs.

    // These are the words we have in our vocabulary:
    double[][][] words = { hello, car, wardrobe };

    // Now, let's associate integer labels with them. This is needed
    // for the case where there are multiple samples for each word.
    int[] labels = { 0, 1, 2 };

    // Create a new learning algorithm to train the hidden Markov model sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>, double[]>()
    {
        // Train each model until the log-likelihood changes less than 0.001
        Learner = (i) => new BaumWelchLearning<Independent<NormalDistribution>, double[]>()
        {
            Topology = new Forward(5), // this value can be found by trial-and-error

            // We will create our classifiers assuming an independent Gaussian distribution
            // for each component in our feature vectors (assuming a Naive Bayes assumption).
            Emissions = (s) => new Independent<NormalDistribution>(dimensions: 4), // 4 dimensions

            Tolerance = 0.001,
            Iterations = 100,

            // This is necessary so the code doesn't blow up when it realizes there is only one
            // sample per word class. But it could also be needed in normal situations as well:
            FittingOptions = new IndependentOptions()
            {
                InnerOption = new NormalOptions() { Regularization = 1e-5 }
            }
        }
    };

    // PS: In case you find exceptions trying to configure your model, you might want
    // to try disabling parallel processing to get more descriptive error messages:
    // teacher.ParallelOptions.MaxDegreeOfParallelism = 1;

    // Finally, we can run the learning algorithm!
    var hmm = teacher.Learn(words, labels);
    double logLikelihood = teacher.LogLikelihood;

    // At this point, the classifier should be successfully
    // able to distinguish between our three word classes:
    //
    int tc1 = hmm.Decide(hello);    // should be 0
    int tc2 = hmm.Decide(car);      // should be 1
    int tc3 = hmm.Decide(wardrobe); // should be 2
    #endregion

    Assert.AreEqual(0, tc1);
    Assert.AreEqual(1, tc2);
    Assert.AreEqual(2, tc3);

    #region doc_learn_2
    // Now, we can use the Markov classifier to initialize a HCRF
    var baseline = HiddenConditionalRandomField.FromHiddenMarkov(hmm);

    // We can check that both are equivalent, although they have
    // formulations that can be learned with different methods:
    int[] predictedLabels = baseline.Decide(words);
    #endregion

    for (int i = 0; i < words.Length; i++)
    {
        // Should be the same
        int expected = hmm.Decide(words[i]);
        int actual = baseline.Decide(words[i]);

        // Should be the same
        double h0 = hmm.LogLikelihood(words[i], 0);
        double c0 = baseline.LogLikelihood(words[i], 0);

        double h1 = hmm.LogLikelihood(words[i], 1);
        double c1 = baseline.LogLikelihood(words[i], 1);

        double h2 = hmm.LogLikelihood(words[i], 2);
        double c2 = baseline.LogLikelihood(words[i], 2);

        Assert.AreEqual(expected, predictedLabels[i]);
        Assert.AreEqual(expected, actual);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
        Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));
    }

    Accord.Math.Random.Generator.Seed = 0;

    #region doc_learn_3
    // Now we can learn the HCRF using one of the best learning
    // algorithms available, Resilient Backpropagation learning:

    // Create the Resilient Backpropagation learning algorithm
    var rprop = new HiddenResilientGradientLearning<double[]>()
    {
        Function = baseline.Function, // use the same HMM function

        Iterations = 50,
        Tolerance = 1e-5
    };

    // Run the algorithm and learn the models
    var hcrf = rprop.Learn(words, labels);

    // At this point, the HCRF should be successfully
    // able to distinguish between our three word classes:
    //
    int hc1 = hcrf.Decide(hello);    // should be 0
    int hc2 = hcrf.Decide(car);      // should be 1
    int hc3 = hcrf.Decide(wardrobe); // should be 2
    #endregion

    Assert.AreEqual(0, hc1);
    Assert.AreEqual(1, hc2);
    Assert.AreEqual(2, hc3);
}
public static HiddenMarkovClassifier <Independent <NormalDistribution> > CreateModel4(out double[][][] words, out int[] labels, bool usePriors) { double[][] hello = { new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded. new double[] { 0.0, 0.0, 1.0, 0.0 }, new double[] { 0.0, 0.0, 1.0, 0.0 }, new double[] { 0.0, 0.0, 0.1, 1.1 }, }; double[][] car = { new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4. new double[] { 0.0, 0.0, 0.1, 0.0 }, new double[] { 1.0, 0.0, 0.0, 0.0 }, }; double[][] wardrobe = { new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word. new double[] { 0.0, 0.1, 1.0, 0.0 }, new double[] { 0.1, 0.0, 1.0, 0.1 }, }; double[][] wardrobe2 = { new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the new double[] { 0.2, 0.0, 1.0, 0.1 }, // wardrobe word. new double[] { 0.0, 0.1, 1.0, 0.0 }, new double[] { 0.1, 0.0, 1.0, 0.2 }, }; words = new double[][][] { hello, car, wardrobe, wardrobe2 }; labels = new [] { 0, 1, 2, 2 }; var initial = new Independent <NormalDistribution> ( new NormalDistribution(0, 1), new NormalDistribution(0, 1), new NormalDistribution(0, 1), new NormalDistribution(0, 1) ); int numberOfWords = 3; int numberOfStates = 5; var classifier = new HiddenMarkovClassifier <Independent <NormalDistribution> > ( classes: numberOfWords, topology: new Forward(numberOfStates), initial: initial ); var teacher = new HiddenMarkovClassifierLearning <Independent <NormalDistribution> >(classifier, modelIndex => new BaumWelchLearning <Independent <NormalDistribution> >(classifier.Models[modelIndex]) { Tolerance = 0.001, Iterations = 100, FittingOptions = new IndependentOptions() { InnerOption = new NormalOptions() { Regularization = 1e-5 } } } ); if (usePriors) { teacher.Empirical = true; } double logLikelihood = teacher.Run(words, labels); Assert.AreEqual(208.38345600145777d, logLikelihood); return(classifier); }
public void CrossvalidationConstructorTest2()
{
    Accord.Math.Tools.SetupGenerator(0);

    // This is a sample code on how to use Cross-Validation
    // to assess the performance of Hidden Markov Models.

    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0,1,1,0 },   // Class 0
        new int[] { 0,0,1,0 },   // Class 0
        new int[] { 0,1,1,1,0 }, // Class 0
        new int[] { 0,1,1,1,0 }, // Class 0
        new int[] { 0,1,1,0 },   // Class 0
        new int[] { 0,1,1,1,0 }, // Class 0
        new int[] { 0,1,1,1,0 }, // Class 0
        new int[] { 0,1,0,1,0 }, // Class 0
        new int[] { 0,1,0 },     // Class 0
        new int[] { 0,1,1,0 },   // Class 0

        new int[] { 1,0,0,1 },   // Class 1
        new int[] { 1,1,0,1 },   // Class 1
        new int[] { 1,0,0,0,1 }, // Class 1
        new int[] { 1,0,1 },     // Class 1
        new int[] { 1,1,0,1 },   // Class 1
        new int[] { 1,0,1 },     // Class 1
        new int[] { 1,0,0,1 },   // Class 1
        new int[] { 1,0,0,0,1 }, // Class 1
        new int[] { 1,0,1 },     // Class 1
        new int[] { 1,0,0,0,1 }, // Class 1
    };

    int[] outputs = new int[]
    {
        0,0,0,0,0,0,0,0,0,0, // First 10 sequences are of class 0
        1,1,1,1,1,1,1,1,1,1, // Last 10 sequences are of class 1
    };

    // Create a new Cross-validation algorithm passing the data set size and the number of folds
    var crossvalidation = new CrossValidation<HiddenMarkovClassifier>(size: inputs.Length, folds: 3);

    // Define a fitting function for hidden Markov classifiers. The objective of this
    // function is to learn an HMM classifier on the subset of the data indicated by
    // cross-validation.
    crossvalidation.Fitting = delegate(int k, int[] indicesTrain, int[] indicesValidation)
    {
        // The fitting function receives the indices of the original set which
        // should be considered training data and the indices of the original
        // set which should be considered validation data.

        // Let's now grab the training data:
        var trainingInputs = inputs.Submatrix(indicesTrain);
        var trainingOutputs = outputs.Submatrix(indicesTrain);

        // And now the validation data:
        var validationInputs = inputs.Submatrix(indicesValidation);
        var validationOutputs = outputs.Submatrix(indicesValidation);

        // We are trying to predict two different classes
        int classes = 2;

        // Each sequence may have up to two symbols (0 or 1)
        int symbols = 2;

        // Nested models will have two states each
        int[] states = new int[] { 2, 2 };

        // Creates a new Hidden Markov Model Classifier with the given parameters
        HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(classes, states, symbols);

        // Create a new learning algorithm to train the sequence classifier
        var teacher = new HiddenMarkovClassifierLearning(classifier,

            // Train each model until the log-likelihood changes less than 0.001
            modelIndex => new BaumWelchLearning(classifier.Models[modelIndex])
            {
                Tolerance = 0.001,
                Iterations = 0
            }
        );

        // Train the sequence classifier using the algorithm
        double likelihood = teacher.Run(trainingInputs, trainingOutputs);

        double trainingError = teacher.ComputeError(trainingInputs, trainingOutputs);

        // Now we can compute the validation error on the validation data:
        double validationError = teacher.ComputeError(validationInputs, validationOutputs);

        // Return a new information structure containing the model and the errors achieved.
        return new CrossValidationValues<HiddenMarkovClassifier>(classifier, trainingError, validationError);
    };

    // Compute the cross-validation
    var result = crossvalidation.Compute();

    // Finally, access the measured performance.
    double trainingErrors = result.Training.Mean;
    double validationErrors = result.Validation.Mean;

    Assert.AreEqual(3, crossvalidation.K);
    Assert.AreEqual(0, result.Training.Mean);
    Assert.AreEqual(0.055555555555555552, result.Validation.Mean);

    Assert.AreEqual(3, crossvalidation.Folds.Length);
    Assert.AreEqual(3, result.Models.Length);
}
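As a small follow-up (not part of the original test), each fold's model and errors can also be inspected individually. These lines would go at the end of the test body, and the property names follow the CrossValidationValues type used above:

// Inspect each cross-validation fold individually.
for (int i = 0; i < result.Models.Length; i++)
{
    CrossValidationValues<HiddenMarkovClassifier> fold = result.Models[i];
    Console.WriteLine("Fold {0}: training error = {1}, validation error = {2}",
        i, fold.TrainingValue, fold.ValidationValue);
}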
public void LearnTest6()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    var density = new MultivariateNormalDistribution(2);

    try
    {
        new HiddenMarkovClassifier<MultivariateNormalDistribution>(
            2, new Custom(new double[2, 2], new double[2]), density);
        Assert.Fail();
    }
    catch (ArgumentException)
    {
    }

    var topology = new Custom(
        new[,] { { 1 / 2.0, 1 / 2.0 }, { 1 / 2.0, 1 / 2.0 } },
        new[] { 1.0, 0.0 });

    // Zero out the initial and transition probabilities: models built from this
    // topology cannot generate any sequence, so learning is expected to report
    // a negative-infinity log-likelihood (asserted below).
    Array.Clear(topology.Initial, 0, topology.Initial.Length);
    Array.Clear(topology.Transitions, 0, topology.Transitions.Length);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate Normal distribution as density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(
        2, topology, density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(classifier)
    {
        // Train each model until the log-likelihood changes less than 0.0001
        Learner = modelIndex => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,

            FittingOptions = new NormalOptions() { Diagonal = true }
        }
    };

    // Train the sequence classifier using the algorithm
    teacher.Learn(sequences, labels);
    double logLikelihood = teacher.LogLikelihood;

    // Calculate the probability that the given
    // sequences originated from the model
    double response1, response2;

    // Try to classify the first sequence (output should be 0)
    int c1 = classifier.Decide(sequences[0]);
    response1 = classifier.Probability(sequences[0]);

    // Try to classify the second sequence (output should be 1)
    int c2 = classifier.Decide(sequences[1]);
    response2 = classifier.Probability(sequences[1]);

    Assert.AreEqual(double.NegativeInfinity, logLikelihood);
    Assert.AreEqual(0, response1);
    Assert.AreEqual(0, response2);
}
private void btnTrain_Click(object sender, EventArgs e) { if (dataGridView1.Rows.Count == 0) { MessageBox.Show("Please load or insert some data first."); return; } int states = (int)numStates.Value; int iterations = (int)numIterations.Value; double tolerance = (double)numConvergence.Value; if (rbStopIterations.Checked) tolerance = 0.0; if (rbStopConvergence.Checked) iterations = 0; // Retrieve the training data from the data grid view int rows = dataGridView1.Rows.Count; int[] outputs = new int[rows]; var sequences = new int[rows][]; for (int i = 0; i < rows; i++) { outputs[i] = (int)dataGridView1.Rows[i].Cells["colLabel"].Value - 1; sequences[i] = GetFeatures((double[][])dataGridView1.Rows[i].Tag); } int classes = outputs.Distinct().Count(); string[] labels = new string[classes]; for (int i = 0; i < labels.Length; i++) labels[i] = (i+1).ToString(); // Create a sequence classifier for 3 classes classifier = new HiddenMarkovClassifier(labels.Length, new Forward(states), symbols: 20, names: labels); // Create the learning algorithm for the ensemble classifier var teacher = new HiddenMarkovClassifierLearning(classifier, // Train each model using the selected convergence criteria i => new BaumWelchLearning(classifier.Models[i]) { Tolerance = tolerance, Iterations = iterations, } ); // Create and use a rejection threshold model teacher.Rejection = cbRejection.Checked; teacher.Empirical = true; teacher.Smoothing = (double)numSmoothing.Value; // Run the learning algorithm teacher.Run(sequences, outputs); double error = classifier.LogLikelihood(sequences, outputs); int hits = 0; toolStripProgressBar1.Visible = true; toolStripProgressBar1.Value = 0; toolStripProgressBar1.Step = 1; toolStripProgressBar1.Maximum = dataGridView1.Rows.Count; for (int i = 0; i < rows; i++) { double likelihood; int index = classifier.Compute(sequences[i], out likelihood); DataGridViewRow row = dataGridView1.Rows[i]; if (index == -1) { row.Cells["colClassification"].Value = String.Empty; } else { row.Cells["colClassification"].Value = classifier.Models[index].Tag; } int expected = (int)row.Cells["colLabel"].Value; if (expected == index + 1) { row.Cells[0].Style.BackColor = Color.LightGreen; row.Cells[1].Style.BackColor = Color.LightGreen; row.Cells[2].Style.BackColor = Color.LightGreen; hits++; } else { row.Cells[0].Style.BackColor = Color.White; row.Cells[1].Style.BackColor = Color.White; row.Cells[2].Style.BackColor = Color.White; } toolStripProgressBar1.PerformStep(); } dgvModels.DataSource = classifier.Models; toolStripProgressBar1.Visible = false; toolStripStatusLabel1.Text = String.Format("Training complete. Hits: {0}/{1} ({2:0%})", hits, dataGridView1.Rows.Count, (double)hits / dataGridView1.Rows.Count); }
public static HiddenMarkovClassifier<Independent> CreateModel2(out double[][][] sequences, out int[] labels)
{
    sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1.1 },
            new double[] { 1, 2.5 },
            new double[] { 1, 3.4 },
            new double[] { 1, 4.7 },
            new double[] { 2, 5.8 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 2, 3.2 },
            new double[] { 2, 2.6 },
            new double[] { 1, 1.2 },
            new double[] { 1, 0.8 },
            new double[] { 0, 1.1 },
        }
    };

    labels = new[] { 0, 1 };

    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var comp1 = new GeneralDiscreteDistribution(3);
    var comp2 = new NormalDistribution(1);
    var density = new Independent(comp1, comp2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying Independent density combining a discrete and a Normal distribution.
    var classifier = new HiddenMarkovClassifier<Independent>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<Independent>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    return classifier;
}
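A minimal, hypothetical round-trip check for the factory above (not part of the original code), classifying the returned training sequences back:

// Hypothetical usage of CreateModel2: the trained classifier should assign
// the training sequences their original labels.
double[][][] seqs;
int[] lbls;
var model2 = CreateModel2(out seqs, out lbls);

double likelihood;
int a = model2.Compute(seqs[0], out likelihood); // expected: 0 (lbls[0])
int b = model2.Compute(seqs[1], out likelihood); // expected: 1 (lbls[1])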
public void LearnTest9() { double[][][] inputs = large_gestures; int[] outputs = large_outputs; int states = 5; int iterations = 100; double tolerance = 0.01; bool rejection = true; double sensitivity = 1E-85; int dimension = inputs[0][0].Length; var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Forward(states), new MultivariateNormalDistribution(dimension)); // Create the learning algorithm for the ensemble classifier var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm, // Train each model using the selected convergence criteria i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i]) { Tolerance = tolerance, Iterations = iterations, FittingOptions = new NormalOptions() { Regularization = 1e-5 } } ); teacher.Empirical = true; teacher.Rejection = rejection; // Run the learning algorithm double logLikelihood = teacher.Run(inputs, outputs); hmm.Sensitivity = sensitivity; for (int i = 0; i < large_gestures.Length; i++) { int actual = hmm.Compute(large_gestures[i]); int expected = large_outputs[i]; Assert.AreEqual(expected,actual); } }
public static HiddenMarkovClassifier<MultivariateNormalDistribution> CreateModel2()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    double[][][] sequences = new double[][][]
    {
        new double[][]
        {
            // This is the first sequence with label = 0
            new double[] { 0, 1 },
            new double[] { 1, 2 },
            new double[] { 2, 3 },
            new double[] { 3, 4 },
            new double[] { 4, 5 },
        },

        new double[][]
        {
            // This is the second sequence with label = 1
            new double[] { 4, 3 },
            new double[] { 3, 2 },
            new double[] { 2, 1 },
            new double[] { 1, 0 },
            new double[] { 0, -1 },
        }
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    var density = new MultivariateNormalDistribution(2);

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate Normal distribution as density.
    var classifier = new HiddenMarkovClassifier<MultivariateNormalDistribution>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<MultivariateNormalDistribution>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,

            FittingOptions = new NormalOptions() { Diagonal = true }
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    return classifier;
}
private static HiddenMarkovClassifier createClassifier( out int[][] sequences, bool rejection = false) { sequences = new int[][] { new int[] { 0,1,2,3,4 }, new int[] { 4,3,2,1,0 }, }; int[] labels = { 0, 1 }; HiddenMarkovClassifier classifier = new HiddenMarkovClassifier(2, new Ergodic(2), symbols: 5); var teacher = new HiddenMarkovClassifierLearning(classifier, modelIndex => new BaumWelchLearning(classifier.Models[modelIndex]) { Tolerance = 0.0001, Iterations = 0 } ); teacher.Rejection = rejection; teacher.Run(sequences, labels); return classifier; }
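And a usage sketch for the discrete helper above (again a hypothetical caller; the unfamiliar sequence is made up). With rejection enabled, the learned threshold model lets Compute return -1 for sequences that match neither class, as the LearnTest2 tests earlier in this section exercise:

// Hypothetical usage of the discrete helper with rejection enabled.
int[][] seqs;
HiddenMarkovClassifier classifier = createClassifier(out seqs, rejection: true);

double likelihood;
int c = classifier.Compute(new int[] { 0, 0, 4, 4, 0 }, out likelihood);
// Depending on the learned threshold and Sensitivity, c may be -1 (rejected).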