public HMMGenerator(PatchNames instrument)
{
    this.book = new Codebook<Note>();
    this.instrument = instrument;

    Accord.Math.Tools.SetupGenerator(10);

    // Consider some phrases:
    string[][] phrases =
    {
        "The Big Brown Fox Jumps Over the Ugly Dog".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "This is too hot to handle".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "I am flying away like a gold eagle".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Onamae wa nan desu ka".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "And then she asked, why is it so small?".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Great stuff John! Now you will surely be promoted".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Jayne was taken aback when she found out her son was gay".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
    };

    // Let's begin by transforming them to sequences of
    // integer labels using a codification codebook:
    var codebook = new Codification("Words", phrases);

    // Now we can create the training data for the models:
    int[][] sequence = codebook.Translate("Words", phrases);

    // To create the models, we will specify a forward topology,
    // as the sequences have definite start and ending points.
    var topology = new Forward(states: codebook["Words"].Symbols);
    int symbols = codebook["Words"].Symbols; // the number of distinct words in the phrases

    // Create the hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

    // Create the learning algorithm
    var teacher = new ViterbiLearning(hmm);

    // Teach the model about the phrases
    double error = teacher.Run(sequence);

    // Now, we can ask the model to generate new samples
    // from the word distributions it has just learned:
    List<int> sample = new List<int>();
    int count = 10;

    sample.Add(hmm.Generate(1)[0]);
    while (sample.Count < count)
    {
        var k = hmm.Predict(sample.ToArray(), 1);
        sample.AddRange(k);
    }

    // Translate the generated integer labels back into words:
    string[] result = codebook.Translate("Words", sample.ToArray());
}
public void GenerateTest2()
{
    Accord.Math.Tools.SetupGenerator(42);

    // Consider some phrases:
    string[][] phrases =
    {
        new[] { "those", "are", "sample", "words", "from", "a", "dictionary" },
        new[] { "those", "are", "sample", "words" },
        new[] { "sample", "words", "are", "words" },
        new[] { "those", "words" },
        new[] { "those", "are", "words" },
        new[] { "words", "from", "a", "dictionary" },
        new[] { "those", "are", "words", "from", "a", "dictionary" }
    };

    // Let's begin by transforming them to sequences of
    // integer labels using a codification codebook:
    var codebook = new Codification("Words", phrases);

    // Now we can create the training data for the models:
    int[][] sequence = codebook.Translate("Words", phrases);

    // To create the models, we will specify a forward topology,
    // as the sequences have definite start and ending points.
    var topology = new Forward(states: 4);
    int symbols = codebook["Words"].Symbols; // We have 7 different words

    // Create the hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

    // Create the learning algorithm
    BaumWelchLearning teacher = new BaumWelchLearning(hmm);

    // Teach the model about the phrases
    double error = teacher.Run(sequence);

    // Now, we can ask the model to generate new samples
    // from the word distributions it has just learned:
    int[] sample = hmm.Generate(3);

    // And the result will be: "those", "are", "words".
    string[] result = codebook.Translate("Words", sample);

    Assert.AreEqual("those", result[0]);
    Assert.AreEqual("are", result[1]);
    Assert.AreEqual("words", result[2]);
}
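// A minimal sketch (not part of the original test): the same codebook and trained model
// can also score how plausible a new phrase is under the learned word distributions.
// This assumes the Accord.NET Codification.Translate(string, string[]) and
// HiddenMarkovModel.Evaluate(int[]) overloads; treat it as an illustration only.
static double ScorePhrase(Codification codebook, HiddenMarkovModel hmm, string[] phrase)
{
    // Encode the words with the same codebook used for training.
    int[] encoded = codebook.Translate("Words", phrase);

    // Evaluate returns the log-likelihood of the observation sequence under the model;
    // higher (less negative) values mean the phrase is more typical of the training data.
    return hmm.Evaluate(encoded);
}

// Example usage, with the codebook and hmm from GenerateTest2:
//   double score = ScorePhrase(codebook, hmm, new[] { "those", "are", "words" });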
static void runDiscreteDensityHiddenMarkovClassifierLearningExample()
{
    // Observation sequences should only contain symbols that are greater than
    // or equal to 0, and lesser than the number of symbols.
    int[][] observationSequences =
    {
        // First class of sequences: starts and ends with zeros, ones in the middle.
        new[] { 0, 1, 1, 1, 0 },
        new[] { 0, 0, 1, 1, 0, 0 },
        new[] { 0, 1, 1, 1, 1, 0 },

        // Second class of sequences: starts with twos and switches to ones until the end.
        new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

        // Third class of sequences: can start with any symbols, but ends with three.
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 0, 0, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 2, 2, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
    };

    // Consider their respective class labels.
    // Class labels have to be zero-based and successive integers.
    int[] classLabels =
    {
        0, 0, 0,                 // Sequences 1-3 are from class 0.
        1, 1, 1,                 // Sequences 4-6 are from class 1.
        2, 2, 2, 2, 2, 2, 2, 2   // Sequences 7-14 are from class 2.
    };

    // Use a single topology for all inner models.
    ITopology forward = new Forward(states: 3);

    // Create a hidden Markov classifier with the given topology.
    HiddenMarkovClassifier hmc = new HiddenMarkovClassifier(classes: 3, topology: forward, symbols: 4);

    // Create a learning algorithm to teach each of the inner models.
    var trainer = new HiddenMarkovClassifierLearning(
        hmc,
        // Specify individual training options for each inner model.
        modelIndex => new BaumWelchLearning(hmc.Models[modelIndex])
        {
            Tolerance = 0.001,  // Iterate until the log-likelihood changes by less than 0.001.
            Iterations = 0      // Do not place an upper limit on the number of iterations.
        }
    );

    // Call its Run method to start learning.
    double averageLogLikelihood = trainer.Run(observationSequences, classLabels);
    Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

    // Check the output classification label for some sequences.
    int y1 = hmc.Compute(new[] { 0, 1, 1, 1, 0 });  // output is y1 = 0.
    Console.WriteLine("output class = {0}", y1);
    int y2 = hmc.Compute(new[] { 0, 0, 1, 1, 0, 0 });  // output is y2 = 0.
    Console.WriteLine("output class = {0}", y2);

    int y3 = hmc.Compute(new[] { 2, 2, 2, 2, 1, 1 });  // output is y3 = 1.
    Console.WriteLine("output class = {0}", y3);
    int y4 = hmc.Compute(new[] { 2, 2, 1, 1 });  // output is y4 = 1.
    Console.WriteLine("output class = {0}", y4);

    int y5 = hmc.Compute(new[] { 0, 0, 1, 3, 3, 3 });  // output is y5 = 2.
    Console.WriteLine("output class = {0}", y5);
    int y6 = hmc.Compute(new[] { 2, 0, 2, 2, 3, 3 });  // output is y6 = 2.
    Console.WriteLine("output class = {0}", y6);
}
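// A minimal sketch (not from the original example): besides the class label, the
// classifier can also report a confidence for its decision. This assumes the
// HiddenMarkovClassifier.Compute(int[], out double) overload is available; the
// sample sequence below is only for illustration.
static void classifyWithConfidence(HiddenMarkovClassifier hmc)
{
    double response;
    int label = hmc.Compute(new[] { 0, 0, 1, 1, 3, 3 }, out response);

    // The response gives the relative likelihood of the winning class, which can be
    // used to reject sequences the classifier is not confident about.
    Console.WriteLine("output class = {0}, response = {1}", label, response);
}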
public void ForwardTest3()
{
    var topology = new Forward(states: 3, deepness: 2);

    double[,] actualA;
    double[] actualPi;
    double[,] expectedA;
    double[] expectedPi;

    int actualStates = topology.Create(true, out actualA, out actualPi);
    int expectedStates = topology.Create(false, out expectedA, out expectedPi);

    // The log-scaled transition matrix should match the log of the linear-scale matrix.
    for (int i = 0; i < 3; i++)
        for (int j = 0; j < 3; j++)
            Assert.AreEqual(actualA[i, j], System.Math.Log(expectedA[i, j]));

    // The same holds for the initial state probabilities.
    for (int i = 0; i < 3; i++)
        Assert.AreEqual(actualPi[i], System.Math.Log(expectedPi[i]));

    Assert.AreEqual(actualStates, expectedStates);
    Assert.AreEqual(actualStates, 3);
}
public void ForwardTest2()
{
    var topology = new Forward(3, 2);

    Assert.AreEqual(topology.States, 3);
    Assert.AreEqual(topology.Deepness, 2);

    double[,] actual;
    double[] pi;
    int states = topology.Create(false, out actual, out pi);

    var expected = new double[,]
    {
        { 0.50, 0.50, 0.00 },
        { 0.00, 0.50, 0.50 },
        { 0.00, 0.00, 1.00 },
    };

    Assert.IsTrue(actual.IsEqual(expected, 0.01));
    Assert.AreEqual(states, 3);
}
public void Train(CompositionCategory cat)
{
    Accord.Math.Tools.SetupGenerator(42);

    List<int[]> inputSequences = new List<int[]>();
    List<int[]> outputSequences = new List<int[]>();

    foreach (Composition comp in cat.Compositions)
    {
        if (comp.Tracks.Count < 2)
            continue;

        // Use the first track as the input melody and the second as the target output.
        var melInput = comp.Tracks[0].GetMainSequence() as MelodySequence;
        melInput.Trim(100);
        melInput.NormalizeNotes(4);

        var melOutput = comp.Tracks[1].GetMainSequence() as MelodySequence;
        melOutput.Trim(100);
        melOutput.NormalizeNotes(4);

        // Both sequences must have the same length.
        if (melInput.Length > melOutput.Length)
            melInput.Trim(melOutput.Length);
        else if (melOutput.Length > melInput.Length)
            melOutput.Trim(melInput.Length);

        // Register the notes in the codebook and encode both melodies as integer labels.
        book.Add(melInput.Notes);
        book.Add(melOutput.Notes);

        inputSequences.Add(book.ToCodes(melInput.ToArray()));
        outputSequences.Add(book.ToCodes(melOutput.ToArray()));
    }

    if (outputSequences.Count != inputSequences.Count)
        throw new Exception("Mismatched number of input and output sequences.");

    for (int i = 0; i < outputSequences.Count; i++)
    {
        if (outputSequences[i].Length != inputSequences[i].Length)
            throw new Exception("Mismatched input/output sequence lengths.");
    }

    var topology = new Forward(states: 50);
    hmm = new HiddenMarkovModel(20, book.TotalUniqueSymbols + 1);

    var teacher = new Accord.Statistics.Models.Markov.Learning.MaximumLikelihoodLearning(hmm)
    {
        UseLaplaceRule = false
        /*Tolerance = 0.1, Iterations = 0*/
    };
    //var teacher = new ViterbiLearning(hmm);

    double ll = teacher.Run(outputSequences.ToArray(), inputSequences.ToArray());
    Console.WriteLine("Error: {0}", ll);
}
/// <summary>
/// Trains the model based on the given position data.
/// </summary>
private void TrainModel()
{
    double trainingLikelihood;
    double factor = this.trainingSampleCount;

    // Obtain discrete observation labels for the training data.
    int[][] trainingLabels = DataKMeans();

    // Create a forward-topology hidden Markov model and train it with Baum-Welch.
    Forward modelTopology = new Forward(statesCount, 2);
    this.model = new HiddenMarkovModel(modelTopology, alphabetCount);

    var baumWelchTeacher = new BaumWelchLearning(model);
    baumWelchTeacher.Run(trainingLabels);

    // Derive the recognition threshold from twice the average likelihood
    // of the training sequences under the trained model.
    for (int i = 0; i < this.trainingSampleCount; i++)
    {
        trainingLikelihood = model.Evaluate(trainingLabels[i]);
        this.recognitionThreshold += trainingLikelihood;
    }

    this.recognitionThreshold *= (2 / factor);
}
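// A minimal sketch (hypothetical helper, not part of the original class): once
// TrainModel has set this.model and this.recognitionThreshold, a new sequence of
// observation labels could be accepted or rejected by comparing its log-likelihood
// against that threshold. The name IsRecognized is an assumption for illustration.
private bool IsRecognized(int[] observationLabels)
{
    // Evaluate returns the log-likelihood of the sequence under the trained model.
    double likelihood = this.model.Evaluate(observationLabels);

    // Sequences scoring no worse than the threshold are treated as recognized.
    return likelihood >= this.recognitionThreshold;
}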