public static HiddenMarkovModel CreateModel2()
{
    int states = 3;
    int symbols = 3;

    int[][] sequences = new int[][]
    {
        new int[] { 0, 1, 1, 1, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 0, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 1 },
        new int[] { 0, 1, 1, 2, 2 },
        new int[] { 0, 0, 1, 1, 1, 2, 1 },
        new int[] { 0, 0, 0, 1, 1, 1, 2, 1 },
        new int[] { 0, 1, 1, 2, 2, 2 },
    };

    HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(states), symbols);

    var teacher = new BaumWelchLearning(hmm) { Iterations = 100, Tolerance = 0 };
    double ll = teacher.Run(sequences);

    return hmm;
}
private void btnModel_Click(object sender, EventArgs e)
{
    // Note: for a valid HMM, every row of the transition and emission
    // matrices (and the start vector) should sum to 1; several rows below
    // do not, so the decoded likelihood will not be meaningful as given.
    var transition = new double[,]
    {
        { 2.0/8, 1.0/8, 2.0/8, 3.0/8 },
        { 0, 0, 0, 1.0/8 },
        { 0, 0, 0, 0 },
        { 1, 0, 0, 0 },
    };

    var emission = new[,]
    {
        { 2.0/8, 0, 0, 0, 0 },
        { 0, 0, 0, 0, 0 },
        { 0, 0, 1.0, 0, 0 },
        { 0, 1.0, 0, 1.0, 0 },
    };

    var start = new double[] { 1, 0, 0, 0 };

    var hmm = new HiddenMarkovModel(transition, emission, start, false);

    var likelihood = 0d;
    var x = hmm.Decode(new[] { 1 }, out likelihood);
}
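// A minimal sketch (not from the original source) of the same call pattern with
// properly row-stochastic matrices, assuming the Accord.NET
// HiddenMarkovModel(double[,], double[,], double[], bool) constructor used above:
private static void DecodeWithNormalizedModel()
{
    var transition = new double[,]
    {
        { 0.5, 0.5 },
        { 0.4, 0.6 },
    };

    var emission = new double[,]
    {
        { 0.7, 0.3 },
        { 0.2, 0.8 },
    };

    var start = new double[] { 1, 0 };

    var hmm = new HiddenMarkovModel(transition, emission, start, false);

    double likelihood;
    int[] path = hmm.Decode(new[] { 0, 1, 1 }, out likelihood);
    // path holds the most likely state sequence; likelihood its (log-)likelihood.
}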
public int Recognize(ISoundSignalReader signal, HiddenMarkovModel[] models, out string name,
    SignalVisitor voiceVisitor = null)
{
    var featureUtility = new FeatureUtility(_engineParameters);
    signal.Reset();
    var features = featureUtility.ExtractFeatures(signal, voiceVisitor).First();
    var observations = _codeBook.Quantize(features.Select(item => new Point(item)).ToArray());

    var likelihoodValue = Double.MinValue;
    name = string.Empty;
    var index = 0;
    var ret = 0;

    // Pick the model that assigns the highest likelihood to the observations.
    foreach (var model in models)
    {
        var val = model.Evaluate(observations);
        if (val > likelihoodValue)
        {
            likelihoodValue = val;
            name = model.Tag.ToString();
            ret = index;
        }

        index++;
    }

    return ret;
}
public static HiddenMarkovModel<MultivariateNormalDistribution> deserialize(string filename)
{
    using (StreamReader r = new StreamReader(filename))
    {
        String data = r.ReadToEnd();
        String[] dataArr = data.Split('#');

        // probDelimitedStr: dataArr[0]
        double[] prob = createSingleDimDoubleArray(dataArr[0], '|');

        // transDelimitedStr: dataArr[1]
        double[,] trans = createDoubleDimDoubleArray(dataArr[1], '|', '*');

        // emissionsDelimitedStr: dataArr[2]
        String[] emissions = dataArr[2].Split('$');
        MultivariateNormalDistribution[] e2 = new MultivariateNormalDistribution[emissions.Length];
        for (int i = 0; i < emissions.Length; i++)
        {
            String[] meansNCovariance = emissions[i].Split('&');
            String meansStr = meansNCovariance[0];
            String covarianceStr = meansNCovariance[1];

            double[] means = createSingleDimDoubleArray(meansStr, '|');
            double[,] covariance = createDoubleDimDoubleArray(covarianceStr, '|', '*');

            MultivariateNormalDistribution dist = new MultivariateNormalDistribution(means, covariance);
            e2[i] = dist;
        }

        HiddenMarkovModel<MultivariateNormalDistribution> hmm =
            new HiddenMarkovModel<MultivariateNormalDistribution>(trans, e2, prob);
        return hmm;
    }
}
public HMMGenerator(PatchNames instrument)
{
    this.book = new Codebook<Note>();
    this.instrument = instrument;

    Accord.Math.Tools.SetupGenerator(10);

    // Consider some phrases:
    //
    string[][] phrases =
    {
        "The Big Brown Fox Jumps Over the Ugly Dog".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "This is too hot to handle".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "I am flying away like a gold eagle".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Onamae wa nan desu ka".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "And then she asked, why is it so small?".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Great stuff John! Now you will surely be promoted".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
        "Jayne was taken aback when she found out her son was gay".Split(new char[] { ' ' }, StringSplitOptions.RemoveEmptyEntries),
    };

    // Let's begin by transforming them into sequences of
    // integer labels using a codification codebook:
    var codebook = new Codification("Words", phrases);

    // Now we can create the training data for the models:
    int[][] sequence = codebook.Translate("Words", phrases);

    // To create the models, we will specify a forward topology,
    // as the sequences have definite start and ending points.
    //
    var topology = new Forward(states: codebook["Words"].Symbols);
    int symbols = codebook["Words"].Symbols; // the number of distinct words

    // Create the hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

    // Create the learning algorithm
    var teacher = new ViterbiLearning(hmm);

    // Teach the model about the phrases
    double error = teacher.Run(sequence);

    // Now, we can ask the model to generate new samples
    // from the word distributions it has just learned:
    //
    List<int> sample = new List<int>();
    int count = 10;
    sample.Add(hmm.Generate(1)[0]);
    while (sample.Count < count)
    {
        var k = hmm.Predict(sample.ToArray(), 1);
        sample.AddRange(k);
    }

    // Translate the generated labels back into words:
    string[] result = codebook.Translate("Words", sample.ToArray());
}
public void RunTest()
{
    // Example from
    // http://www.cs.columbia.edu/4761/notes07/chapter4.3-HMM.pdf

    int[][] observations =
    {
        new int[] { 0,0,0,1,0,0 },
        new int[] { 1,0,0,1,0,0 },
        new int[] { 0,0,1,0,0,0 },
        new int[] { 0,0,0,0,1,0 },
        new int[] { 1,0,0,0,1,0 },
        new int[] { 0,0,0,1,1,0 },
        new int[] { 1,0,0,0,0,0 },
        new int[] { 1,0,1,0,0,0 },
    };

    int[][] paths =
    {
        new int[] { 0,0,1,0,1,0 },
        new int[] { 1,0,1,0,1,0 },
        new int[] { 1,0,0,1,1,0 },
        new int[] { 1,0,1,1,1,0 },
        new int[] { 1,0,0,1,0,1 },
        new int[] { 0,0,1,0,0,1 },
        new int[] { 0,0,1,1,0,1 },
        new int[] { 0,1,1,1,0,0 },
    };

    HiddenMarkovModel model = new HiddenMarkovModel(states: 2, symbols: 2);

    MaximumLikelihoodLearning target = new MaximumLikelihoodLearning(model);
    target.UseLaplaceRule = false;

    double logLikelihood = target.Run(observations, paths);

    var pi = Matrix.Exp(model.Probabilities);
    var A = Matrix.Exp(model.Transitions);
    var B = Matrix.Exp(model.Emissions);

    Assert.AreEqual(0.5, pi[0]);
    Assert.AreEqual(0.5, pi[1]);

    Assert.AreEqual(7 / 20.0, A[0, 0], 1e-5);
    Assert.AreEqual(13 / 20.0, A[0, 1], 1e-5);
    Assert.AreEqual(14 / 20.0, A[1, 0], 1e-5);
    Assert.AreEqual(6 / 20.0, A[1, 1], 1e-5);

    Assert.AreEqual(17 / 25.0, B[0, 0]);
    Assert.AreEqual(8 / 25.0, B[0, 1]);
    Assert.AreEqual(19 / 23.0, B[1, 0]);
    Assert.AreEqual(4 / 23.0, B[1, 1]);

    Assert.AreEqual(-1.1472359046136624, logLikelihood);
}
public static void serialize(HiddenMarkovModel<MultivariateNormalDistribution> model, string filename)
{
    // Serialized layout:
    //   model.Probabilities # model.Transitions # emissions[]
    //   emissions[0] $ emissions[1] $ emissions[2] ...
    //   each emission: means[] & covariance[]
    //   e.g. 1.1|2.2|3.3 & 2.2|4.4*7.7|8.8
    using (StreamWriter w = new StreamWriter(filename))
    {
        // model.Probabilities
        double[] prob = model.Probabilities;
        string probDelimitedStr = createDelimitedStringOneDim(prob, '|');

        // model.Transitions
        double[,] trans = model.Transitions;
        string transDelimitedStr = createDelimitedStringTwoDim(trans, '|', '*');

        w.Write(probDelimitedStr);
        w.Write("#");
        w.Write(transDelimitedStr);
        w.Write("#");

        // model.Emissions: for each emission, store its Mean and Covariance.
        MultivariateNormalDistribution[] multiVNormD = model.Emissions;

        for (int i = 0; i < multiVNormD.Length; i++)
        {
            double[] means = multiVNormD[i].Mean;
            string meansDelimitedStr = createDelimitedStringOneDim(means, '|');

            double[,] covariance = multiVNormD[i].Covariance;
            string covarianceDelimitedStr = createDelimitedStringTwoDim(covariance, '|', '*');

            w.Write(meansDelimitedStr);
            w.Write("&");
            w.Write(covarianceDelimitedStr);

            // fence post: no $ after the last emission
            if (i < multiVNormD.Length - 1)
            {
                w.Write("$");
            }
        }
    }
}
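// A hedged usage sketch (not part of the original source) showing the intended
// round trip between the serialize and deserialize helpers above; "model.hmm"
// is an arbitrary example path:
public static void SerializationRoundTripExample(
    HiddenMarkovModel<MultivariateNormalDistribution> trained)
{
    serialize(trained, "model.hmm");
    var restored = deserialize("model.hmm");
    // The restored model should evaluate sequences identically to the original,
    // up to the precision lost in the text encoding of the doubles.
}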
/// <summary>
///   Computes Forward probabilities for a given hidden Markov model and a set of observations.
/// </summary>
public static double[,] Forward(HiddenMarkovModel model, int[] observations, out double logLikelihood)
{
    double[] coefficients;
    double[,] fwd = Forward(model, observations, out coefficients);

    // The sequence log-likelihood is the sum of the logs of the scaling coefficients.
    logLikelihood = 0;
    for (int i = 0; i < coefficients.Length; i++)
        logLikelihood += System.Math.Log(coefficients[i]);

    return fwd;
}
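// A minimal usage sketch (an assumption, not from the original source): because
// the logs of the per-time scaling coefficients sum to the sequence
// log-likelihood, this overload yields log P(observations | model) directly:
public static void ForwardUsageExample(HiddenMarkovModel model)
{
    int[] observations = { 0, 1, 1, 2 };

    double logLikelihood;
    double[,] fwd = Forward(model, observations, out logLikelihood);

    // fwd[t, i] is the scaled probability of being in state i at time t.
    Console.WriteLine(logLikelihood);
}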
public HiddenMarkovModel BuildModel(IList<ISoundSignalReader> signalReaders, string tag,
    SignalVisitor visitor = null)
{
    var signals = signalReaders;
    var signalsCount = signals.Count();
    List<List<double[]>> samples = new List<List<double[]>>();
    var featureUtility = new FeatureUtility(_engineParameters);
    var meanFeaturesLength = 0.0;

    for (var signalIndex = 0; signalIndex < signalsCount; signalIndex++)
    {
        var signal = signals[signalIndex];
        signal.Reset();
        var allSignalFeatures = featureUtility.ExtractFeatures(signal, visitor).ToArray();
        samples.AddRange(allSignalFeatures);
    }

    var featuresInput = new double[samples.Count][][];
    for (var index = 0; index < samples.Count; index++)
    {
        featuresInput[index] = samples[index].ToArray();
        meanFeaturesLength += featuresInput[index].Length;
    }

    meanFeaturesLength = meanFeaturesLength / samples.Count;

    var hmm = new HiddenMarkovModel(_numberOfHiddenStates, _codeBook.Size, false);

    List<int[]> observables = new List<int[]>();
    for (var signalIndex = 0; signalIndex < featuresInput.Length; signalIndex++) // foreach word signal
    {
        var points = featuresInput[signalIndex].Select(item => new Point(item)); // convert features to points
        var codeItems = _codeBook.Quantize(points.ToArray());
        observables.Add(codeItems);
    }

    const int iterations = 20000;
    const double tolerance = 0.0;
    var viterbiLearning = new ViterbiLearning(hmm) { Iterations = iterations, Tolerance = tolerance };
    viterbiLearning.Run(observables.ToArray());

    var idProp = new IdentificationProperties
    {
        Class = ClassType.Word,
        MeanFeaturesLength = meanFeaturesLength,
        Label = tag
    };
    viterbiLearning.Model.Tag = idProp;
    _models[tag] = viterbiLearning.Model;
    return viterbiLearning.Model;
}
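// A hedged usage sketch (names and parameters assumed from the snippets above,
// not confirmed by the original source): train one model per word with
// BuildModel, then classify an unknown signal with the Recognize method shown earlier.
public void TrainAndRecognizeExample(
    IList<ISoundSignalReader> yesSamples,
    IList<ISoundSignalReader> noSamples,
    ISoundSignalReader unknown)
{
    var models = new[]
    {
        BuildModel(yesSamples, "yes"),
        BuildModel(noSamples, "no"),
    };

    string name;
    int best = Recognize(unknown, models, out name);
    // "name" holds the tag of the highest-likelihood model; "best" is its index.
}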
private void learnButton_Click(object sender, RoutedEventArgs e)
{
    ReadSequenceBox();
    ReadCharBox();
    ReadStateBox();
    ReadLikelihoodBox();

    int[][] sequenceArray = SequenceListToArray();

    hmm = new HiddenMarkovModel(alphabet, states);
    hmm.Learn(sequenceArray, likelihood);
}
/// <summary>
///   Deserialization
/// </summary>
public Gesture(SerializationInfo info, StreamingContext ctxt)
{
    // Get the values from info and assign them to the appropriate properties
    this.name = (String)info.GetValue("name", typeof(string));
    this.sampleDimensionsCount = (int)info.GetValue("sampleDimensionsCount", typeof(int));
    this.frameCount = (int)info.GetValue("frameCount", typeof(int));
    this.trainingSampleCount = (int)info.GetValue("trainingSampleCount", typeof(int));
    this.alphabetCount = (int)info.GetValue("alphabetCount", typeof(int));
    this.statesCount = (int)info.GetValue("statesCount", typeof(int));
    this.trainingDataKMeans = (KMeans)info.GetValue("trainingDataKMeans", typeof(KMeans));
    this.recognitionThreshold = (double)info.GetValue("recognitionThreshold", typeof(double));
    this.model = (HiddenMarkovModel)info.GetValue("model", typeof(HiddenMarkovModel));
}
static void runArbitraryDensityHiddenMarkovModelLearningExample()
{
    // Create continuous sequences.
    // In the sequences below, there seem to be two states: one emitting values
    // between 0 and 1, and another emitting values between 5 and 7.
    // The state appears to switch on every observation.
    double[][] observationSequences = new double[][]
    {
        new double[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 },
        new double[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 },
        new double[] { 0.1, 7.0, 0.1, 7.0, 0.2, 5.6 },
    };

    // Create a continuous hidden Markov model with two states organized in an
    // ergodic topology and an underlying univariate Normal distribution as
    // probability density.
    var hmm = new HiddenMarkovModel<NormalDistribution>(
        topology: new Ergodic(states: 2), emissions: new NormalDistribution());

    // Configure the learning algorithm to train the model until the difference
    // in the average log-likelihood changes only by as little as 0.0001.
    var trainer = new BaumWelchLearning<NormalDistribution>(hmm)
    {
        Tolerance = 0.0001,
        Iterations = 0,
    };

    // Fit the model.
    double averageLogLikelihood = trainer.Run(observationSequences);
    Console.WriteLine("average log-likelihood for the observations = {0}", averageLogLikelihood);

    // The log-probability of the sequences learned.
    double logLik1 = hmm.Evaluate(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 });  // -0.12799388666109757.
    double logLik2 = hmm.Evaluate(new[] { 0.2, 6.2, 0.3, 6.3, 0.1, 5.0 });  // 0.01171157434400194.

    // The log-probability of an unrelated sequence.
    double logLik3 = hmm.Evaluate(new[] { 1.1, 2.2, 1.3, 3.2, 4.2, 1.0 });  // -298.7465244473417.

    // Transform the log-probabilities to actual probabilities.
    // Note that for continuous emissions, Evaluate returns a log-density,
    // so the exponentiated value may exceed 1 (as for the second sequence).
    Console.WriteLine("probability = {0}", Math.Exp(logLik1));  // 0.879.
    Console.WriteLine("probability = {0}", Math.Exp(logLik2));  // 1.011.
    Console.WriteLine("probability = {0}", Math.Exp(logLik3));  // 0.000.

    // Ask the model to decode one of the sequences.
    // The state variable will contain: { 0, 1, 0, 1, 0, 1 }.
    double logLikelihood = 0.0;
    int[] path = hmm.Decode(new[] { 0.1, 5.2, 0.3, 6.7, 0.1, 6.0 }, out logLikelihood);
    Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
    foreach (int state in path)
        Console.Write("{0},", state);
    Console.WriteLine("]");
}
public override void Learn(SequenceData trainingData, SequenceData validationData, SequenceData testData)
{
    //hmm = SparseHiddenMarkovModel.FromCompleteGraph(2, trainingData.NumSymbols);
    hmm = new HiddenMarkovModel(trainingData.NumSymbols, 4);

    double likelihood = 0.0;
    double newLikelihood = Double.MinValue;

    do
    {
        //HMMGraph graph = hmm.ToGraph();
        HMMGraph graph = ModelConverter.HMM2Graph(hmm);

        //CutEdges(graph, epsilon);

        double[] viterbyScores = ComputeViterbyScores(validationData, true);

        int[] statesToSplit = IdentifyWeakStates(viterbyScores).ToArray();
        foreach (int weakPoint in statesToSplit)
            SplitState(graph, weakPoint);

        WriteLine(String.Format("Added {0} states", statesToSplit.Length));
        //WriteLine(String.Format("Removed {0} states", RemoveUnpopularStates(graph, viterbyScores)));

        //hmm = SparseHiddenMarkovModel.FromGraph(graph);
        hmm = ModelConverter.Graph2HMM(graph);

        WriteLine("Running Baum Welch...");
        //hmm.Learn(trainingData.GetNonempty(), 0.0, 8);
        hmm.Learn(trainingData.GetNonempty(), 8);

        likelihood = newLikelihood;
        newLikelihood = 0.0;

        foreach (int[] signal in validationData.GetNonempty())
            newLikelihood += hmm.Evaluate(signal, true);

        WriteLine(String.Empty);
        WriteLine(String.Format("Number of HMM States: {0}", NumberOfStates));
        //WriteLine(String.Format("Transition Sparsity; {0}", hmm.TransitionSparsity));
        WriteLine(String.Format("Log Likelihood: {0}", newLikelihood));
        WriteLine(String.Empty);
    }
    while (Math.Abs(newLikelihood - likelihood) > convergenceThreshold);
}
public StochasticGenerator(MelodySequence[] seqs)
{
    if (seqs != null && seqs.Length > 0)
        this.base_seq = seqs[0];

    List<int[]> notes = new List<int[]>();
    notes_map = new Dictionary<int, int>();

    int max_notes = 0;
    int note = 0;
    foreach (MelodySequence m in seqs)
    {
        Note[] song = m.ToArray();
        int[] _notes = new int[song.Length];

        for (int i = 0; i < song.Length; i++)
        {
            // Encode pitch and duration together into a single symbol.
            var unote = song[i].Pitch + song[i].Duration * 128;
            if (notes_map.ContainsKey(unote))
                _notes[i] = notes_map[unote];
            else
            {
                notes_map[unote] = note++;
                _notes[i] = notes_map[unote];
            }
        }
        notes.Add(_notes);
    }
    max_notes = note;

    Console.WriteLine("Training Pitches");

    pitch_hmm = new HiddenMarkovModel(50, max_notes);

    var teacher = new BaumWelchLearning(pitch_hmm) { Tolerance = 0.0001, Iterations = 0 };
    var __pitches = notes.ToArray();
    teacher.Run(__pitches);
    teacher.Run(__pitches);

    Console.WriteLine("Done training");

    this.MaxGenerations = 2000;
}
public static HMMGraph HMM2Graph(HiddenMarkovModel hmm)
{
    HMMGraph g = new HMMGraph(hmm.Symbols);
    Node[] nodes = new Node[hmm.States];

    // Create one graph node per HMM state.
    for (int i = 0; i < hmm.States; i++)
    {
        nodes[i] = new Node();
        g.AddNode(nodes[i]);
    }

    // Copy initial, transition, and emission probabilities onto the nodes.
    for (int i = 0; i < hmm.States; i++)
    {
        nodes[i].InitialProbability = hmm.Probabilities[i];
        for (int j = 0; j < hmm.States; j++)
            nodes[i].SetTransition(nodes[j], hmm.Transitions[i, j]);
        for (int k = 0; k < hmm.Symbols; k++)
            nodes[i].SetEmission(k, hmm.Emissions[i, k]);
    }

    return g;
}
/// <summary>
///   Computes Forward probabilities for a given hidden Markov model and a set of observations.
/// </summary>
public static double[,] Forward(HiddenMarkovModel model, int[] observations, out double[] scaling)
{
    int states = model.States;
    double[,] A = model.Transitions;
    double[,] B = model.Emissions;
    double[] pi = model.Probabilities;

    int T = observations.Length;
    var fwd = new double[T, states];
    scaling = new double[T];

    // 1. Initialization
    for (int i = 0; i < states; i++)
        scaling[0] += fwd[0, i] = pi[i] * B[i, observations[0]];

    if (scaling[0] != 0) // Scaling
    {
        for (int i = 0; i < states; i++)
            fwd[0, i] /= scaling[0];
    }

    // 2. Induction
    for (int t = 1; t < T; t++)
    {
        int obs = observations[t];

        for (int i = 0; i < states; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < states; j++)
                sum += fwd[t - 1, j] * A[j, i];
            fwd[t, i] = sum * B[i, obs];

            scaling[t] += fwd[t, i]; // scaling coefficient
        }

        if (scaling[t] != 0) // Scaling
        {
            for (int i = 0; i < states; i++)
                fwd[t, i] /= scaling[t];
        }
    }

    return fwd;
}
public void UniformTest()
{
    // Create a new Ergodic hidden Markov model with three
    // fully-connected states and four sequence symbols.
    var model = new HiddenMarkovModel(new Ergodic(3), 4);

    var expected = new double[,]
    {
        { 0.33, 0.33, 0.33 },
        { 0.33, 0.33, 0.33 },
        { 0.33, 0.33, 0.33 },
    };

    var A = Matrix.Exp(model.Transitions);

    Assert.AreEqual(model.States, 3);
    Assert.IsTrue(A.IsEqual(expected, 0.01));
}
static void runArbitraryDensityHiddenMarkovModelExample()
{
    // Create the transition matrix A.
    double[,] transitions =
    {
        { 0.7, 0.3 },
        { 0.4, 0.6 }
    };

    // Create the vector of emission densities B.
    GeneralDiscreteDistribution[] emissions =
    {
        new GeneralDiscreteDistribution(0.1, 0.4, 0.5),
        new GeneralDiscreteDistribution(0.6, 0.3, 0.1)
    };

    // Create the initial probabilities pi.
    double[] initial = { 0.6, 0.4 };

    // Create a new hidden Markov model with discrete probabilities.
    var hmm = new HiddenMarkovModel<GeneralDiscreteDistribution>(transitions, emissions, initial);

    // Query the probability of the sequence { 0, 1, 2 } occurring.
    double[] sequence = new double[] { 0, 1, 2 };

    // Evaluate its likelihood.
    double logLikelihood = hmm.Evaluate(sequence);
    // The log-likelihood of the sequence occurring within the model is -3.3928721329161653.
    Console.WriteLine("log-likelihood = {0}", logLikelihood);

    // Get the Viterbi path of the sequence.
    int[] path = hmm.Decode(sequence, out logLikelihood);

    // The state path will be 1-0-0 and the log-likelihood will be -4.3095199438871337.
    Console.Write("log-likelihood = {0}, Viterbi path = [", logLikelihood);
    foreach (int state in path)
        Console.Write("{0},", state);
    Console.WriteLine("]");
}
public static HiddenMarkovModel CreateModel1()
{
    double[] initial = { 1.0, 0.0 };

    double[,] transitions =
    {
        { 1 / 3.0, 2 / 3.0 },
        { 0.00, 1.00 },
    };

    double[,] emissions =
    {
        { 0.25, 0.25, 0.50 },
        { 0.05, 0.05, 0.90 }
    };

    HiddenMarkovModel model = new HiddenMarkovModel(transitions, emissions, initial);
    return model;
}
private static HiddenMarkovModel createHMM()
{
    double[] initial = { 1.0, 0.0 };

    double[,] transitions =
    {
        { 0.33, 0.66 },
        { 0.00, 1.00 },
    };

    double[,] emissions =
    {
        { 0.25, 0.25, 0.50 },
        { 0.05, 0.05, 0.90 }
    };

    HiddenMarkovModel model = new HiddenMarkovModel(transitions, emissions, initial);
    return model;
}
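// A small usage sketch (an assumption, not from the original source) for the
// two near-identical factory methods above: evaluate a sequence's likelihood
// and decode its most probable hidden state path.
public static void CreateModelUsageExample()
{
    HiddenMarkovModel model = CreateModel1();

    // Log-likelihood of observing the symbol sequence { 0, 2, 2 }:
    double logLikelihood = model.Evaluate(new int[] { 0, 2, 2 });

    // Most likely hidden state path for the same sequence:
    double viterbiLogLikelihood;
    int[] path = model.Decode(new int[] { 0, 2, 2 }, out viterbiLogLikelihood);
}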
/*public static void SaveSequenceList(SequenceList seqList, string path)
{
    Stream writeStream = new FileStream(path, FileMode.Create, FileAccess.Write, FileShare.None);
    seqList.Save(writeStream);
    writeStream.Close();
}

public static SequenceList LoadSequenceList(string path)
{
    Stream readStream = new FileStream(path, FileMode.Open, FileAccess.Read, FileShare.Read);
    SequenceList seqList = SequenceList.Load(readStream);
    readStream.Close();
    return seqList;
}*/

public static HiddenMarkovModel<MultivariateNormalDistribution> CreateModelFromFrames(List<List<Frame>> frames)
{
    SequenceList sequences = Utils.FramesToSequenceList(frames);

    HiddenMarkovModel<MultivariateNormalDistribution> hmm;
    MultivariateNormalDistribution mnd = new MultivariateNormalDistribution(sequences.GetDimensions());
    hmm = new HiddenMarkovModel<MultivariateNormalDistribution>(new Forward(5), mnd);

    var teacher = new BaumWelchLearning<MultivariateNormalDistribution>(hmm);
    teacher.Tolerance = 0.0001;
    teacher.Iterations = 0;
    teacher.FittingOptions = new NormalOptions()
    {
        Diagonal = true,      // only diagonal covariance matrices
        Regularization = 1e-5 // avoid non-positive definite errors
    };

    teacher.Run(sequences.GetArray());

    return hmm;
}
private static bool[] FindReachableNodes(HiddenMarkovModel hmm)
{
    bool[] reachable = hmm.Probabilities.Select(p => (p != 0)).ToArray();
    List<int> newlyReached = new List<int>();
    for (int i = 0; i < hmm.States; i++)
        if (reachable[i])
            newlyReached.Add(i);

    while (newlyReached.Count > 0)
    {
        List<int> nextNewlyReached = new List<int>();
        foreach (int i in newlyReached)
        {
            for (int j = 0; j < hmm.States; j++)
            {
                // Note: the original tested !reachable[i], which is always false
                // here (state i was just reached); the target state j is what
                // must be tested, or no new states are ever discovered.
                if (hmm.Transitions[i, j] != 0 && !reachable[j])
                {
                    nextNewlyReached.Add(j);
                    reachable[j] = true;
                }
            }
        }
        newlyReached = nextNewlyReached;
    }

    return reachable;
}
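// A hedged usage sketch (not from the original source), assuming, as the
// snippet above does, that Probabilities and Transitions hold linear (not log)
// probabilities: with the fix, states that can never be entered are reported
// as unreachable.
private static void FindReachableNodesExample()
{
    // State 2 has no incoming transitions and zero initial probability.
    var transitions = new double[,]
    {
        { 0.5, 0.5, 0.0 },
        { 0.5, 0.5, 0.0 },
        { 0.0, 0.0, 1.0 },
    };
    var emissions = new double[,]
    {
        { 0.5, 0.5 },
        { 0.5, 0.5 },
        { 0.5, 0.5 },
    };
    var initial = new double[] { 1.0, 0.0, 0.0 };

    var hmm = new HiddenMarkovModel(transitions, emissions, initial);
    bool[] reachable = FindReachableNodes(hmm); // expected: { true, true, false }
}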
public MarkovGenerator(MelodySequence[] seqs, PatchNames instrument = PatchNames.Acoustic_Grand)
{
    this.instrument = instrument;

    note_map = new Dictionary<Note, int>();

    int pitch = 0;
    int j = 0;
    pitches = new int[seqs.Length][];
    foreach (MelodySequence m in seqs)
    {
        Note[] song = m.ToArray();
        pitches[j] = new int[m.Length];

        for (int i = 0; i < song.Length; i++)
        {
            // Assign each distinct note the next integer label.
            if (!note_map.ContainsKey(song[i]))
                note_map[song[i]] = pitch++;
            pitches[j][i] = note_map[song[i]];
        }
        j++;

        if (m.Length > max_length)
            max_length = m.Length;
    }

    hmm = new HiddenMarkovModel(3, pitch + 1);

    var teacher = new BaumWelchLearning(hmm);
    teacher.Iterations = 10000;
    teacher.Run(pitches);

    Console.WriteLine("Done training");
}
public void LearnTest4()
{
    int[][] sequences = new int[][]
    {
        new int[] { 0, 3, 1 },
        new int[] { 0, 2 },
        new int[] { 1, 0, 3 },
        new int[] { 3, 4 },
        new int[] { 0, 1, 3, 5 },
        new int[] { 0, 3, 4 },
        new int[] { 0, 1, 3, 5 },
        new int[] { 0, 1, 3, 5 },
        new int[] { 0, 1, 3, 4, 5 },
    };

    HiddenMarkovModel hmm = new HiddenMarkovModel(3, 6);

    var teacher = new ViterbiLearning(hmm) { Iterations = 100, Tolerance = 0 };
    double ll = teacher.Run(sequences);

    double l0; hmm.Decode(sequences[0], out l0);
    double l1; hmm.Decode(sequences[1], out l1);
    double l2; hmm.Decode(sequences[2], out l2);

    double pl = System.Math.Exp(ll);
    double p0 = System.Math.Exp(l0);
    double p1 = System.Math.Exp(l1);
    double p2 = System.Math.Exp(l2);

    Assert.AreEqual(0.078050218613091762, pl, 1e-10);
    Assert.AreEqual(0.008509757587448558, p0, 1e-10);
    Assert.AreEqual(0.010609567901234561, p1, 1e-10);
    Assert.AreEqual(0.008509757587448558, p2, 1e-10);
}
public void LearnTest()
{
    HiddenMarkovModel hmm = new HiddenMarkovModel(2, 3);

    int[] observation = new int[]
    {
        0,1,1,2,2,1,1,1,0,0,0,0,0,0,0,0,2,2,0,0,1,1,1,2,0,0,
        0,0,0,0,1,2,1,1,1,0,2,0,1,0,2,2,2,0,0,2,0,1,2,2,0,1,
        1,2,2,2,0,0,1,1,2,2,0,0,2,2,0,0,1,0,1,2,0,0,0,0,2,0,
        2,0,1,1,0,1,0,0,0,1,2,1,1,2,0,2,0,2,2,0,0,1
    };

    int[] observation2 = new int[]
    {
        0,1,0,0,2,1,1,0,0,2,1,0,1,1,2,0,1,1,1,0,0,2,0,0,2,1,
        1,1,2,0,2,2,1,0,1,2,0,2,1,0,2,1,1,2,0,1,0,1,1,0,1,2,
        1,0,2,0,1,0,1,2,0,0,2,0,2,0,0,1,0,0,0,0,1,1,2,2,1,2,
        0,1,1,1,2,2,1,1,1,2,2,0,2,1,1,2,0,0,1,1,1,1,1,1,1,0,
        0,1,0,1,0,1,0,0,2,0,1,0,2,0,0,0,0,1,1,1,1,1,1,0,2,0,
        2,2,1,2,1,2,1,0,2,1,1,2,1,2,1,0,0,2,0,0,2,2,2,0,0,1,
        0,1,0,1,0,1,0,0,0,0,0,1,1,1,2,0,0,0,0,0,0,2,2,0,0,0,
        0,0,1,0,2,2,2,2,2,1,2,0,1,0,1,2,2,1,0,1,1,2,1,1,1,2,
        2,2,0,1,1,1,1,2,1,0,1,0,1,1,0,2,2,2,1,1,1,1,0,2,1,0,
        2,1,1,1,2,0,0,1,1,1,1,2,1,1,2,0,0,0,0,0,2,2,2,0,1,1,
        1,0,1,0,0,0,0,2,2,2,2,0,1,1,0,1,2,1,2,1,1,0,0,0,0,2,
        2,1,1,0,1,0,0,0,0,1,0,0,0,2,0,0,0,2,1,2,2,0,0,0,0,0,
        0,2,0,0,2,0,0,0,2,0,1,1,2,2,1,2,1,2,0,0,0,0,2,0,2,0,
        1,0,0,2,2,1,2,1,2,2,0,1,1,1,0,0,1,1,1,2,1,0,0,2,0,0,
        0,0,1,2,0,0,1,2,0,0,0,2,1,1,1,1,1,2,2,0,0,1,1,1,0,0,
        2,0,1,1,0,2,2,0,0,0,1,1,1,1,1,1,2,1,1,0,2,0,0,0,1,1,
        1,2,1,0,0,0,1,1,0,1,1,1,0,0,0,1,1,1,2,2,2,0,2,0,2,1,
        2,1,0,2,1,2,1,0,0,2,1,1,1,1,0,0,0,1,2,0,2,2,1,2,1,1,
        1,0,1,0,0,0,0,2,0,1,1,1,0,2,0,1,0,2,1,2,2,0,2,1,0,0,
        2,1,2,2,0,2,1,2,1,2,0,0,0,1,2,1,2,2,1,0,0,0,1,1,2,0,
        2,1,0,0,0,1,0,0,1,2,0,0,1,2,2,2,0,1,2,0,1,0,1,0,2,2,
        0,2,0,1,1,0,1,1,1,2,2,0,0,0,0,0,1,1,0,0,2,0,0,1,0,0,
        1,0,2,1,1,1,1,1,2,0,0,2,0,1,2,0,1,1,1,2,0,0,0,1,2,0,
        0,0,2,2,1,1,1,0,1,1,0,2,2,0,1,2,2,1,1,1,2,1,0,2,0,0,
        1,1,1,1,1,1,2,1,2,1,0,1,0,2,2,0,1,2,1,1,2,1,0,1,2,1
    };

    var teacher = new ViterbiLearning(hmm) { Iterations = 650, Tolerance = 0 };
    double ll = teacher.Run(observation);

    double[] pi = { 0.66, 0.33 };

    double[,] A =
    {
        { 0.99, 0.01 },
        { 0.50, 0.50 }
    };

    double[,] B =
    {
        { 0.44, 0.27, 0.28 },
        { 0.33, 0.33, 0.33 }
    };

    var hmmA = Matrix.Exp(hmm.Transitions);
    var hmmB = Matrix.Exp(hmm.Emissions);
    var hmmP = Matrix.Exp(hmm.Probabilities);

    Assert.IsTrue(Matrix.IsEqual(A, hmmA, 0.1));
    Assert.IsTrue(Matrix.IsEqual(B, hmmB, 0.1));
    Assert.IsTrue(Matrix.IsEqual(pi, hmmP, 0.1));
}
public void LearnTest6()
{
    // We will try to create a Hidden Markov Model which
    // can detect if a given sequence starts with a zero
    // and has any number of ones after that.
    int[][] sequences = new int[][]
    {
        new int[] { 0,1,1,1,1,0,1,1,1,1 },
        new int[] { 0,1,1,1,0,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
    };

    // Creates a new Hidden Markov Model with 3 states for
    // an output alphabet of two characters (zero and one)
    HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(3), 2);

    // Try to fit the model to the data until the difference in
    // the average log-likelihood changes only by as little as 0.0001
    var teacher = new ViterbiLearning(hmm) { Tolerance = 0.0001, Iterations = 0 };
    double ll = teacher.Run(sequences);

    // Calculate the probability that the given
    // sequences originated from the model
    double l1 = hmm.Evaluate(new int[] { 0, 1 });       // 0.613
    double l2 = hmm.Evaluate(new int[] { 0, 1, 1, 1 }); // 0.500

    // Sequences which do not start with zero have a much lower probability.
    double l3 = hmm.Evaluate(new int[] { 1, 1 });       // 0.186
    double l4 = hmm.Evaluate(new int[] { 1, 0, 0, 0 }); // 0.003

    // Sequences which contain a few errors have a higher probability
    // than the ones which do not start with zero. This shows some
    // of the temporal elasticity and error tolerance of the HMMs.
    double l5 = hmm.Evaluate(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }); // 0.033
    double l6 = hmm.Evaluate(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }); // 0.026

    double pl = System.Math.Exp(ll);
    double p1 = System.Math.Exp(l1);
    double p2 = System.Math.Exp(l2);
    double p3 = System.Math.Exp(l3);
    double p4 = System.Math.Exp(l4);
    double p5 = System.Math.Exp(l5);
    double p6 = System.Math.Exp(l6);

    Assert.AreEqual(1.754393540912413, pl, 1e-6);
    Assert.AreEqual(0.61368718756104801, p1, 1e-6);
    Assert.AreEqual(0.50049466955818356, p2, 1e-6);
    Assert.AreEqual(0.18643340385264684, p3, 1e-6);
    Assert.AreEqual(0.00300262431355424, p4, 1e-6);
    Assert.AreEqual(0.03338686211012481, p5, 1e-6);
    Assert.AreEqual(0.02659161933179825, p6, 1e-6);

    Assert.IsTrue(l1 > l3 && l1 > l4);
    Assert.IsTrue(l2 > l3 && l2 > l4);
}
public void LearnTest3()
{
    // We will try to create a Hidden Markov Model which
    // can detect if a given sequence starts with a zero
    // and has any number of ones after that.
    int[][] sequences = new int[][]
    {
        new int[] { 0,1,1,1,1,0,1,1,1,1 },
        new int[] { 0,1,1,1,0,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
        new int[] { 0,1,1,1,1,1,1,1,1,1 },
    };

    // Creates a new Hidden Markov Model with 3 states for
    // an output alphabet of two characters (zero and one)
    HiddenMarkovModel hmm = new HiddenMarkovModel(new Forward(3), 2);

    // Try to fit the model to the data until the difference in
    // the average log-likelihood changes only by as little as 0.0001
    var teacher = new ViterbiLearning(hmm) { Tolerance = 0.0001, Iterations = 0 };
    double ll = teacher.Run(sequences);

    // Calculate the probability that the given
    // sequences originated from the model
    double l1; hmm.Decode(new int[] { 0, 1 }, out l1);       // 0.5394
    double l2; hmm.Decode(new int[] { 0, 1, 1, 1 }, out l2); // 0.4485

    // Sequences which do not start with zero have a much lower probability.
    double l3; hmm.Decode(new int[] { 1, 1 }, out l3);       // 0.0864
    double l4; hmm.Decode(new int[] { 1, 0, 0, 0 }, out l4); // 0.0004

    // Sequences which contain a few errors have a higher probability
    // than the ones which do not start with zero. This shows some
    // of the temporal elasticity and error tolerance of the HMMs.
    double l5; hmm.Decode(new int[] { 0, 1, 0, 1, 1, 1, 1, 1, 1 }, out l5); // 0.0154
    double l6; hmm.Decode(new int[] { 0, 1, 1, 1, 1, 1, 1, 0, 1 }, out l6); // 0.0154

    ll = System.Math.Exp(ll);
    l1 = System.Math.Exp(l1);
    l2 = System.Math.Exp(l2);
    l3 = System.Math.Exp(l3);
    l4 = System.Math.Exp(l4);
    l5 = System.Math.Exp(l5);
    l6 = System.Math.Exp(l6);

    Assert.AreEqual(1.754393540912413, ll, 1e-6);
    Assert.AreEqual(0.53946360153256712, l1, 1e-6);
    Assert.AreEqual(0.44850249229903377, l2, 1e-6);
    Assert.AreEqual(0.08646414524833077, l3, 1e-6);
    Assert.AreEqual(0.00041152263374485, l4, 1e-6);
    Assert.AreEqual(0.01541807695931400, l5, 1e-6);
    Assert.AreEqual(0.01541807695931400, l6, 1e-6);

    Assert.IsTrue(l1 > l3 && l1 > l4);
    Assert.IsTrue(l2 > l3 && l2 > l4);
}
public void LikelihoodTest()
{
    var hmm = DiscreteHiddenMarkovModelFunctionTest.CreateModel2();
    int states = hmm.States;
    int symbols = hmm.Symbols;

    var hcrf = new ConditionalRandomField<int>(states, new MarkovDiscreteFunction(hmm));

    var hmm0 = new HiddenMarkovModel(states, symbols);
    var hcrf0 = new ConditionalRandomField<int>(states, new MarkovDiscreteFunction(hmm0));

    int[] observations = new int[] { 0, 0, 1, 1, 1, 2 };

    double la = hcrf.LogLikelihood(observations, observations);
    double lb = hcrf0.LogLikelihood(observations, observations);
    Assert.IsTrue(la > lb);

    double lc = hmm.Evaluate(observations, observations);
    double ld = hmm0.Evaluate(observations, observations);
    Assert.IsTrue(lc > ld);

    double za = hcrf.LogPartition(observations);
    double zb = hcrf0.LogPartition(observations);

    la += za;
    lb += zb;

    Assert.AreEqual(la, lc, 1e-6);
    Assert.AreEqual(lb, ld, 1e-6);
}
/// <summary>
///   Initializes a new instance of the <see cref="HiddenMarkovDistribution"/> class.
/// </summary>
///
/// <param name="model">The model.</param>
///
public HiddenMarkovDistribution(HiddenMarkovModel model)
    : base(0)
{
    this.model = model;
}
public void GenerateTest2()
{
    Accord.Math.Tools.SetupGenerator(42);

    // Consider some phrases:
    //
    string[][] phrases =
    {
        new[] { "those", "are", "sample", "words", "from", "a", "dictionary" },
        new[] { "those", "are", "sample", "words" },
        new[] { "sample", "words", "are", "words" },
        new[] { "those", "words" },
        new[] { "those", "are", "words" },
        new[] { "words", "from", "a", "dictionary" },
        new[] { "those", "are", "words", "from", "a", "dictionary" }
    };

    // Let's begin by transforming them to sequence of
    // integer labels using a codification codebook:
    var codebook = new Codification("Words", phrases);

    // Now we can create the training data for the models:
    int[][] sequence = codebook.Translate("Words", phrases);

    // To create the models, we will specify a forward topology,
    // as the sequences have definite start and ending points.
    //
    var topology = new Forward(states: 4);
    int symbols = codebook["Words"].Symbols; // We have 7 different words

    // Create the hidden Markov model
    HiddenMarkovModel hmm = new HiddenMarkovModel(topology, symbols);

    // Create the learning algorithm
    BaumWelchLearning teacher = new BaumWelchLearning(hmm);

    // Teach the model about the phrases
    double error = teacher.Run(sequence);

    // Now, we can ask the model to generate new samples
    // from the word distributions it has just learned:
    //
    int[] sample = hmm.Generate(3);

    // And the result will be: "those", "are", "words".
    string[] result = codebook.Translate("Words", sample);

    Assert.AreEqual("those", result[0]);
    Assert.AreEqual("are", result[1]);
    Assert.AreEqual("words", result[2]);
}
public static HiddenMarkovModel<TDistribution> Load<TDistribution>(string path)
    where TDistribution : IDistribution
{
    return HiddenMarkovModel<TDistribution>.Load(path);
}
/// <summary>
///   Computes Forward probabilities for a given hidden Markov model and a set of observations.
/// </summary>
public static void Forward<TDistribution>(HiddenMarkovModel<TDistribution> model,
    double[][] observations, double[] scaling, double[,] fwd)
    where TDistribution : IDistribution
{
    int states = model.States;
    var A = Matrix.Exp(model.Transitions);
    var B = model.Emissions;
    var pi = Matrix.Exp(model.Probabilities);

    int T = observations.Length;
    double s = 0;

    // Ensures minimum requirements
    System.Diagnostics.Debug.Assert(fwd.GetLength(0) >= T);
    System.Diagnostics.Debug.Assert(fwd.GetLength(1) == states);
    System.Diagnostics.Debug.Assert(scaling.Length >= T);
    Array.Clear(fwd, 0, fwd.Length);

    // 1. Initialization
    for (int i = 0; i < states; i++)
        s += fwd[0, i] = pi[i] * B[i].ProbabilityFunction(observations[0]);
    scaling[0] = s;

    if (s != 0) // Scaling
    {
        for (int i = 0; i < states; i++)
            fwd[0, i] /= s;
    }

    // 2. Induction
    for (int t = 1; t < T; t++)
    {
        double[] obs = observations[t];
        s = 0;

        for (int i = 0; i < states; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < states; j++)
                sum += fwd[t - 1, j] * A[j, i];
            fwd[t, i] = sum * B[i].ProbabilityFunction(obs);

            s += fwd[t, i]; // scaling coefficient
        }

        scaling[t] = s;

        if (s != 0) // Scaling
        {
            for (int i = 0; i < states; i++)
                fwd[t, i] /= s;
        }
    }

    System.Diagnostics.Debug.Assert(!fwd.HasNaN());
}
public static HiddenMarkovModel<TDistribution> Load<TDistribution>(Stream stream)
    where TDistribution : IDistribution
{
    return HiddenMarkovModel<TDistribution>.Load(stream);
}
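// A hedged usage sketch (the file name "model.bin" is an arbitrary assumption):
// the two wrappers above simply forward to the static Load members of
// HiddenMarkovModel<TDistribution>, so either a path or an open stream works.
public static void LoadExample()
{
    var fromPath = Load<NormalDistribution>("model.bin");

    using (var stream = File.OpenRead("model.bin"))
    {
        var fromStream = Load<NormalDistribution>(stream);
    }
}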
/// <summary>
///   Computes Forward probabilities for a given hidden Markov model and a set of observations.
/// </summary>
///
public static void Forward(HiddenMarkovModel model, int[] observations, double[] scaling, double[,] fwd)
{
    int states = model.States;
    var A = Matrix.Exp(model.Transitions);
    var B = Matrix.Exp(model.Emissions);
    var pi = Matrix.Exp(model.Probabilities);

    int T = observations.Length;
    double s = 0;

    // Ensures minimum requirements
    System.Diagnostics.Debug.Assert(fwd.GetLength(0) >= T);
    System.Diagnostics.Debug.Assert(fwd.GetLength(1) == states);
    System.Diagnostics.Debug.Assert(scaling.Length >= T);
    Array.Clear(fwd, 0, fwd.Length);

    // 1. Initialization
    for (int i = 0; i < states; i++)
        s += fwd[0, i] = pi[i] * B[i, observations[0]];
    scaling[0] = s;

    if (s != 0) // Scaling
    {
        for (int i = 0; i < states; i++)
            fwd[0, i] /= s;
    }

    // 2. Induction
    for (int t = 1; t < T; t++)
    {
        int obs = observations[t];
        s = 0;

        for (int i = 0; i < states; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < states; j++)
                sum += fwd[t - 1, j] * A[j, i];
            fwd[t, i] = sum * B[i, obs];

            s += fwd[t, i]; // scaling coefficient
        }

        scaling[t] = s;

        if (s != 0) // Scaling
        {
            for (int i = 0; i < states; i++)
                fwd[t, i] /= s;
        }
    }

#if DEBUG
    foreach (var value in fwd)
    {
        if (Double.IsNaN(value))
            throw new Exception();
    }
#endif
}
/// <summary>
///   Computes Forward probabilities for a given hidden Markov model and a set of observations.
/// </summary>
///
public static void Forward(HiddenMarkovModel model, int[] observations, double[] scaling, double[,] fwd)
{
    int states = model.States;
    var A = model.LogTransitions.Exp();
    var B = model.Emissions.Exp();
    var pi = model.Probabilities.Exp();

    int T = observations.Length;
    double s = 0;

    // Ensures minimum requirements
    Accord.Diagnostics.Debug.Assert(fwd.GetLength(0) >= T);
    Accord.Diagnostics.Debug.Assert(fwd.GetLength(1) == states);
    Accord.Diagnostics.Debug.Assert(scaling.Length >= T);
    Array.Clear(fwd, 0, fwd.Length);

    // 1. Initialization
    for (int i = 0; i < states; i++)
        s += fwd[0, i] = pi[i] * B[i, observations[0]];
    scaling[0] = s;

    if (s != 0) // Scaling
    {
        for (int i = 0; i < states; i++)
            fwd[0, i] /= s;
    }

    // 2. Induction
    for (int t = 1; t < T; t++)
    {
        int obs = observations[t];
        s = 0;

        for (int i = 0; i < states; i++)
        {
            double sum = 0.0;
            for (int j = 0; j < states; j++)
                sum += fwd[t - 1, j] * A[j][i];
            fwd[t, i] = sum * B[i, obs];

            s += fwd[t, i]; // scaling coefficient
        }

        scaling[t] = s;

        if (s != 0) // Scaling
        {
            for (int i = 0; i < states; i++)
                fwd[t, i] /= s;
        }
    }

    Accord.Diagnostics.Debug.Assert(!fwd.HasNaN());
}