public void RunTest()
{
    // Fix the random seed so the normally-distributed weight
    // initialization (and hence the learned model) is reproducible.
    Accord.Math.Random.Generator.Seed = 0;

    int nstates = 3;
    int symbols = 3;

    // Each sequence serves both as the observations and as its own label
    // sequence, so the field should learn to reproduce its input exactly.
    int[][] sequences =
    {
        new int[] { 0, 1, 1, 1, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 0, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 2, 2 },
        new int[] { 0, 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 0, 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 2, 2, 2 },
    };

    var function = new MarkovDiscreteFunction(nstates, symbols, new NormalDistribution());
    var model = new ConditionalRandomField<int>(nstates, function);

    // Sanity check: the untrained (randomly initialized) model should
    // not already map each sequence onto itself.
    foreach (int[] observed in sequences)
    {
        double logLikelihood;
        int[] predicted = model.Compute(observed, out logLikelihood);
        Assert.IsFalse(observed.IsEqual(predicted));
    }

    var target = new QuasiNewtonLearning<int>(model);

    // Run single-threaded so the optimization is deterministic.
    target.ParallelOptions.MaxDegreeOfParallelism = 1;

    int[][] labels = sequences;
    int[][] observations = sequences;

    double llBefore = model.LogLikelihood(observations, labels);
    double actual = target.Run(observations, labels);
    double llAfter = model.LogLikelihood(observations, labels);

    // Training must have improved the model's fit to the data.
    Assert.IsTrue(llAfter > llBefore);
    Assert.AreEqual(-0.0010766857305242183, actual, 1e-6);

    // After training, decoding each sequence should reproduce it exactly.
    foreach (int[] observed in sequences)
    {
        double logLikelihood;
        int[] predicted = model.Compute(observed, out logLikelihood);
        Assert.IsTrue(observed.IsEqual(predicted));
    }
}
public void RunTest()
{
    int nstates = 3;
    int symbols = 3;

    // Each sequence serves both as the observations and as its own label
    // sequence, so the field should learn to reproduce its input exactly.
    int[][] sequences =
    {
        new int[] { 0, 1, 1, 1, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 0, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 2, 2 },
        new int[] { 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 2, 2 },
        new int[] { 0, 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 0, 0, 1, 1, 1, 2, 2 },
        new int[] { 0, 1, 1, 2, 2, 2 },
    };

    // Default (non-random) initialization of the potential function.
    var function = new MarkovDiscreteFunction(nstates, symbols);
    var model = new ConditionalRandomField<int>(nstates, function);

    // Sanity check: the untrained model should not already map each
    // sequence onto itself.
    foreach (int[] observed in sequences)
    {
        double logLikelihood;
        int[] predicted = model.Compute(observed, out logLikelihood);
        Assert.IsFalse(observed.IsEqual(predicted));
    }

    var target = new QuasiNewtonLearning<int>(model);

    int[][] labels = sequences;
    int[][] observations = sequences;

    double llBefore = model.LogLikelihood(observations, labels);
    double actual = target.Run(observations, labels);
    double llAfter = model.LogLikelihood(observations, labels);

    // Training must have improved the model's fit to the data, and the
    // optimizer should converge essentially to zero.
    Assert.IsTrue(llAfter > llBefore);
    Assert.AreEqual(0, actual, 1e-8);

    // After training, decoding each sequence should reproduce it exactly.
    foreach (int[] observed in sequences)
    {
        double logLikelihood;
        int[] predicted = model.Compute(observed, out logLikelihood);
        Assert.IsTrue(observed.IsEqual(predicted));
    }
}
public void learn_test()
{
    #region doc_learn
    // Fix the random seed so this example always produces the same results.
    Accord.Math.Random.Generator.Seed = 0;

    // Observation sequences over a two-symbol alphabet:
    int[][] input =
    {
        new int[] { 0, 1, 1, 1, 0, 0 },
        new int[] { 0, 1, 1, 0 },
        new int[] { 0, 1, 1, 0, 0, 0 },
        new int[] { 0, 1, 1, 1, 1, 0 },
        new int[] { 0, 1, 1, 1, 0, 0, 0, 0 },
        new int[] { 0, 1, 1, 1, 0, 0 },
        new int[] { 0, 1, 1, 0 },
        new int[] { 0, 1, 1, 1, 0 },
    };

    // The corresponding state-label sequences (three possible labels):
    int[][] output =
    {
        new int[] { 0, 0, 1, 1, 1, 2 },
        new int[] { 0, 0, 1, 2 },
        new int[] { 0, 0, 1, 1, 1, 2 },
        new int[] { 0, 0, 1, 1, 1, 2 },
        new int[] { 0, 0, 1, 1, 1, 1, 2, 2 },
        new int[] { 0, 0, 1, 1, 1, 2 },
        new int[] { 0, 0, 1, 2 },
        new int[] { 0, 1, 1, 1, 2 },
    };

    // Create a new L-BFGS learning algorithm, using a Markov potential
    // function whose weights start from a normal distribution:
    var teacher = new QuasiNewtonLearning<int>()
    {
        Function = new MarkovDiscreteFunction(states: 3, symbols: 2,
            initialization: new NormalDistribution())
    };

    // Learn the Conditional Random Field model
    ConditionalRandomField<int> crf = teacher.Learn(input, output);

    // Use the model to classify the samples
    int[][] prediction = crf.Decide(input);

    // Measure label-level accuracy by flattening all sequences:
    var cm = new ConfusionMatrix(predicted: prediction.Reshape(), expected: output.Reshape());
    double acc = cm.Accuracy;
    #endregion

    Assert.IsTrue(acc > 0.7);
}
public void chunking_dataset_crf()
{
    // Load the chunking dataset, caching its files in the temp directory.
    Chunking chunking = new Chunking(path: Path.GetTempPath());

    // Learn a mapping between each word to an integer class label:
    var wordMap = new Codification().Learn(chunking.Words);

    // Learn a mapping between each tag to an integer class labels:
    var tagMap = new Codification().Learn(chunking.Tags);

    // Convert the training and testing word sequences into integer labels:
    int[][] trainX = wordMap.Transform(chunking.Training.Item1);
    int[][] testX = wordMap.Transform(chunking.Testing.Item1);

    // Convert the training and testing tag sequences into integer labels:
    int[][] trainY = tagMap.Transform(chunking.Training.Item2);
    int[][] testY = tagMap.Transform(chunking.Testing.Item2);

    int numberOfClasses = chunking.Tags.Length;
    int numberOfSymbols = chunking.Words.Length;

    // Create an L-BFGS teacher with a discrete Markov potential function
    // sized to the dataset's tag and word vocabularies:
    var teacher = new QuasiNewtonLearning<int>()
    {
        Function = new MarkovDiscreteFunction(
            states: numberOfClasses, symbols: numberOfSymbols)
    };

    // Use the teacher to learn a Conditional Random Field model
    ConditionalRandomField<int> crf = teacher.Learn(trainX, trainY);

    // Use the crf to predict instances:
    int[][] predY = crf.Decide(testX);

    // Check the accuracy of the model over the flattened test labels:
    var cm = new ConfusionMatrix(
        predicted: predY.Concatenate(),
        expected: testY.Concatenate());

    double acc = cm.Accuracy;
    Assert.AreEqual(0.99983114169322662, acc, 1e-10);
}
// Trains a CRF on pre-encoded chunking data and computes (but does not
// assert on) its test-set accuracy.
private static void crf(int[][] trainX, int[][] trainY, int[][] testX, int[][] testY)
{
    // Hard-coded dataset dimensions, matching the chunking dataset
    // (chunking.Tags.Length and chunking.Words.Length respectively):
    int numberOfClasses = 44;
    int numberOfSymbols = 21589;

    // Create an L-BFGS teacher with a discrete Markov potential function:
    var teacher = new QuasiNewtonLearning<int>()
    {
        Function = new MarkovDiscreteFunction(
            states: numberOfClasses, symbols: numberOfSymbols)
    };

    // Use the teacher to learn a Conditional Random Field model
    ConditionalRandomField<int> crf = teacher.Learn(trainX, trainY);

    // Use the crf to predict instances:
    int[][] predY = crf.Decide(testX);

    // Check the accuracy of the model over the flattened test labels.
    // NOTE(review): the accuracy is computed but never asserted or
    // returned here — this helper only exercises the training path.
    var cm = new ConfusionMatrix(predicted: predY.Concatenate(), expected: testY.Concatenate());
    double acc = cm.Accuracy;
}