public void HiddenConditionalRandomFieldConstructorTest()
{
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    var function = new MarkovDiscreteFunction(hmm);
    var target = new HiddenConditionalRandomField<int>(function);

    Assert.AreEqual(function, target.Function);
    Assert.AreEqual(2, target.Function.Factors[0].States);
}
public void ComputeTest()
{
    HiddenMarkovClassifier hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 1, 1, 0 },    // Class 0
        new int[] { 0, 0, 1, 0 },    // Class 0
        new int[] { 0, 1, 1, 1, 0 }, // Class 0
        new int[] { 0, 1, 0 },       // Class 0

        new int[] { 1, 0, 0, 1 },    // Class 1
        new int[] { 1, 1, 0, 1 },    // Class 1
        new int[] { 1, 0, 0, 0, 1 }, // Class 1
        new int[] { 1, 0, 1 },       // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    var function = new MarkovDiscreteFunction(hmm);
    var target = new HiddenConditionalRandomField<int>(function);

    for (int i = 0; i < inputs.Length; i++)
    {
        int expected = hmm.Compute(inputs[i]);
        int actual = target.Compute(inputs[i]);

        double h0 = hmm.LogLikelihood(inputs[i], 0);
        double h1 = hmm.LogLikelihood(inputs[i], 1);

        double c0 = target.LogLikelihood(inputs[i], 0);
        double c1 = target.LogLikelihood(inputs[i], 1);

        Assert.AreEqual(expected, actual);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.AreEqual(h1, c1, 1e-10);

        Assert.IsFalse(double.IsNaN(c0));
        Assert.IsFalse(double.IsNaN(c1));
    }
}
public void RunTest()
{
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovDiscreteFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new HiddenGradientDescentLearning<int>(model);
    target.LearningRate = 1000;

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double ll0 = model.LogLikelihood(inputs, outputs);

    double error = Double.NegativeInfinity;
    for (int i = 0; i < 50; i++)
        error = target.RunEpoch(inputs, outputs);

    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.00027018722449589916, error, 1e-10);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
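// The fixed 50-epoch loop above can stop before convergence or keep running
// after the error has plateaued. A minimal alternative sketch (an assumption
// for illustration, not part of the original test): since RunEpoch returns
// the current error, we can iterate until the change falls below a tolerance.
// The names `tolerance`, `maxEpochs`, and `previous` are hypothetical.
//
// double tolerance = 1e-5;
// int maxEpochs = 1000;
// double previous = Double.PositiveInfinity;
// for (int epoch = 0; epoch < maxEpochs; epoch++)
// {
//     double current = target.RunEpoch(inputs, outputs);
//     if (Math.Abs(previous - current) < tolerance)
//         break; // converged
//     previous = current;
// }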
public void RunTest()
{
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    HiddenMarkovClassifier hmm = DiscreteHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovDiscreteFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new HiddenConjugateGradientLearning<int>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double ll0 = model.LogLikelihood(inputs, outputs);
    double error = target.Run(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.0019419916698781847, ll0, 1e-10);
    Assert.AreEqual(0.00050271005636426391, error, 1e-10);
    Assert.AreEqual(error, -ll1);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void RunTest()
{
    var hmm = MarkovContinuousFunctionTest.CreateModel1();
    var function = new MarkovContinuousFunction(hmm);

    var model = new HiddenConditionalRandomField<double>(function);
    var target = new HiddenQuasiNewtonLearning<double>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double llm = hmm.LogLikelihood(inputs, outputs);
    double ll0 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(llm, ll0, 1e-10);
    Assert.IsFalse(Double.IsNaN(llm));
    Assert.IsFalse(Double.IsNaN(ll0));

    double error = target.Run(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(-ll1, error, 1e-10);
    Assert.IsFalse(Double.IsNaN(ll1));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.0000041736023099758768, ll0, 1e-10);

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void RunTest()
{
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovDiscreteFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new HiddenQuasiNewtonLearning<int>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double ll0 = model.LogLikelihood(inputs, outputs);
    double error = target.Run(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.0, error, 1e-10);
    Assert.AreEqual(error, -ll1);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void GradientTest_DiscreteMarkov()
{
    var function = new MarkovDiscreteFunction(2, 2, 2);
    var model = new HiddenConditionalRandomField<int>(function);
    var target = new ForwardBackwardGradient<int>(model);

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);

    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    diff.Function = parameters => func(model, parameters, inputs, outputs);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-4);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
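// The discrete `func` helper used above is not shown in this section; only the
// double[][][] overloads appear further down. A minimal sketch, assuming it
// mirrors those overloads (negated log-likelihood, so finite differences
// approximate the gradient of the minimization objective):
//
// private double func(HiddenConditionalRandomField<int> model,
//     double[] parameters, int[][] inputs, int[] outputs)
// {
//     model.Function.Weights = parameters;
//     return -model.LogLikelihood(inputs, outputs);
// }
//
// The two-argument calls `func(model, parameters)` seen in other tests below
// presumably capture the test class's `inputs`/`outputs` fields the same way;
// that variant is also not shown here.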
private double func(HiddenConditionalRandomField<double[]> model, double[] parameters,
    double[][][] inputs, int[] outputs, double beta)
{
    model.Function.Weights = parameters;

    // Regularization
    double sumSquaredWeights = 0;
    if (beta != 0)
    {
        for (int i = 0; i < parameters.Length; i++)
        {
            if (!(Double.IsInfinity(parameters[i]) || Double.IsNaN(parameters[i])))
                sumSquaredWeights += parameters[i] * parameters[i];
        }

        sumSquaredWeights = sumSquaredWeights * 0.5 / beta;
    }

    double logLikelihood = model.LogLikelihood(inputs, outputs);

    // Maximize the log-likelihood and minimize the sum of squared weights
    return -logLikelihood + sumSquaredWeights;
}
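// In equation form, the objective computed above is the regularized negative
// log-likelihood, with beta acting as a variance-like regularization constant
// (beta = 0 disables the penalty, matching the guard in the code):
//
//     J(w) = -log L(w) + (1 / (2 * beta)) * sum_i w_i^2
//
// The gradient tests in this section compare finite differences of J(w)
// against the analytic gradient produced by ForwardBackwardGradient.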
public void SimpleGestureRecognitionTest()
{
    // Let's say we would like to do a very simple mechanism for
    // gesture recognition. In this example, we will be trying to
    // create a classifier that can distinguish between the words
    // "hello", "car", and "wardrobe".

    // Let's say we decided to acquire some data, and we asked some
    // people to perform those words in front of a Kinect camera, and,
    // using Microsoft's SDK, we were able to capture the x and y
    // coordinates of each hand while the word was being performed.

    // Let's say we decided to represent our frames as:
    //
    //     double[] frame = { leftHandX, leftHandY, rightHandX, rightHandY };
    //
    // Since we captured words, this means we captured sequences of
    // frames as we described above. Let's write some of those as
    // rough examples to explain how gesture recognition can be done:

    double[][] hello =
    {
        new double[] { 1.0, 0.1, 0.0, 0.0 }, // let's say the word
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // hello took 6 frames
        new double[] { 0.0, 1.0, 0.1, 0.1 }, // to be recorded.
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 1.0, 0.0 },
        new double[] { 0.0, 0.0, 0.1, 1.1 },
    };

    double[][] car =
    {
        new double[] { 0.0, 0.0, 0.0, 1.0 }, // the car word
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // took only 4.
        new double[] { 0.0, 0.0, 0.1, 0.0 },
        new double[] { 1.0, 0.0, 0.0, 0.0 },
    };

    double[][] wardrobe =
    {
        new double[] { 0.0, 0.0, 1.0, 0.0 }, // same for the
        new double[] { 0.1, 0.0, 1.0, 0.1 }, // wardrobe word.
        new double[] { 0.0, 0.1, 1.0, 0.0 },
        new double[] { 0.1, 0.0, 1.0, 0.1 },
    };

    // Here, please note that a real-world example would involve *lots*
    // of samples for each word. Here, we are considering just one from
    // each class, which is clearly sub-optimal and should _never_ be
    // done in practice. For example purposes, however, please disregard this.

    // Those are the words we have in our vocabulary:
    //
    double[][][] words = { hello, car, wardrobe };

    // Now, let's associate integer labels with them. This is needed
    // for the case where there are multiple samples for each word.
    //
    int[] labels = { 0, 1, 2 };

    // We will create our classifiers assuming an independent Gaussian
    // distribution for each component in our feature vectors (similar
    // to a Naive Bayes assumption).
    var initial = new Independent<NormalDistribution>
    (
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1),
        new NormalDistribution(0, 1)
    );

    // Now, we can proceed and create our classifier.
    //
    int numberOfWords = 3;  // we are trying to distinguish between 3 words
    int numberOfStates = 5; // this value can be found by trial-and-error

    var hmm = new HiddenMarkovClassifier<Independent<NormalDistribution>>
    (
        classes: numberOfWords,
        topology: new Forward(numberOfStates), // word classifiers should use a forward topology
        initial: initial
    );

    // Create a new learning algorithm to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<Independent<NormalDistribution>>(hmm,

        // Train each model until the log-likelihood changes less than 0.001
        modelIndex => new BaumWelchLearning<Independent<NormalDistribution>>(hmm.Models[modelIndex])
        {
            Tolerance = 0.001,
            Iterations = 100,

            // This is necessary so the code doesn't blow up when it realizes
            // there is only one sample per word class. But it could also be
            // needed in normal situations.
            //
            FittingOptions = new IndependentOptions()
            {
                InnerOption = new NormalOptions() { Regularization = 1e-5 }
            }
        }
    );

    // Finally, we can run the learning algorithm!
    double logLikelihood = teacher.Run(words, labels);

    // At this point, the classifier should be successfully
    // able to distinguish between our three word classes:
    //
    int tc1 = hmm.Compute(hello);
    int tc2 = hmm.Compute(car);
    int tc3 = hmm.Compute(wardrobe);

    Assert.AreEqual(0, tc1);
    Assert.AreEqual(1, tc2);
    Assert.AreEqual(2, tc3);

    // Now, we can use the Markov classifier to initialize an HCRF
    var function = new MarkovMultivariateFunction(hmm);
    var hcrf = new HiddenConditionalRandomField<double[]>(function);

    // We can check that both are equivalent, although they have
    // formulations that can be learned with different methods
    //
    for (int i = 0; i < words.Length; i++)
    {
        // Should be the same
        int expected = hmm.Compute(words[i]);
        int actual = hcrf.Compute(words[i]);

        // Should be the same
        double h0 = hmm.LogLikelihood(words[i], 0);
        double c0 = hcrf.LogLikelihood(words[i], 0);

        double h1 = hmm.LogLikelihood(words[i], 1);
        double c1 = hcrf.LogLikelihood(words[i], 1);

        double h2 = hmm.LogLikelihood(words[i], 2);
        double c2 = hcrf.LogLikelihood(words[i], 2);

        Assert.AreEqual(expected, actual);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.IsTrue(h1.IsRelativelyEqual(c1, 1e-10));
        Assert.IsTrue(h2.IsRelativelyEqual(c2, 1e-10));

        Assert.IsFalse(double.IsNaN(c0));
        Assert.IsFalse(double.IsNaN(c1));
        Assert.IsFalse(double.IsNaN(c2));
    }

    // Now we can learn the HCRF using one of the best learning
    // algorithms available, Resilient Backpropagation learning:

    // Create a learning algorithm
    var rprop = new HiddenResilientGradientLearning<double[]>(hcrf)
    {
        Iterations = 50,
        Tolerance = 1e-5
    };

    // Run the algorithm and learn the models
    double error = rprop.Run(words, labels);

    // At this point, the HCRF should be successfully
    // able to distinguish between our three word classes:
    //
    int hc1 = hcrf.Compute(hello);
    int hc2 = hcrf.Compute(car);
    int hc3 = hcrf.Compute(wardrobe);

    Assert.AreEqual(0, hc1);
    Assert.AreEqual(1, hc2);
    Assert.AreEqual(2, hc3);
}
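// A minimal usage sketch (an assumption, not part of the original test): once
// trained, the HCRF classifies a previously unseen recording the same way.
// The `unknown` sequence below is hypothetical example data.
//
// double[][] unknown =
// {
//     new double[] { 1.0, 0.1, 0.0, 0.0 },
//     new double[] { 0.0, 1.0, 0.1, 0.1 },
//     new double[] { 0.0, 0.0, 1.0, 0.0 },
// };
//
// int word = hcrf.Compute(unknown);                 // predicted class label
// double score = hcrf.LogLikelihood(unknown, word); // log-likelihood of that label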
private static void resilientgradienthiddenlearning()
{
    // Suppose we would like to learn how to classify the
    // following set of sequences among three class labels:

    int[][] inputSequences =
    {
        // First class of sequences: starts and
        // ends with zeros, ones in the middle:
        new[] { 0, 1, 1, 1, 0 },
        new[] { 0, 0, 1, 1, 0, 0 },
        new[] { 0, 1, 1, 1, 1, 0 },

        // Second class of sequences: starts with
        // twos and switches to ones until the end.
        new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

        // Third class of sequences: can start
        // with any symbols, but ends with three.
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 0, 0, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 2, 2, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
    };

    // Now consider their respective class labels
    int[] outputLabels =
    {
        /* Sequences 1-3 are from class 0: */ 0, 0, 0,
        /* Sequences 4-6 are from class 1: */ 1, 1, 1,
        /* Sequences 7-14 are from class 2: */ 2, 2, 2, 2, 2, 2, 2, 2
    };

    // Create the Hidden Conditional Random Field using a set of discrete features
    var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
    var classifier = new HiddenConditionalRandomField<int>(function);

    // Create a learning algorithm
    var teacher = new HiddenResilientGradientLearning<int>(classifier)
    {
        Iterations = 50
    };

    // Run the algorithm and learn the models
    teacher.Run(inputSequences, outputLabels);

    int[] answers = inputSequences.Apply(classifier.Compute);
}
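// A minimal follow-up sketch (an assumption, not part of the original sample):
// comparing `answers` against `outputLabels` gives the training-set accuracy
// of the learned field. The names `correct` and `accuracy` are hypothetical.
//
// int correct = 0;
// for (int i = 0; i < answers.Length; i++)
//     if (answers[i] == outputLabels[i])
//         correct++;
//
// double accuracy = correct / (double)answers.Length;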
private void btnLearnHCRF_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    var samples = database.Samples;
    var classes = database.Classes;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    int iterations = 100;
    double tolerance = 0.01;

    hcrf = new HiddenConditionalRandomField<double[]>(
        new MarkovMultivariateFunction(hmm));

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenResilientGradientLearning<double[]>(hcrf)
    {
        Iterations = iterations,
        Tolerance = tolerance
    };

    // Run the learning algorithm
    double error = teacher.Run(inputs, outputs);

    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = hcrf.Compute(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }
}
private double func(HiddenConditionalRandomField<double[]> model, double[] parameters,
    double[][][] inputs, int[] outputs)
{
    model.Function.Weights = parameters;
    return -model.LogLikelihood(inputs, outputs);
}
public void GradientTest()
{
    // Creates a sequence classifier containing 2 hidden Markov Models
    // with 2 states and an underlying Normal distribution as density.
    MultivariateNormalDistribution density = new MultivariateNormalDistribution(3);
    var hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(2, new Ergodic(2), density);

    double[][][] inputs =
    {
        new [] { new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 }, new double[] { 0, 1, 0 } },
        new [] { new double[] { 1, 6, 2 }, new double[] { 2, 1, 6 }, new double[] { 1, 1, 0 } },
        new [] { new double[] { 9, 1, 0 }, new double[] { 0, 1, 5 }, new double[] { 0, 0, 0 } },
    };

    int[] outputs = { 0, 0, 1 };

    var function = new MarkovMultivariateFunction(hmm);
    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters, inputs, outputs);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 0.05);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
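// The FiniteDifferences check above numerically approximates each partial
// derivative of the objective f; for a central-difference scheme this is
//
//     df/dw_i  ~  (f(w + h*e_i) - f(w - h*e_i)) / (2h)
//
// for a small step h along the unit vector e_i. The exact scheme and step
// size chosen by Accord's FiniteDifferences class are implementation details
// not shown here; the formula is only meant to explain what the analytic
// gradient from ForwardBackwardGradient is being compared against.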
static void runHiddenConditionalRandomFieldLearningExample()
{
    // Observation sequences should only contain symbols that are greater
    // than or equal to 0, and less than the number of symbols.
    int[][] observationSequences =
    {
        // First class of sequences: starts and ends with zeros, ones in the middle.
        new[] { 0, 1, 1, 1, 0 },
        new[] { 0, 0, 1, 1, 0, 0 },
        new[] { 0, 1, 1, 1, 1, 0 },

        // Second class of sequences: starts with twos and switches to ones until the end.
        new[] { 2, 2, 2, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 1, 2, 1, 1, 1, 1, 1 },
        new[] { 2, 2, 2, 2, 2, 1, 1, 1, 1 },

        // Third class of sequences: can start with any symbols, but ends with three.
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 0, 0, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 2, 2, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
        new[] { 0, 0, 1, 1, 3, 3, 3, 3 },
        new[] { 2, 2, 0, 3, 3, 3, 3 },
        new[] { 1, 0, 1, 2, 3, 3, 3, 3 },
        new[] { 1, 1, 2, 3, 3, 3, 3 },
    };

    // Consider their respective class labels.
    // Class labels have to be zero-based and successive integers.
    int[] classLabels =
    {
        0, 0, 0,                // Sequences 1-3 are from class 0.
        1, 1, 1,                // Sequences 4-6 are from class 1.
        2, 2, 2, 2, 2, 2, 2, 2  // Sequences 7-14 are from class 2.
    };

    // Create the Hidden Conditional Random Field using a set of discrete features.
    var function = new MarkovDiscreteFunction(states: 3, symbols: 4, outputClasses: 3);
    var hcrf = new HiddenConditionalRandomField<int>(function);

    // Create a learning algorithm.
    var trainer = new HiddenResilientGradientLearning<int>(hcrf)
    {
        Iterations = 50
    };

    // Run the algorithm and learn the models.
    double error = trainer.Run(observationSequences, classLabels);
    Console.WriteLine("the error in the last iteration = {0}", error);

    // Check the output classification label for some sequences.
    int y1 = hcrf.Compute(new[] { 0, 1, 1, 1, 0 });    // output is y1 = 0.
    Console.WriteLine("output class = {0}", y1);
    int y2 = hcrf.Compute(new[] { 0, 0, 1, 1, 0, 0 }); // output is y2 = 0.
    Console.WriteLine("output class = {0}", y2);

    int y3 = hcrf.Compute(new[] { 2, 2, 2, 2, 1, 1 }); // output is y3 = 1.
    Console.WriteLine("output class = {0}", y3);
    int y4 = hcrf.Compute(new[] { 2, 2, 1, 1 });       // output is y4 = 1.
    Console.WriteLine("output class = {0}", y4);

    int y5 = hcrf.Compute(new[] { 0, 0, 1, 3, 3, 3 }); // output is y5 = 2.
    Console.WriteLine("output class = {0}", y5);
    int y6 = hcrf.Compute(new[] { 2, 0, 2, 2, 3, 3 }); // output is y6 = 2.
    Console.WriteLine("output class = {0}", y6);
}
public void ComputeDeoptimizeTest4()
{
    int[] labels;
    double[][][] words;
    var model = CreateModel4(out words, out labels, false);

    var target = new MarkovMultivariateFunction(model);

#pragma warning disable 0618
    target.Deoptimize();
#pragma warning restore 0618

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    Assert.AreEqual(3, model.Priors.Length);
    Assert.AreEqual(1 / 3.0, model.Priors[0]);
    Assert.AreEqual(1 / 3.0, model.Priors[1]);
    Assert.AreEqual(1 / 3.0, model.Priors[2]);

    check4(words, model, target, hcrf);
}
public void ComputeTest4()
{
    int[] labels;
    double[][][] words;
    HiddenMarkovClassifier<Independent<NormalDistribution>> model =
        CreateModel4(out words, out labels, false);

    var target = new MarkovMultivariateFunction(model);
    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    Assert.AreEqual(3, model.Priors.Length);
    Assert.AreEqual(1 / 3.0, model.Priors[0]);
    Assert.AreEqual(1 / 3.0, model.Priors[1]);
    Assert.AreEqual(1 / 3.0, model.Priors[2]);

    check4(words, model, target, hcrf);
}
public void ComputeDeoptimizeTest3()
{
    double[][][] sequences;
    int[] labels;
    var model = CreateModel3(out sequences, out labels);

    var target = new MarkovMultivariateFunction(model);

#pragma warning disable 0618
    target.Deoptimize();
#pragma warning restore 0618

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    Assert.AreEqual(2, model.Priors.Length);
    Assert.AreEqual(1 / 2.0, model.Priors[0]);
    Assert.AreEqual(1 / 2.0, model.Priors[1]);

    check4(sequences, model, target, hcrf);
}
public void ComputeTest2()
{
    double[][][] sequences;
    int[] labels;
    var model = CreateModel2(out sequences, out labels);
    var target = new MarkovMultivariateFunction(model);

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    double actual;
    double expected;

    double[][] x = { new double[] { 0, 1.7 }, new double[] { 2, 2.1 } };

    for (int c = 0; c < model.Classes; c++)
    {
        for (int i = 0; i < model[c].States; i++)
        {
            // Check initial state transitions
            expected = model.Priors[c]
                * Math.Exp(model[c].Probabilities[i])
                * model[c].Emissions[i].ProbabilityDensityFunction(x[0]);

            actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));

            Assert.AreEqual(expected, actual, 1e-6);
            Assert.IsFalse(double.IsNaN(actual));
        }

        for (int t = 1; t < x.Length; t++)
        {
            // Check normal state transitions
            for (int i = 0; i < model[c].States; i++)
            {
                for (int j = 0; j < model[c].States; j++)
                {
                    double xb = Math.Exp(model[c].Transitions[i, j]);
                    double xc = model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                    expected = xb * xc;

                    actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));

                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }
            }
        }

        actual = model.LogLikelihood(x, c);
        expected = hcrf.LogLikelihood(x, c);

        Assert.AreEqual(expected, actual);
        Assert.IsFalse(double.IsNaN(actual));
    }
}
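// In equation form, the correspondence checked above between HMM parameters
// and HCRF factor potentials is (writing pi for the initial probabilities,
// a for the transition probabilities, b for the emission densities, and
// P(c) for the class prior; Probabilities and Transitions are stored as
// logarithms, hence the Math.Exp calls):
//
//     exp(Factor_c(-1, i, x, 0)) = P(c) * pi_i * b_i(x_0)       (initial step)
//     exp(Factor_c(i, j, x, t))  = a_ij * b_j(x_t),  t >= 1     (transitions)
//
// so the product of factor potentials along a state path recovers the joint
// likelihood of the corresponding hidden Markov model, which is why the
// per-class log-likelihoods of the two models agree.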
public void GradientTest()
{
    var function = new DiscreteMarkovClassifierFunction(2, 2, 2);
    var model = new HiddenConditionalRandomField<int>(function);
    var target = new QuasiNewtonHiddenLearning<int>(model);

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-4);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
/// <summary>
///   Initializes a new instance of the <see cref="HiddenGradientDescentLearning{T}"/> class.
/// </summary>
///
/// <param name="model">The model to be trained.</param>
///
public HiddenGradientDescentLearning(HiddenConditionalRandomField<T> model)
    : this()
{
    Model = model;
    init();
}
private void btnLearnHMM_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    int states = 5;
    int iterations = 0;
    double tolerance = 0.01;
    bool rejection = false;

    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution>(classes.Count,
        new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution>(hmm,

        // Train each model using the selected convergence criteria
        i => new BaumWelchLearning<MultivariateNormalDistribution>(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,

            FittingOptions = new NormalOptions()
            {
                Regularization = 1e-5
            }
        }
    );

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm
    double error = teacher.Run(inputs, outputs);

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = hmm.Compute(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }

    btnLearnHCRF.Enabled = true;
    hcrf = null;
}
public void LogForwardGesturesPriorsTest()
{
    int[] labels;
    double[][][] words;
    var classifier = IndependentMarkovFunctionTest.CreateModel4(out words, out labels, true);

    var function = new MarkovMultivariateFunction(classifier);
    var target = new HiddenConditionalRandomField<double[]>(function);

    foreach (var word in words)
    {
        for (int c = 0; c < 3; c++)
        {
            var actual = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm.LogForward(
                target.Function.Factors[c], word, c);

            var expected = Accord.Statistics.Models.Markov.ForwardBackwardAlgorithm.LogForward(
                classifier[c], word);

            for (int i = 0; i < actual.GetLength(0); i++)
            {
                for (int j = 0; j < actual.GetLength(1); j++)
                {
                    double a = actual[i, j];
                    double e = expected[i, j];

                    // TODO: Verify whether it is possible to reduce this tolerance
                    Assert.IsTrue(e.IsRelativelyEqual(a, 0.1));
                }
            }
        }
    }
}
public void RunTest2()
{
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    Accord.Math.Tools.SetupGenerator(0);

    var function = new MarkovDiscreteFunction(2, 2, 2);
    var model = new HiddenConditionalRandomField<int>(function);
    var target = new HiddenConjugateGradientLearning<int>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    double ll0 = model.LogLikelihood(inputs, outputs);
    double error = target.Run(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-5.5451774444795623, ll0, 1e-10);
    Assert.AreEqual(0, error, 1e-10);
    Assert.IsFalse(double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
/// <summary>
///   Constructs a new L-BFGS learning algorithm.
/// </summary>
///
public HiddenQuasiNewtonLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;
}
private static void check4(double[][][] words, HiddenMarkovClassifier<Independent> model,
    MarkovMultivariateFunction target, HiddenConditionalRandomField<double[]> hcrf)
{
    double actual;
    double expected;

    foreach (var x in words)
    {
        for (int c = 0; c < model.Classes; c++)
        {
            for (int i = 0; i < model[c].States; i++)
            {
                // Check initial state transitions
                double xa = model.Priors[c];
                double xb = Math.Exp(model[c].Probabilities[i]);
                double xc = model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                expected = xa * xb * xc;
                actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));

                Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
                Assert.IsFalse(double.IsNaN(actual));
            }

            for (int t = 1; t < x.Length; t++)
            {
                // Check normal state transitions
                for (int i = 0; i < model[c].States; i++)
                {
                    for (int j = 0; j < model[c].States; j++)
                    {
                        double xb = Math.Exp(model[c].Transitions[i, j]);
                        double xc = model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                        expected = xb * xc;
                        actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));

                        Assert.IsTrue(expected.IsRelativelyEqual(actual, 1e-10));
                        Assert.IsFalse(double.IsNaN(actual));
                    }
                }
            }

            actual = Math.Exp(model.LogLikelihood(x, c));
            expected = Math.Exp(hcrf.LogLikelihood(x, c));
            Assert.AreEqual(expected, actual, 1e-10);
            Assert.IsFalse(double.IsNaN(actual));

            actual = model.Compute(x);
            expected = hcrf.Compute(x);
            Assert.AreEqual(expected, actual);
            Assert.IsFalse(double.IsNaN(actual));
        }
    }
}
/// <summary>
///   Initializes a new instance of the <see cref="BaseHiddenConditionalRandomFieldLearning{T}"/> class.
/// </summary>
///
/// <param name="model">The model to be trained.</param>
///
protected BaseHiddenConditionalRandomFieldLearning(HiddenConditionalRandomField<T> model)
{
    this.model = model;
    this.function = model.Function;
}
public void LogForwardGesturesPriorsDeoptimizedTest()
{
    int[] labels;
    double[][][] words;
    var classifier = IndependentMarkovFunctionTest.CreateModel4(out words, out labels, true);

    var deopFun = new MarkovMultivariateFunction(classifier);
    deopFun.Deoptimize();
    var target1 = new HiddenConditionalRandomField<double[]>(deopFun);

    var function = new MarkovMultivariateFunction(classifier);
    var target2 = new HiddenConditionalRandomField<double[]>(function);

    foreach (var word in words)
    {
        for (int c = 0; c < 3; c++)
        {
            for (int y = 0; y < 3; y++)
            {
                var actual = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm
                    .LogForward(target1.Function.Factors[c], word, y);

                var expected = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm
                    .LogForward(target2.Function.Factors[c], word, y);

                for (int i = 0; i < actual.GetLength(0); i++)
                {
                    for (int j = 0; j < actual.GetLength(1); j++)
                    {
                        double a = actual[i, j];
                        double e = expected[i, j];
                        Assert.IsTrue(e.IsRelativelyEqual(a, 0.1));
                    }
                }
            }
        }
    }
}
public void GradientDeoptimizeTest3()
{
    double[][][] sequences2;
    int[] labels2;

    var hmm = CreateModel3(out sequences2, out labels2);
    var function = new MarkovMultivariateFunction(hmm);

#pragma warning disable 0618
    function.Deoptimize();
#pragma warning restore 0618

    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);
    target.Regularization = 2;

    var inputs = sequences2;
    var outputs = labels2;

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        double e = expected[i];
        double a = actual[i];

        Assert.AreEqual(e, a, 1e-3);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
public void RunTest()
{
    var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MultivariateNormalMarkovClassifierFunction(hmm);

    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new QuasiNewtonHiddenLearning<double[]>(model);

    var inputs = inputs1;
    var outputs = outputs1;

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double llm = hmm.LogLikelihood(inputs, outputs);
    double ll0 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(llm, ll0, 1e-10);
    Assert.IsFalse(double.IsNaN(llm));
    Assert.IsFalse(double.IsNaN(ll0));

    double error = target.RunEpoch(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(-ll1, error, 1e-10);
    Assert.IsFalse(double.IsNaN(ll1));
    Assert.IsFalse(double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.0000041736023117522336, ll0, 1e-10);
    Assert.AreEqual(error, -ll1);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
/// <summary>
///   Constructs a new Conjugate Gradient learning algorithm.
/// </summary>
///
public HiddenConjugateGradientLearning(HiddenConditionalRandomField<T> model)
{
    Model = model;
}
public void GradientTest3()
{
    var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovMultivariateFunction(hmm);

    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);
    target.Regularization = 2;

    var inputs = inputs1;
    var outputs = outputs1;

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters, inputs, outputs, target.Regularization);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-3);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
public void ComputeTest3()
{
    var model = CreateModel3();
    var target = new MarkovMultivariateFunction(model);

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    double actual;
    double expected;

    for (int k = 0; k < 5; k++)
    {
        foreach (var x in sequences2)
        {
            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    double xa = model.Priors[c];
                    double xb = Math.Exp(model[c].Probabilities[i]);
                    double xc = model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                    expected = xa * xb * xc;
                    actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));

                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            expected = Math.Exp(model[c].Transitions[i, j])
                                * model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                            actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));

                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }

                actual = Math.Exp(model.LogLikelihood(x, c));
                expected = Math.Exp(hcrf.LogLikelihood(x, c));
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(double.IsNaN(actual));

                actual = model.Compute(x);
                expected = hcrf.Compute(x);
                Assert.AreEqual(expected, actual);
                Assert.IsFalse(double.IsNaN(actual));
            }
        }
    }
}
private double func(HiddenConditionalRandomField<double[]> model, double[] parameters,
    double[][][] inputs, int[] outputs, double beta)
{
    model.Function.Weights = parameters;

    // Regularization
    double sumSquaredWeights = 0;
    if (beta != 0)
    {
        for (int i = 0; i < parameters.Length; i++)
            if (!(Double.IsInfinity(parameters[i]) || Double.IsNaN(parameters[i])))
                sumSquaredWeights += parameters[i] * parameters[i];

        sumSquaredWeights = sumSquaredWeights * 0.5 / beta;
    }

    double logLikelihood = model.LogLikelihood(inputs, outputs);

    // Maximize the log-likelihood and minimize the sum of squared weights
    return -logLikelihood + sumSquaredWeights;
}
public void GradientTest2()
{
    var hmm = CreateModel3();
    var function = new MarkovMultivariateFunction(hmm);

    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);

    var inputs = sequences2;
    var outputs = labels2;

    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters, inputs, outputs);
    double[] expected = diff.Compute(function.Weights);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-3);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
private void openDataDialog_FileOk(object sender, System.ComponentModel.CancelEventArgs e)
{
    hmm = null;
    hcrf = null;

    using (var stream = openDataDialog.OpenFile())
        database.Load(stream);

    btnLearnHMM.Enabled = true;
    btnLearnHCRF.Enabled = false;

    panelClassification.Visible = false;
    panelUserLabeling.Visible = false;
}
public void ComputeTest3()
{
    double[][][] sequences2;
    int[] labels2;
    var model = CreateModel3(out sequences2, out labels2);
    var target = new MarkovMultivariateFunction(model);

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    double actual;
    double expected;

    for (int k = 0; k < 5; k++)
    {
        foreach (var x in sequences2)
        {
            for (int c = 0; c < model.Classes; c++)
            {
                for (int i = 0; i < model[c].States; i++)
                {
                    // Check initial state transitions
                    double xa = model.Priors[c];
                    double xb = Math.Exp(model[c].Probabilities[i]);
                    double xc = model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
                    expected = xa * xb * xc;
                    actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));

                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }

                for (int t = 1; t < x.Length; t++)
                {
                    // Check normal state transitions
                    for (int i = 0; i < model[c].States; i++)
                    {
                        for (int j = 0; j < model[c].States; j++)
                        {
                            double xb = Math.Exp(model[c].Transitions[i, j]);
                            double xc = model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                            expected = xb * xc;
                            actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));

                            Assert.AreEqual(expected, actual, 1e-6);
                            Assert.IsFalse(double.IsNaN(actual));
                        }
                    }
                }

                actual = Math.Exp(model.LogLikelihood(x, c));
                expected = Math.Exp(hcrf.LogLikelihood(x, c));
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(double.IsNaN(actual));

                actual = model.Compute(x);
                expected = hcrf.Compute(x);
                Assert.AreEqual(expected, actual);
                Assert.IsFalse(double.IsNaN(actual));
            }
        }
    }
}
/// <summary>
///   Initializes a new instance of the <see cref="GradientDescentHiddenLearning{T}"/> class.
/// </summary>
///
/// <param name="model">The model to be trained.</param>
///
public GradientDescentHiddenLearning(HiddenConditionalRandomField<T> model)
    : base(model)
{
    aObservations = new T[1][];
    aOutput = new int[1];
}
/// <summary>
///   Initializes a new instance of the <see cref="GradientDescentHiddenLearning{T}"/> class.
/// </summary>
///
/// <param name="model">The model to be trained.</param>
///
public GradientDescentHiddenLearning(HiddenConditionalRandomField<T> model)
    : base(model)
{
    gradient = new double[Model.Function.Weights.Length];
}
/// <summary>
///   Initializes a new instance of the <see cref="ForwardBackwardGradient{T}"/> class.
/// </summary>
///
/// <param name="model">The model to be trained.</param>
///
public ForwardBackwardGradient(HiddenConditionalRandomField<T> model)
    : this()
{
    this.model = model;
    this.function = model.Function;
}
public void ComputeTest5()
{
    var model = CreateModel3(states: 7);
    var target = new MarkovMultivariateFunction(model);

    double actual;
    double expected;

    double[][] x = { new double[] { 0, 1 }, new double[] { 3, 2 } };

    for (int c = 0; c < model.Classes; c++)
    {
        for (int i = 0; i < model[c].States; i++)
        {
            // Check initial state transitions
            expected = model.Priors[c]
                * Math.Exp(model[c].Probabilities[i])
                * model[c].Emissions[i].ProbabilityDensityFunction(x[0]);
            actual = Math.Exp(target.Factors[c].Compute(-1, i, x, 0, c));

            Assert.AreEqual(expected, actual, 1e-6);
            Assert.IsFalse(double.IsNaN(actual));
        }

        for (int t = 1; t < x.Length; t++)
        {
            // Check normal state transitions
            for (int i = 0; i < model[c].States; i++)
            {
                for (int j = 0; j < model[c].States; j++)
                {
                    expected = Math.Exp(model[c].Transitions[i, j])
                        * model[c].Emissions[j].ProbabilityDensityFunction(x[t]);
                    actual = Math.Exp(target.Factors[c].Compute(i, j, x, t, c));

                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }
            }
        }
    }

    var hcrf = new HiddenConditionalRandomField<double[]>(target);

    for (int i = 0; i < inputTest.Length; i++)
    {
        int h = model.Compute(inputTest[i]);
        int c = hcrf.Compute(inputTest[i]);

        Assert.AreEqual(h, c);
    }
}
public void GradientTest2()
{
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovDiscreteFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new ForwardBackwardGradient<int>(model);

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-5);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
private void btnLearnHMM_Click(object sender, EventArgs e)
{
    if (gridSamples.Rows.Count == 0)
    {
        MessageBox.Show("Please load or insert some data first.");
        return;
    }

    BindingList<Sequence> samples = database.Samples;
    BindingList<String> classes = database.Classes;

    double[][][] inputs = new double[samples.Count][][];
    int[] outputs = new int[samples.Count];

    for (int i = 0; i < inputs.Length; i++)
    {
        inputs[i] = samples[i].Input;
        outputs[i] = samples[i].Output;
    }

    int states = 5;
    int iterations = 0;
    double tolerance = 0.01;
    bool rejection = false;

    hmm = new HiddenMarkovClassifier<MultivariateNormalDistribution, double[]>(classes.Count,
        new Forward(states), new MultivariateNormalDistribution(2), classes.ToArray());

    // Create the learning algorithm for the ensemble classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateNormalDistribution, double[]>(hmm)
    {
        // Train each model using the selected convergence criteria
        Learner = i => new BaumWelchLearning<MultivariateNormalDistribution, double[]>(hmm.Models[i])
        {
            Tolerance = tolerance,
            Iterations = iterations,

            FittingOptions = new NormalOptions()
            {
                Regularization = 1e-5
            }
        }
    };

    teacher.Empirical = true;
    teacher.Rejection = rejection;

    // Run the learning algorithm
    teacher.Learn(inputs, outputs);

    // Classify all training instances
    foreach (var sample in database.Samples)
    {
        sample.RecognizedAs = hmm.Decide(sample.Input);
    }

    foreach (DataGridViewRow row in gridSamples.Rows)
    {
        var sample = row.DataBoundItem as Sequence;
        row.DefaultCellStyle.BackColor = (sample.RecognizedAs == sample.Output) ?
            Color.LightGreen : Color.White;
    }

    btnLearnHCRF.Enabled = true;
    hcrf = null;
}
public void GradientTest4()
{
    var hmm = IndependentMarkovClassifierPotentialFunctionTest.CreateModel2();
    var function = new MarkovMultivariateFunction(hmm);

    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);
    target.Regularization = 0;

    FiniteDifferences diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters,
        IndependentMarkovClassifierPotentialFunctionTest.sequences,
        IndependentMarkovClassifierPotentialFunctionTest.labels);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights,
        IndependentMarkovClassifierPotentialFunctionTest.sequences,
        IndependentMarkovClassifierPotentialFunctionTest.labels);

    for (int i = 0; i < actual.Length; i++)
    {
        if (double.IsNaN(expected[i]))
            continue;

        Assert.AreEqual(expected[i], actual[i], 1e-5);
        Assert.IsFalse(double.IsNaN(actual[i]));
    }
}