public void GradientTest3()
{
    // Builds a HCRF from a multivariate-normal HMM classifier and checks
    // that the analytic gradient (with L2 regularization) matches a
    // numerical finite-differences estimate.
    var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovMultivariateFunction(hmm);
    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);
    target.Regularization = 2;

    var inputs = inputs1;
    var outputs = outputs1;

    // Numerical gradient via central finite differences over the weights.
    var diff = new FiniteDifferences(function.Weights.Length)
    {
        Function = w => func(model, w, inputs, outputs, target.Regularization)
    };

    double[] expected = diff.Compute(function.Weights);

    // Analytic gradient computed by the forward-backward algorithm.
    double[] actual = target.Gradient(function.Weights, inputs, outputs);

    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-3);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
public void RunTest()
{
    // Creates a HCRF equivalent to a multivariate-normal HMM classifier,
    // verifies the two models agree before learning, runs quasi-Newton
    // learning, and verifies the log-likelihood improved.
    var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new MarkovMultivariateFunction(hmm);
    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new HiddenQuasiNewtonLearning<double[]>(model);

    var inputs = inputs1;
    var outputs = outputs1;

    // Before training, the HCRF must reproduce the HMM's classifications.
    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }
    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    // The initial log-likelihoods of both models must match.
    double llm = hmm.LogLikelihood(inputs, outputs);
    double ll0 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(llm, ll0, 1e-10);
    Assert.IsFalse(double.IsNaN(llm));
    Assert.IsFalse(double.IsNaN(ll0));

    // Run one learning pass; the reported error should be the negative
    // log-likelihood of the updated model.
    double error = target.Run(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);
    Assert.AreEqual(-ll1, error, 1e-10);
    Assert.IsFalse(double.IsNaN(ll1));
    Assert.IsFalse(double.IsNaN(error));

    // After training, the model should still classify all samples correctly.
    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.0000041736023117522336, ll0, 1e-10);

    // FIX: was Assert.AreEqual(error, -ll1) — an exact equality comparison
    // between doubles. Use the same 1e-10 tolerance as the equivalent
    // assertion above to avoid spurious rounding-dependent failures.
    Assert.AreEqual(-ll1, error, 1e-10);
    Assert.IsFalse(double.IsNaN(ll0));
    Assert.IsFalse(double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    // Training must have improved (increased) the log-likelihood.
    Assert.IsTrue(ll1 > ll0);
}
public void ComputeTest3()
{
    // Verifies that the log-domain forward/backward implementations agree
    // element-wise with the linear-domain ones, for every factor, output
    // class, and feature marginal of the potential function.
    var hmm = MultivariateNormalHiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var owner = new MarkovMultivariateFunction(hmm);

    double[][] x =
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 3 },
        new double[] { 1 },
        new double[] { 2 },
        new double[] { 8 },
        new double[] { 0 },
        new double[] { 10 },
        new double[] { System.Math.PI },
    };

    foreach (var factor in owner.Factors)
    {
        for (int y = 0; y < owner.Outputs; y++)
        {
            double[,] fwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Forward(factor, x, y);
            double[,] bwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Backward(factor, x, y);
            double[,] lnfwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogForward(factor, x, y);
            double[,] lnbwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogBackward(factor, x, y);

            // Forward matrices: log of linear values must match log-domain values.
            for (int r = 0; r < fwd.GetLength(0); r++)
                for (int c = 0; c < fwd.GetLength(1); c++)
                    Assert.AreEqual(System.Math.Log(fwd[r, c]), lnfwd[r, c], 1e-10);

            // Backward matrices: same element-wise agreement.
            for (int r = 0; r < bwd.GetLength(0); r++)
                for (int c = 0; c < bwd.GetLength(1); c++)
                    Assert.AreEqual(System.Math.Log(bwd[r, c]), lnbwd[r, c], 1e-10);

            // Each feature's marginal must also agree in the log domain.
            foreach (var feature in factor)
            {
                double expected = System.Math.Log(feature.Marginal(fwd, bwd, x, y));
                double actual = feature.LogMarginal(lnfwd, lnbwd, x, y);
                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(double.IsNaN(actual));
            }
        }
    }
}