public void GradientTest3()
{
    // Validates the analytic HCRF gradient (with L2 regularization enabled)
    // against a numerical finite-differences approximation of the same
    // regularized objective. Uses the shared test-class members
    // `func`, `inputs` and `outputs` (declared elsewhere in this file).
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new MarkovDiscreteFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    var gradient = new ForwardBackwardGradient<int>(field);
    gradient.Regularization = 2;

    // Numerical gradient of the same objective the analytic gradient targets.
    var numeric = new FiniteDifferences(potential.Weights.Length);
    numeric.Function = w => func(field, w, gradient.Regularization);

    double[] expected = numeric.Compute(potential.Weights);
    double[] actual = gradient.Gradient(potential.Weights, inputs, outputs);

    // Each coordinate must agree within 1e-5 and be a finite number.
    for (int i = 0; i < actual.Length; i++)
    {
        Assert.AreEqual(expected[i], actual[i], 1e-5);
        Assert.IsFalse(double.IsNaN(actual[i]));
        Assert.IsFalse(double.IsNaN(expected[i]));
    }
}
public void RunTest()
{
    // Trains an HCRF by gradient descent for 50 epochs and checks that:
    //  (1) the untrained model (initialized from the HMM) already labels
    //      the training data correctly,
    //  (2) the initial log-likelihood and final epoch error match known
    //      reference values, and
    //  (3) training strictly improves the log-likelihood.
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new MarkovDiscreteFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    var teacher = new HiddenGradientDescentLearning<int>(field);
    teacher.LearningRate = 1000;

    double[] predicted = new double[inputs.Length];
    double[] labels = new double[inputs.Length];

    // Before training: model initialized from the HMM should already agree.
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        labels[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(labels[i], predicted[i]);

    double ll0 = field.LogLikelihood(inputs, outputs);

    double error = Double.NegativeInfinity;
    for (int epoch = 0; epoch < 50; epoch++)
        error = teacher.RunEpoch(inputs, outputs);

    double ll1 = field.LogLikelihood(inputs, outputs);

    // After training: predictions must still match the labels.
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        labels[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.00027018722449589916, error, 1e-10);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(labels[i], predicted[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void HiddenConditionalRandomFieldConstructorTest()
{
    // Constructing an HCRF from a potential function must expose that same
    // function, and the first factor must carry the HMM's state count (2).
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    var potential = new MarkovDiscreteFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    Assert.AreEqual(potential, field.Function);
    Assert.AreEqual(2, field.Function.Factors[0].States);
}
public void ComputeTest()
{
    // Checks that the log-domain forward/backward implementations agree with
    // the linear-domain ones (element-wise, via Math.Log), and that each
    // feature's LogMarginal matches Log(Marginal) for every output class.
    var hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    IPotentialFunction<int> owner = new MarkovDiscreteFunction(hmm);

    int[] sequence = new int[] { 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0 };

    foreach (var factor in owner.Factors)
    {
        for (int y = 0; y < owner.Outputs; y++)
        {
            double[,] fwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Forward(factor, sequence, y);
            double[,] bwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Backward(factor, sequence, y);

            double[,] lnfwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogForward(factor, sequence, y);
            double[,] lnbwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogBackward(factor, sequence, y);

            // log(forward) must equal the log-domain forward, cell by cell.
            for (int i = 0; i < fwd.GetLength(0); i++)
                for (int j = 0; j < fwd.GetLength(1); j++)
                    Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);

            // Same check for the backward matrices.
            for (int i = 0; i < bwd.GetLength(0); i++)
                for (int j = 0; j < bwd.GetLength(1); j++)
                    Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);

            // Feature marginals must be consistent across both domains.
            foreach (var feature in factor)
            {
                double expected = System.Math.Log(feature.Marginal(fwd, bwd, sequence, y));
                double actual = feature.LogMarginal(lnfwd, lnbwd, sequence, y);

                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
    }
}
public void ComputeTest()
{
    // An HCRF built from an HMM classifier must reproduce the HMM exactly:
    // same predicted class and same per-class log-likelihoods on every sample.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    // Declare some testing data
    int[][] inputs = new int[][]
    {
        new int[] { 0, 1, 1, 0 },    // Class 0
        new int[] { 0, 0, 1, 0 },    // Class 0
        new int[] { 0, 1, 1, 1, 0 }, // Class 0
        new int[] { 0, 1, 0 },       // Class 0

        new int[] { 1, 0, 0, 1 },    // Class 1
        new int[] { 1, 1, 0, 1 },    // Class 1
        new int[] { 1, 0, 0, 0, 1 }, // Class 1
        new int[] { 1, 0, 1 },       // Class 1
    };

    int[] outputs = new int[]
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    var potential = new MarkovDiscreteFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    for (int i = 0; i < inputs.Length; i++)
    {
        int expected = hmm.Compute(inputs[i]);
        int actual = field.Compute(inputs[i]);

        double h0 = hmm.LogLikelihood(inputs[i], 0);
        double h1 = hmm.LogLikelihood(inputs[i], 1);

        double c0 = field.LogLikelihood(inputs[i], 0);
        double c1 = field.LogLikelihood(inputs[i], 1);

        Assert.AreEqual(expected, actual);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.AreEqual(h1, c1, 1e-10);
        Assert.IsFalse(double.IsNaN(c0));
        Assert.IsFalse(double.IsNaN(c1));
    }
}
public void RunTest()
{
    // Trains an HCRF with quasi-Newton (L-BFGS style) learning and checks:
    //  (1) the HMM-initialized model already labels the data correctly,
    //  (2) training converges to (near) zero error, with the returned error
    //      equal to the negated final log-likelihood, and
    //  (3) the log-likelihood strictly improves. Uses the test-class
    //      members `inputs` and `outputs` declared elsewhere in this file.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new MarkovDiscreteFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    var teacher = new HiddenQuasiNewtonLearning<int>(field);

    double[] predicted = new double[inputs.Length];
    double[] labels = new double[inputs.Length];

    // Before training: model initialized from the HMM should already agree.
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        labels[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(labels[i], predicted[i]);

    double ll0 = field.LogLikelihood(inputs, outputs);
    double error = teacher.Run(inputs, outputs);
    double ll1 = field.LogLikelihood(inputs, outputs);

    // After training: predictions must still match the labels.
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        labels[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.0, error, 1e-10);
    // Exact equality is intentional: the learner's error is defined as the
    // negated log-likelihood, produced by the same computation.
    Assert.AreEqual(error, -ll1);
    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(labels[i], predicted[i]);

    Assert.IsTrue(ll1 > ll0);
}