public void GradientTest3()
{
    // Verifies the analytic regularized gradient against a numerical
    // gradient computed by finite differences over the same objective.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(hmm);

    var field = new HiddenConditionalRandomField<int>(potential);
    var learner = new QuasiNewtonHiddenLearning<int>(field);
    learner.Regularization = 2;

    var numeric = new FiniteDifferences(potential.Weights.Length);
    numeric.Function = w => func(field, w, learner.Regularization);

    double[] expectedGradient = numeric.Compute(potential.Weights);
    double[] actualGradient = learner.Gradient(potential.Weights, inputs, outputs);

    for (int k = 0; k < actualGradient.Length; k++)
    {
        Assert.AreEqual(expectedGradient[k], actualGradient[k], 1e-5);
        Assert.IsFalse(double.IsNaN(actualGradient[k]));
        Assert.IsFalse(double.IsNaN(expectedGradient[k]));
    }
}
public void RunTest()
{
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    // A field initialized from an already-trained HMM should classify every
    // training sequence correctly even before any learning takes place.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    var learner = new GradientDescentHiddenLearning<int>(field);
    learner.LearningRate = 1000;

    double[] predicted = new double[inputs.Length];
    double[] truth = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(truth[i], predicted[i]);

    double llBefore = field.LogLikelihood(inputs, outputs);

    // Run fifty epochs of gradient descent, keeping the last epoch error.
    double error = Double.NegativeInfinity;
    for (int epoch = 0; epoch < 50; epoch++)
        error = learner.RunEpoch(inputs, outputs);

    double llAfter = field.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, llBefore, 1e-10);
    Assert.AreEqual(0.00027018722449589916, error, 1e-10);
    Assert.IsFalse(Double.IsNaN(llBefore));
    Assert.IsFalse(Double.IsNaN(error));

    // Training must not break the classification...
    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(truth[i], predicted[i]);

    // ...and the joint log-likelihood should strictly improve.
    Assert.IsTrue(llAfter > llBefore);
}
public void HiddenConditionalRandomFieldConstructorTest()
{
    // The constructed field must expose the potential function it was
    // built from, and the first class factor must carry two hidden states.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(hmm);

    var field = new HiddenConditionalRandomField<int>(potential);

    Assert.AreEqual(potential, field.Function);
    Assert.AreEqual(2, field.Function.Factors[0].States);
}
public void ComputeTest()
{
    // The log-domain forward/backward recursions must agree (up to Log)
    // with the linear-domain ones, and each feature's marginal must match
    // its log-marginal counterpart for every factor and output label.
    var hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    IPotentialFunction<int> owner = new DiscreteMarkovClassifierFunction(hmm);

    int[] observations = { 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0 };

    foreach (var factor in owner.Factors)
    {
        for (int label = 0; label < owner.Outputs; label++)
        {
            double[,] forward = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Forward(factor, observations, label);
            double[,] backward = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Backward(factor, observations, label);

            double[,] logForward = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogForward(factor, observations, label);
            double[,] logBackward = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogBackward(factor, observations, label);

            for (int i = 0; i < forward.GetLength(0); i++)
                for (int j = 0; j < forward.GetLength(1); j++)
                    Assert.AreEqual(System.Math.Log(forward[i, j]), logForward[i, j], 1e-10);

            for (int i = 0; i < backward.GetLength(0); i++)
                for (int j = 0; j < backward.GetLength(1); j++)
                    Assert.AreEqual(System.Math.Log(backward[i, j]), logBackward[i, j], 1e-10);

            foreach (var feature in factor)
            {
                double expected = System.Math.Log(feature.Marginal(forward, backward, observations, label));
                double actual = feature.LogMarginal(logForward, logBackward, observations, label);

                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
    }
}
public void RunTest2()
{
    var inputs = QuasiNewtonHiddenLearningTest.inputs;
    var outputs = QuasiNewtonHiddenLearningTest.outputs;

    // Seed the generator so the randomly-initialized potential function
    // (2 classes, 2 states, 2 symbols) is reproducible across runs.
    Accord.Math.Tools.SetupGenerator(0);

    var potential = new DiscreteMarkovClassifierFunction(2, 2, 2);
    var field = new HiddenConditionalRandomField<int>(potential);
    var learner = new GradientDescentHiddenLearning<int>(field);

    double[] predicted = new double[inputs.Length];
    double[] truth = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    double llBefore = field.LogLikelihood(inputs, outputs);

    // Fifty epochs of gradient descent; keep the final epoch error.
    double error = Double.PositiveInfinity;
    for (int epoch = 0; epoch < 50; epoch++)
        error = learner.RunEpoch(inputs, outputs);

    double llAfter = field.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    Assert.AreEqual(-5.5451774444795623, llBefore, 1e-10);
    Assert.AreEqual(0, error, 1e-5);
    Assert.IsFalse(double.IsNaN(error));

    // After training the field must classify every sequence correctly,
    // and the log-likelihood must have improved.
    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(truth[i], predicted[i]);

    Assert.IsTrue(llAfter > llBefore);
}
public void ComputeTest()
{
    // A hidden CRF constructed from a trained HMM classifier should
    // reproduce the HMM's class decisions and per-class log-likelihoods.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    // Testing sequences: first half belongs to class 0, second to class 1.
    int[][] sequences =
    {
        new int[] { 0, 1, 1, 0 },       // Class 0
        new int[] { 0, 0, 1, 0 },       // Class 0
        new int[] { 0, 1, 1, 1, 0 },    // Class 0
        new int[] { 0, 1, 0 },          // Class 0
        new int[] { 1, 0, 0, 1 },       // Class 1
        new int[] { 1, 1, 0, 1 },       // Class 1
        new int[] { 1, 0, 0, 0, 1 },    // Class 1
        new int[] { 1, 0, 1 },          // Class 1
    };

    int[] labels =
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    var potential = new DiscreteMarkovClassifierFunction(hmm);
    var field = new HiddenConditionalRandomField<int>(potential);

    for (int i = 0; i < sequences.Length; i++)
    {
        int expectedClass = hmm.Compute(sequences[i]);
        int actualClass = field.Compute(sequences[i]);

        double h0 = hmm.LogLikelihood(sequences[i], 0);
        double h1 = hmm.LogLikelihood(sequences[i], 1);

        double c0 = field.LogLikelihood(sequences[i], 0);
        double c1 = field.LogLikelihood(sequences[i], 1);

        Assert.AreEqual(expectedClass, actualClass);
        Assert.AreEqual(h0, c0, 1e-10);
        Assert.AreEqual(h1, c1, 1e-10);

        Assert.IsFalse(double.IsNaN(c0));
        Assert.IsFalse(double.IsNaN(c1));
    }
}
public void RunTest()
{
    // A field built from an already-trained HMM should classify the
    // training data perfectly before learning; one quasi-Newton epoch
    // must then improve the joint log-likelihood without breaking it.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new DiscreteMarkovClassifierFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new QuasiNewtonHiddenLearning<int>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double ll0 = model.LogLikelihood(inputs, outputs);
    double error = target.RunEpoch(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.00012170806701733428, error, 1e-10);

    // FIX: the epoch error is the negated log-likelihood, but the original
    // assertion compared two independently-computed doubles for EXACT
    // equality (and with the actual value in the expected slot). Use the
    // tolerance overload with conventional (expected, actual) ordering so
    // the test is robust to floating-point noise across runtimes.
    Assert.AreEqual(-ll1, error, 1e-10);

    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
{
    // The function's weight vector must be laid out per class as:
    // [log prior, initial log-probabilities, transitions (row-major),
    //  emissions (row-major)], for a total of 22 parameters here.
    HiddenMarkovClassifier model = CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(model);

    var features = potential.Features;
    double[] weights = potential.Weights;

    Assert.AreEqual(22, features.Length);
    Assert.AreEqual(22, weights.Length);

    int k = 0;
    for (int c = 0; c < model.Classes; c++)
    {
        // Class prior (stored in log space).
        Assert.AreEqual(Math.Log(model.Priors[c]), weights[k++]);

        // Initial state probabilities.
        for (int i = 0; i < model[c].States; i++)
            Assert.AreEqual(model[c].Probabilities[i], weights[k++]);

        // State transition matrix, row by row.
        for (int i = 0; i < model[c].States; i++)
            for (int j = 0; j < model[c].States; j++)
                Assert.AreEqual(model[c].Transitions[i, j], weights[k++]);

        // Symbol emission matrix, row by row.
        for (int i = 0; i < model[c].States; i++)
            for (int j = 0; j < model.Symbols; j++)
                Assert.AreEqual(model[c].Emissions[i, j], weights[k++]);
    }
}
public void ComputeTest()
{
    // Each factor potential must equal prior * exp(transition) * exp(emission)
    // once exponentiated, for both initial (-1 -> i) and regular (i -> j) steps.
    HiddenMarkovClassifier model = CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(model);

    int[] sequence = { 0, 1 };

    for (int c = 0; c < model.Classes; c++)
    {
        // Initial state transitions: prior * initial probability * emission.
        for (int i = 0; i < model[c].States; i++)
        {
            double expected = model.Priors[c]
                * Math.Exp(model[c].Probabilities[i])
                * Math.Exp(model[c].Emissions[i, sequence[0]]);

            double actual = Math.Exp(potential.Factors[c].Compute(-1, i, sequence, 0, c));

            Assert.AreEqual(expected, actual, 1e-6);
            Assert.IsFalse(double.IsNaN(actual));
        }

        // Regular state transitions: prior * transition * emission.
        for (int t = 1; t < sequence.Length; t++)
        {
            for (int i = 0; i < model[c].States; i++)
            {
                for (int j = 0; j < model[c].States; j++)
                {
                    double expected = model.Priors[c]
                        * Math.Exp(model[c].Transitions[i, j])
                        * Math.Exp(model[c].Emissions[j, sequence[t]]);

                    double actual = Math.Exp(potential.Factors[c].Compute(i, j, sequence, t, c));

                    Assert.AreEqual(expected, actual, 1e-6);
                    Assert.IsFalse(double.IsNaN(actual));
                }
            }
        }
    }
}
public void ComputeTest()
{
    // Cross-checks the linear-domain and log-domain forward/backward
    // recursions, then verifies feature marginals against log-marginals.
    var classifier = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    IPotentialFunction<int> owner = new DiscreteMarkovClassifierFunction(classifier);

    int[] sequence = { 0, 0, 1, 0, 0, 0, 1, 0, 1, 1, 1, 1, 1, 0 };

    foreach (var factor in owner.Factors)
    {
        for (int output = 0; output < owner.Outputs; output++)
        {
            double[,] fwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Forward(factor, sequence, output);
            double[,] bwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.Backward(factor, sequence, output);
            double[,] lnfwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogForward(factor, sequence, output);
            double[,] lnbwd = Accord.Statistics.Models.Fields
                .ForwardBackwardAlgorithm.LogBackward(factor, sequence, output);

            // Forward matrices agree element-wise up to the logarithm.
            for (int i = 0; i < fwd.GetLength(0); i++)
            {
                for (int j = 0; j < fwd.GetLength(1); j++)
                {
                    Assert.AreEqual(System.Math.Log(fwd[i, j]), lnfwd[i, j], 1e-10);
                }
            }

            // Backward matrices agree element-wise up to the logarithm.
            for (int i = 0; i < bwd.GetLength(0); i++)
            {
                for (int j = 0; j < bwd.GetLength(1); j++)
                {
                    Assert.AreEqual(System.Math.Log(bwd[i, j]), lnbwd[i, j], 1e-10);
                }
            }

            // Marginals computed either way must coincide and be finite.
            foreach (var feature in factor)
            {
                double expected = System.Math.Log(feature.Marginal(fwd, bwd, sequence, output));
                double actual = feature.LogMarginal(lnfwd, lnbwd, sequence, output);

                Assert.AreEqual(expected, actual, 1e-10);
                Assert.IsFalse(Double.IsNaN(actual));
            }
        }
    }
}
public void GradientTest3()
{
    // The analytic gradient with Regularization = 2 must match a
    // finite-difference approximation of the same regularized objective.
    HiddenMarkovClassifier classifier = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(classifier);

    var crf = new HiddenConditionalRandomField<int>(potential);
    var teacher = new QuasiNewtonHiddenLearning<int>(crf);
    teacher.Regularization = 2;

    var approximation = new FiniteDifferences(potential.Weights.Length);
    approximation.Function = parameters => func(crf, parameters, teacher.Regularization);

    double[] reference = approximation.Compute(potential.Weights);
    double[] gradient = teacher.Gradient(potential.Weights, inputs, outputs);

    for (int i = 0; i < gradient.Length; i++)
    {
        Assert.AreEqual(reference[i], gradient[i], 1e-5);
        Assert.IsFalse(double.IsNaN(gradient[i]));
        Assert.IsFalse(double.IsNaN(reference[i]));
    }
}
public void RunTest2()
{
    // Train a randomly-initialized field (seeded for reproducibility)
    // for a single quasi-Newton epoch and check likelihood improvement.
    Accord.Math.Tools.SetupGenerator(0);

    var potential = new DiscreteMarkovClassifierFunction(2, 2, 2);
    var field = new HiddenConditionalRandomField<int>(potential);
    var learner = new QuasiNewtonHiddenLearning<int>(field);

    double[] predicted = new double[inputs.Length];
    double[] truth = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    double llBefore = field.LogLikelihood(inputs, outputs);
    double error = learner.RunEpoch(inputs, outputs);
    double llAfter = field.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        predicted[i] = field.Compute(inputs[i]);
        truth[i] = outputs[i];
    }

    Assert.AreEqual(-5.5451774444795623, llBefore, 1e-10);
    Assert.AreEqual(3.0990811490142045, error, 1e-10);
    Assert.IsFalse(double.IsNaN(error));

    // A single epoch is enough to classify the training data correctly.
    for (int i = 0; i < inputs.Length; i++)
    {
        Assert.AreEqual(truth[i], predicted[i]);
    }

    Assert.IsTrue(llAfter > llBefore);
}
public void RunTest()
{
    // Starting from an HMM-initialized field that already classifies the
    // training data perfectly, one quasi-Newton epoch must improve the
    // joint log-likelihood while preserving every classification.
    HiddenMarkovClassifier hmm = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var function = new DiscreteMarkovClassifierFunction(hmm);

    var model = new HiddenConditionalRandomField<int>(function);
    var target = new QuasiNewtonHiddenLearning<int>(model);

    double[] actual = new double[inputs.Length];
    double[] expected = new double[inputs.Length];
    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    double ll0 = model.LogLikelihood(inputs, outputs);
    double error = target.RunEpoch(inputs, outputs);
    double ll1 = model.LogLikelihood(inputs, outputs);

    for (int i = 0; i < inputs.Length; i++)
    {
        actual[i] = model.Compute(inputs[i]);
        expected[i] = outputs[i];
    }

    Assert.AreEqual(-0.00046872579976353634, ll0, 1e-10);
    Assert.AreEqual(0.00012170806701733428, error, 1e-10);

    // FIX: the original `Assert.AreEqual(error, -ll1)` asserted EXACT
    // floating-point equality between two separately-computed doubles and
    // passed the actual value in the expected position. Compare with a
    // tolerance and conventional (expected, actual) ordering instead.
    Assert.AreEqual(-ll1, error, 1e-10);

    Assert.IsFalse(Double.IsNaN(ll0));
    Assert.IsFalse(Double.IsNaN(error));

    for (int i = 0; i < inputs.Length; i++)
        Assert.AreEqual(expected[i], actual[i]);

    Assert.IsTrue(ll1 > ll0);
}
public void RunTest()
{
    var observations = QuasiNewtonHiddenLearningTest.inputs;
    var labels = QuasiNewtonHiddenLearningTest.outputs;

    // Build the field from a pre-trained HMM; it should already separate
    // the two classes before any gradient-descent epochs are run.
    HiddenMarkovClassifier classifier = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();
    var potential = new DiscreteMarkovClassifierFunction(classifier);
    var crf = new HiddenConditionalRandomField<int>(potential);

    var teacher = new GradientDescentHiddenLearning<int>(crf);
    teacher.LearningRate = 1000;

    double[] computed = new double[observations.Length];
    double[] desired = new double[observations.Length];
    for (int i = 0; i < observations.Length; i++)
    {
        computed[i] = crf.Compute(observations[i]);
        desired[i] = labels[i];
    }

    for (int i = 0; i < observations.Length; i++)
    {
        Assert.AreEqual(desired[i], computed[i]);
    }

    double initialLikelihood = crf.LogLikelihood(observations, labels);

    double lastError = Double.NegativeInfinity;
    for (int epoch = 0; epoch < 50; epoch++)
    {
        lastError = teacher.RunEpoch(observations, labels);
    }

    double finalLikelihood = crf.LogLikelihood(observations, labels);

    for (int i = 0; i < observations.Length; i++)
    {
        computed[i] = crf.Compute(observations[i]);
        desired[i] = labels[i];
    }

    Assert.AreEqual(-0.00046872579976353634, initialLikelihood, 1e-10);
    Assert.AreEqual(0.00027018722449589916, lastError, 1e-10);
    Assert.IsFalse(Double.IsNaN(initialLikelihood));
    Assert.IsFalse(Double.IsNaN(lastError));

    // Classification is preserved and the log-likelihood has increased.
    for (int i = 0; i < observations.Length; i++)
    {
        Assert.AreEqual(desired[i], computed[i]);
    }

    Assert.IsTrue(finalLikelihood > initialLikelihood);
}
public void HiddenMarkovHiddenPotentialFunctionConstructorTest()
{
    // Checks that all 22 weights are extracted from the classifier in the
    // order: per class, log prior, then initial probabilities, then the
    // transition matrix, then the emission matrix (both row-major).
    HiddenMarkovClassifier classifier = CreateModel1();
    var function = new DiscreteMarkovClassifierFunction(classifier);

    var features = function.Features;
    double[] parameters = function.Weights;

    Assert.AreEqual(22, features.Length);
    Assert.AreEqual(22, parameters.Length);

    int index = 0;
    for (int c = 0; c < classifier.Classes; c++)
    {
        // Prior is stored as its logarithm.
        Assert.AreEqual(Math.Log(classifier.Priors[c]), parameters[index++]);

        for (int i = 0; i < classifier[c].States; i++)
        {
            Assert.AreEqual(classifier[c].Probabilities[i], parameters[index++]);
        }

        for (int i = 0; i < classifier[c].States; i++)
        {
            for (int j = 0; j < classifier[c].States; j++)
            {
                Assert.AreEqual(classifier[c].Transitions[i, j], parameters[index++]);
            }
        }

        for (int i = 0; i < classifier[c].States; i++)
        {
            for (int j = 0; j < classifier.Symbols; j++)
            {
                Assert.AreEqual(classifier[c].Emissions[i, j], parameters[index++]);
            }
        }
    }
}
public void HiddenConditionalRandomFieldConstructorTest()
{
    // Constructing a field from a potential function must store that
    // function, and the first factor must expose two hidden states.
    HiddenMarkovClassifier classifier = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    var function = new DiscreteMarkovClassifierFunction(classifier);
    var crf = new HiddenConditionalRandomField<int>(function);

    Assert.AreEqual(function, crf.Function);
    Assert.AreEqual(2, crf.Function.Factors[0].States);
}
public void ComputeTest()
{
    // The hidden CRF must agree with the source HMM classifier on both
    // the predicted class and the per-class log-likelihoods.
    HiddenMarkovClassifier classifier = HiddenMarkovClassifierPotentialFunctionTest.CreateModel1();

    // Declare some testing data: four class-0 and four class-1 sequences.
    int[][] samples =
    {
        new int[] { 0, 1, 1, 0 },       // Class 0
        new int[] { 0, 0, 1, 0 },       // Class 0
        new int[] { 0, 1, 1, 1, 0 },    // Class 0
        new int[] { 0, 1, 0 },          // Class 0
        new int[] { 1, 0, 0, 1 },       // Class 1
        new int[] { 1, 1, 0, 1 },       // Class 1
        new int[] { 1, 0, 0, 0, 1 },    // Class 1
        new int[] { 1, 0, 1 },          // Class 1
    };

    int[] classes =
    {
        0, 0, 0, 0, // First four sequences are of class 0
        1, 1, 1, 1, // Last four sequences are of class 1
    };

    var function = new DiscreteMarkovClassifierFunction(classifier);
    var crf = new HiddenConditionalRandomField<int>(function);

    for (int i = 0; i < samples.Length; i++)
    {
        int hmmDecision = classifier.Compute(samples[i]);
        int crfDecision = crf.Compute(samples[i]);
        Assert.AreEqual(hmmDecision, crfDecision);

        double hmmClass0 = classifier.LogLikelihood(samples[i], 0);
        double hmmClass1 = classifier.LogLikelihood(samples[i], 1);

        double crfClass0 = crf.LogLikelihood(samples[i], 0);
        double crfClass1 = crf.LogLikelihood(samples[i], 1);

        Assert.AreEqual(hmmClass0, crfClass0, 1e-10);
        Assert.AreEqual(hmmClass1, crfClass1, 1e-10);
        Assert.IsFalse(double.IsNaN(crfClass0));
        Assert.IsFalse(double.IsNaN(crfClass1));
    }
}