public static CreateModel4 ( out double[][][] words, out int[] labels, bool usePriors ) : HiddenMarkovClassifier<Independent<NormalDistribution>>
||
words | double[][][] (out) | training observation sequences |
labels | int[] (out) | class label for each sequence |
usePriors | bool | whether class prior probabilities are used when building the model |
return | HiddenMarkovClassifier<Independent<NormalDistribution>>
/// <summary>
///   Verifies that, after calling <c>Deoptimize()</c> on the potential function,
///   the log-forward matrices computed from the conditional random field factors
///   match the log-forward matrices of the underlying hidden Markov classifier
///   models, within a 10% relative tolerance.
/// </summary>
public void LogForwardGesturesDeoptimizedTest()
{
    int[] labels;
    double[][][] words;
    var classifier = IndependentMarkovFunctionTest.CreateModel4(out words, out labels, false);

    var function = new MarkovMultivariateFunction(classifier);
    function.Deoptimize();
    var target = new HiddenConditionalRandomField<double[]>(function);

    foreach (var word in words)
    {
        // Compare each of the three class-conditional models in turn.
        for (int classIndex = 0; classIndex < 3; classIndex++)
        {
            var fieldForward = Accord.Statistics.Models.Fields.ForwardBackwardAlgorithm.LogForward(
                target.Function.Factors[classIndex], word, classIndex);

            var markovForward = Accord.Statistics.Models.Markov.ForwardBackwardAlgorithm.LogForward(
                classifier[classIndex], word);

            int rows = fieldForward.GetLength(0);
            int cols = fieldForward.GetLength(1);

            for (int row = 0; row < rows; row++)
            {
                for (int col = 0; col < cols; col++)
                {
                    double fieldValue = fieldForward[row, col];
                    double markovValue = markovForward[row, col];

                    // Log-probabilities from both formulations should agree
                    // within a relative tolerance of 0.1.
                    Assert.IsTrue(markovValue.IsRelativelyEqual(fieldValue, 0.1));
                }
            }
        }
    }
}
/// <summary>
///   Checks the analytic gradient produced by <see cref="ForwardBackwardGradient{T}"/>
///   (with regularization disabled) against a numerical gradient obtained via
///   finite differences, for a model built without class priors.
/// </summary>
public void GradientTest_MarkovIndependentNormal_NoPriors()
{
    double[][][] observations;
    int[] labels;

    HiddenMarkovClassifier<Independent<NormalDistribution>> hmm =
        IndependentMarkovFunctionTest.CreateModel4(out observations, out labels, usePriors: false);

    var function = new MarkovMultivariateFunction(hmm);
    var model = new HiddenConditionalRandomField<double[]>(function);
    var target = new ForwardBackwardGradient<double[]>(model);

    // Regularization would add a term the numerical gradient does not include.
    target.Regularization = 0;

    var diff = new FiniteDifferences(function.Weights.Length);
    diff.Function = parameters => func(model, parameters, observations, labels);

    double[] expected = diff.Compute(function.Weights);
    double[] actual = target.Gradient(function.Weights, observations, labels);

    for (int i = 0; i < actual.Length; i++)
    {
        // The finite-difference estimate can be undefined (NaN) at some
        // coordinates; skip those rather than failing on a bad reference value.
        if (double.IsNaN(expected[i]))
        {
            continue;
        }

        Assert.AreEqual(expected[i], actual[i], 1e-5);
        Assert.IsFalse(double.IsNaN(actual[i]));
    }
}