public void ConstructorTest1()
{
    // Two univariate (1-dimensional) Gaussians with unit variance,
    // centered at 2 and 5 respectively.
    var gaussians = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } }),
        new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } }),
    };

    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(gaussians);

    // When no coefficients are supplied, the constructor should
    // assign equal weight to every component.
    double[] expectedCoefficients = { 0.5, 0.5 };
    Assert.IsTrue(expectedCoefficients.IsEqual(mixture.Coefficients));
    Assert.AreEqual(gaussians, mixture.Components);
}
public void ProbabilityDensityFunctionTest()
{
    // Two univariate Gaussians centered at 2 and 5, both with unit variance.
    MultivariateNormalDistribution[] components = new MultivariateNormalDistribution[2];
    components[0] = new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } });
    components[1] = new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } });

    double[] coefficients = { 0.3, 0.7 };
    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(coefficients, components);

    double[] x = { 1.2 };

    // The mixture density must equal the coefficient-weighted
    // sum of the individual component densities.
    double expected = 0.3 * components[0].ProbabilityDensityFunction(x)
                    + 0.7 * components[1].ProbabilityDensityFunction(x);

    double actual = mixture.ProbabilityDensityFunction(x);

    // FIX: use a tolerance instead of exact double equality. The expected
    // value is computed here with a different operation order than inside
    // the mixture, so bit-exact comparison of doubles is brittle.
    Assert.AreEqual(expected, actual, 1e-10);
}
public void MixtureWeightsFitTest2()
{
    // Load the weighted circle data set from the embedded Excel resource.
    var stream = new MemoryStream(Resources.CircleWithWeights);
    var reader = new ExcelReader(stream, xlsx: false);
    DataTable table = reader.GetWorksheet("Sheet1");

    double[,] matrix = table.ToMatrix();
    double[][] points = matrix.Submatrix(null, 0, 1).ToArray(); // columns 0-1: (x, y) coordinates
    double[] weights = matrix.GetColumn(2);                     // column 2: per-sample weights

    // Randomly initialize some mixture components
    var components = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 0, 1 }, Matrix.Identity(2)),
        new MultivariateNormalDistribution(new double[] { 1, 0 }, Matrix.Identity(2)),
    };

    // Create an initial mixture and fit it to the weighted observations.
    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(components);
    mixture.Fit(points, weights);

    // After fitting, the component means should match the known reference values.
    Assert.AreEqual(-0.11704994950834195, mixture.Components[0].Mean[0], 1e-10);
    Assert.AreEqual(0.11603470123007256, mixture.Components[0].Mean[1], 1e-10);
    Assert.AreEqual(0.11814483652855159, mixture.Components[1].Mean[0], 1e-10);
    Assert.AreEqual(-0.12029275652994373, mixture.Components[1].Mean[1], 1e-10);
}
public void FitTest2()
{
    // Start from an even 50/50 mixture of two unit-variance Gaussians.
    double[] priors = { 0.50, 0.50 };
    var gaussians = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } }),
        new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } }),
    };

    var target = new MultivariateMixture<MultivariateNormalDistribution>(priors, gaussians);

    // The first observation is a huge outlier, but it carries zero weight,
    // so fitting should ignore it entirely.
    double[][] values =
    {
        new double[] { 2512512312 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 6 },
        new double[] { 6 },
        new double[] { 5 },
        new double[] { 7 },
        new double[] { 5 },
    };

    double[] weights = { 0, 1, 1, 1, 1, 1, 1, 1, 1, 1 };
    weights = weights.Divide(weights.Sum()); // normalize to sum to one

    // The effective samples split into two clusters: rows 1-4 and rows 5-9.
    double[][] part1 = values.Submatrix(1, 4);
    double[][] part2 = values.Submatrix(5, 9);

    target.Fit(values, weights);

    // Each fitted component should recover the statistics of its cluster.
    var mean1 = Accord.Statistics.Tools.Mean(part1);
    var var1 = Accord.Statistics.Tools.Variance(part1);
    Assert.AreEqual(mean1[0], target.Components[0].Mean[0], 1e-5);
    Assert.AreEqual(var1[0], target.Components[0].Variance[0], 1e-5);

    var mean2 = Accord.Statistics.Tools.Mean(part2);
    var var2 = Accord.Statistics.Tools.Variance(part2);
    Assert.AreEqual(mean2[0], target.Components[1].Mean[0], 1e-5);
    Assert.AreEqual(var2[0], target.Components[1].Variance[0], 1e-5);

    // The overall mixture mean/covariance should agree with the
    // weighted statistics of the full (weighted) sample.
    var expectedMean = Accord.Statistics.Tools.WeightedMean(values, weights);
    var expectedVar = Accord.Statistics.Tools.WeightedCovariance(values, weights);

    var actualMean = target.Mean;
    var actualVar = target.Covariance;

    Assert.AreEqual(expectedMean[0], actualMean[0], 0.0000001);
    Assert.AreEqual(expectedVar[0, 0], actualVar[0, 0], 0.68);
}
public void MixtureWeightsFitTest()
{
    // Randomly initialize some mixture components
    var components = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } }),
        new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } }),
    };

    // Create an initial mixture
    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(components);

    // Now, suppose we have a weighted data set. Those will be the input
    // points (ToArray converts the vector into a jagged array with one
    // 1-dimensional observation per element):
    double[][] points = new double[] { 0, 3, 1, 7, 3, 5, 1, 2, -1, 2, 7, 6, 8, 6 } // (14 points)
        .ToArray();

    // And those are their respective unormalized weights:
    double[] weights = { 1, 1, 1, 2, 2, 1, 1, 1, 2, 1, 2, 3, 1, 1 }; // (14 weights)

    // Let's normalize the weights so they sum up to one:
    weights = weights.Divide(weights.Sum());

    // Now we can fit our model to the data:
    mixture.Fit(points, weights); // done!

    // Our model will be:
    double mean1 = mixture.Components[0].Mean[0]; // 1.41126
    double mean2 = mixture.Components[1].Mean[0]; // 6.53301

    // If we need the GaussianMixtureModel functionality, we can
    // use the estimated mixture to initialize a new model:
    var gmm = new GaussianMixtureModel(mixture);

    // The initialized model must mirror the fitted mixture exactly.
    Assert.AreEqual(mean1, gmm.Gaussians[0].Mean[0]);
    Assert.AreEqual(mean2, gmm.Gaussians[1].Mean[0]);

    Assert.AreEqual(1.4112610766836404, mean1, 1e-10);
    Assert.AreEqual(6.5330177004151082, mean2, 1e-10);

    Assert.AreEqual(mixture.Coefficients[0], gmm.Gaussians[0].Proportion);
    Assert.AreEqual(mixture.Coefficients[1], gmm.Gaussians[1].Proportion);
}
public void LogProbabilityDensityFunctionTest()
{
    // Two univariate Gaussians at 2 and 5 with unit variance.
    var gaussians = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } }),
        new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } }),
    };

    double[] priors = { 0.3, 0.7 };
    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(priors, gaussians);

    double[] x = { 1.2 };

    // The log-density must equal the log of the sum of the
    // per-component (coefficient-weighted) densities.
    double perComponentSum = mixture.ProbabilityDensityFunction(0, x)
                           + mixture.ProbabilityDensityFunction(1, x);
    double expected = System.Math.Log(perComponentSum);

    double actual = mixture.LogProbabilityDensityFunction(x);

    Assert.AreEqual(expected, actual, 1e-10);
    Assert.IsFalse(double.IsNaN(actual));
}
public void DistributionFunctionTestPerComponent()
{
    // Two univariate Gaussians centered at 2 and 5, both with unit variance.
    MultivariateNormalDistribution[] components = new MultivariateNormalDistribution[2];
    components[0] = new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } });
    components[1] = new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } });

    double[] coefficients = { 0.3, 0.7 };
    var mixture = new MultivariateMixture<MultivariateNormalDistribution>(coefficients, components);

    double[] x = { 1.2 };

    // The mixture CDF must equal the sum of the per-component
    // (coefficient-weighted) distribution functions.
    double expected = mixture.DistributionFunction(0, x)
                    + mixture.DistributionFunction(1, x);

    double actual = mixture.DistributionFunction(x);

    // FIX: compare with a tolerance rather than exact double equality.
    // expected and actual come from different summation paths, so a
    // bit-exact comparison can fail spuriously.
    Assert.AreEqual(expected, actual, 1e-10);
}
public void FitTest()
{
    // Begin with an even 50/50 mixture of two unit-variance Gaussians.
    double[] priors = { 0.50, 0.50 };
    var gaussians = new MultivariateNormalDistribution[]
    {
        new MultivariateNormalDistribution(new double[] { 2 }, new double[,] { { 1 } }),
        new MultivariateNormalDistribution(new double[] { 5 }, new double[,] { { 1 } }),
    };

    var target = new MultivariateMixture<MultivariateNormalDistribution>(priors, gaussians);

    // Ten 1-dimensional observations forming two obvious clusters:
    // values near 0-1 (rows 0-4) and values near 5-7 (rows 5-9).
    double[][] values =
    {
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 1 },
        new double[] { 0 },
        new double[] { 1 },
        new double[] { 6 },
        new double[] { 6 },
        new double[] { 5 },
        new double[] { 7 },
        new double[] { 5 },
    };

    double[][] part1 = values.Submatrix(0, 4);
    double[][] part2 = values.Submatrix(5, 9);

    target.Fit(values);

    // Each fitted component should recover its cluster's statistics.
    var mean1 = Measures.Mean(part1, dimension: 0);
    var var1 = Measures.Variance(part1);
    Assert.AreEqual(mean1[0], target.Components[0].Mean[0], 1e-5);
    Assert.AreEqual(var1[0], target.Components[0].Variance[0], 1e-5);

    var mean2 = Measures.Mean(part2, dimension: 0);
    var var2 = Measures.Variance(part2);
    Assert.AreEqual(mean2[0], target.Components[1].Mean[0], 1e-5);
    Assert.AreEqual(var2[0], target.Components[1].Variance[0], 1e-5);

    // The overall mixture mean should match the sample mean.
    var expectedMean = Measures.Mean(values, dimension: 0);
    var expectedVar = Measures.Covariance(values);

    var actualMean = target.Mean;
    var actualVar = target.Covariance;

    Assert.AreEqual(expectedMean[0], actualMean[0], 0.0000001);
    // Assert.AreEqual(expectedVar[0, 0], actualVar[0, 0], 0.0000001);
}
public void LearnTest4()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var sequences = new double[][][]
    {
        // First sequence, label = 0: values counting up from 0 to 4.
        new double[][]
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },
        // Second sequence, label = 1: the same values, reversed.
        new double[][]
        {
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        },
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Create a mixture of two 1-dimensional normal distributions (by default,
    // initialized with zero mean and unit covariance matrices).
    var density = new MultivariateMixture<MultivariateNormalDistribution>(
        new MultivariateNormalDistribution(1),
        new MultivariateNormalDistribution(1));

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate mixture of Normal distributions as density.
    var classifier = new HiddenMarkovClassifier<MultivariateMixture<MultivariateNormalDistribution>>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateMixture<MultivariateNormalDistribution>>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<MultivariateMixture<MultivariateNormalDistribution>>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double likelihood1, likelihood2;

    // Try to classify the 1st sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out likelihood1);

    // Try to classify the 2nd sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out likelihood2);

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);

    Assert.AreEqual(-13.271981026832933, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood1, 1e-10);
    Assert.AreEqual(0.99999791320102149, likelihood2, 1e-10);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(likelihood1));
    Assert.IsFalse(double.IsNaN(likelihood2));
}
/// <summary>
///   Initializes the model with initial values obtained
///   from an existing mixture distribution.
/// </summary>
///
public void Initialize(MultivariateMixture<MultivariateNormalDistribution> mixture)
    => clusters.Initialize(mixture);
/// <summary>
///   Initializes the model with initial values taken from
///   the coefficients and components of an existing mixture.
/// </summary>
///
public void Initialize(MultivariateMixture<MultivariateNormalDistribution> mixture)
    => Initialize(mixture.Coefficients, mixture.Components);
public void LearnTest4()
{
    // Create a Continuous density Hidden Markov Model Sequence Classifier
    // to detect a multivariate sequence and the same sequence backwards.
    var sequences = new double[][][]
    {
        // First sequence, label = 0: ascending from 0 to 4.
        new double[][]
        {
            new double[] { 0 },
            new double[] { 1 },
            new double[] { 2 },
            new double[] { 3 },
            new double[] { 4 },
        },
        // Second sequence, label = 1: the same values in reverse.
        new double[][]
        {
            new double[] { 4 },
            new double[] { 3 },
            new double[] { 2 },
            new double[] { 1 },
            new double[] { 0 },
        },
    };

    // Labels for the sequences
    int[] labels = { 0, 1 };

    // Create a mixture of two 1-dimensional normal distributions (by default,
    // initialized with zero mean and unit covariance matrices).
    var density = new MultivariateMixture<MultivariateNormalDistribution>(
        new MultivariateNormalDistribution(1),
        new MultivariateNormalDistribution(1));

    // Creates a sequence classifier containing 2 hidden Markov Models with 2 states
    // and an underlying multivariate mixture of Normal distributions as density.
    var classifier = new HiddenMarkovClassifier<MultivariateMixture<MultivariateNormalDistribution>>(
        2, new Ergodic(2), density);

    // Configure the learning algorithms to train the sequence classifier
    var teacher = new HiddenMarkovClassifierLearning<MultivariateMixture<MultivariateNormalDistribution>>(
        classifier,

        // Train each model until the log-likelihood changes less than 0.0001
        modelIndex => new BaumWelchLearning<MultivariateMixture<MultivariateNormalDistribution>>(
            classifier.Models[modelIndex])
        {
            Tolerance = 0.0001,
            Iterations = 0,
        }
    );

    // Train the sequence classifier using the algorithm
    double logLikelihood = teacher.Run(sequences, labels);

    // Calculate the probability that the given
    // sequences originated from the model
    double logLikelihood1, logLikelihood2;

    // Try to classify the 1st sequence (output should be 0)
    int c1 = classifier.Compute(sequences[0], out logLikelihood1);

    // Try to classify the 2nd sequence (output should be 1)
    int c2 = classifier.Compute(sequences[1], out logLikelihood2);

    Assert.AreEqual(0, c1);
    Assert.AreEqual(1, c2);

    Assert.AreEqual(-13.271981026832933, logLikelihood, 1e-10);
    Assert.AreEqual(0.99999791320102149, logLikelihood1, 1e-10);
    Assert.AreEqual(0.99999791320102149, logLikelihood2, 1e-10);

    Assert.IsFalse(double.IsNaN(logLikelihood));
    Assert.IsFalse(double.IsNaN(logLikelihood1));
    Assert.IsFalse(double.IsNaN(logLikelihood2));
}