Example no. 1
        public void GaussianMixtureModelConstructorTest()
        {
            Accord.Math.Tools.SetupGenerator(0);

            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            double[] sample = samples[0];


            // Create a new Gaussian Mixture Model with 2 components
            GaussianMixtureModel gmm = new GaussianMixtureModel(2);

            // Compute the model (estimate)
            gmm.Compute(samples, 0.0001);

            // Classify a single sample
            int c = gmm.Classify(sample);

            Assert.AreEqual(2, gmm.Gaussians.Count);

            for (int i = 0; i < samples.Length; i++)
            {
                sample = samples[i];
                c      = gmm.Classify(sample);

                Assert.AreEqual(i >= 5 ? 1 : 0, c);
            }
        }
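As a side note, the fitted components themselves can be inspected once Compute() has run. The lines below are an illustrative sketch, not part of the original test, and assume the 2.x-era Accord.NET API in which each entry of gmm.Gaussians exposes Mean, Covariance and Proportion:

            // Illustrative only (not in the original test): print the fitted components.
            // Assumes each cluster exposes Mean and Proportion, as in Accord.NET 2.x.
            for (int i = 0; i < gmm.Gaussians.Count; i++)
            {
                var g = gmm.Gaussians[i];
                Console.WriteLine("Component {0}: weight = {1:F3}", i, g.Proportion);
                Console.WriteLine("  mean = ({0:F2}, {1:F2})", g.Mean[0], g.Mean[1]);
            }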
Example no. 2
        internal void Train()
        {
            logLike  = new double[maxK]; // Log-likelihood estimate for a given model and training set
            rissanen = new double[maxK]; // Rissanen estimate for a given model and training set
            mdl      = new double[maxK]; // Minimum description length for a given model and training set
            kVar     = new bool[maxK];
            for (int i = 0; i < maxK; ++i)
            {
                // Step 1: Make a GMM with K subclasses
                gmm = new GaussianMixtureModel(kVals[i]);

                // Step 2: fit the gmm to the projection data
                logLike[i] = gmm.Compute(currentProjection, 1e-3, 1.0);

                // Step 3: classify the data to verify that all K components are actually used
                kVar[i] = (kVals[i] == gmm.Classify(currentProjection).Distinct().Count());

                // Step 4: Calculate the MDL for this K
                double L = (double)(kVals[i] * 3 - 1);
                rissanen[i] = 0.5 * L * Math.Log(currentProjection.Length);
                mdl[i]      = -logLike[i] + rissanen[i];
            }

            // Which value of K minimizes the MDL?
            int ind = Array.IndexOf(mdl, mdl.Min());

            // Advance to the smallest-MDL value of K whose classification was verified
            while (!kVar[ind])
                ++ind;
            K = kVals[ind];

            // Recreate the gmm with the trained value of K
            gmm = new GaussianMixtureModel(K);
            double LL = gmm.Compute(currentProjection, 1e-3, 1.0);

            // The classifier has now been retrained with the selected K
            trained = true;

            // Set the STD ellipsoid
            gmm.SetPValue(pValue);
        }
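The loop above scores each candidate K with a minimum description length criterion. Restated on its own, the arithmetic is simply the following; this helper is a sketch that mirrors Train() and is not part of the original class:

        // Sketch only (not in the original class): the MDL criterion used in Train().
        // A K-component model is charged 3K - 1 free parameters, giving
        // MDL(K) = -logLikelihood + 0.5 * (3K - 1) * ln(N).
        private static double MinimumDescriptionLength(double logLikelihood, int k, int sampleCount)
        {
            double parameters = 3.0 * k - 1.0;                          // same parameter count as Train()
            double rissanen = 0.5 * parameters * Math.Log(sampleCount); // complexity penalty
            return -logLikelihood + rissanen;                           // smaller is better
        }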
Example no. 3
        private void btnCompute_Click(object sender, EventArgs e)
        {
            // Create a new Gaussian Mixture Model
            GaussianMixtureModel gmm = new GaussianMixtureModel(k);

            // If available, initialize with k-means
            if (kmeans != null)
            {
                gmm.Initialize(kmeans);
            }

            // Compute the model
            gmm.Compute(mixture);

            // Classify all instances in mixture data
            int[] classifications = gmm.Classify(mixture);

            // Draw the classifications
            updateGraph(classifications);
        }
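The handler above relies on kmeans and mixture fields that are prepared elsewhere on the form. One plausible way to set them up, assuming the same older Accord.NET API (Accord.MachineLearning.KMeans), is sketched below; the handler name is hypothetical:

        // Hypothetical companion handler (not part of the original form): run k-means
        // first so the mixture model above can be initialized from it.
        // Assumes KMeans.Compute(double[][]) returns hard cluster labels, as in Accord.NET 2.x.
        private void btnKMeans_Click(object sender, EventArgs e)
        {
            kmeans = new KMeans(k);                  // same k that the GMM will use
            int[] labels = kmeans.Compute(mixture);  // cluster the mixture data
            updateGraph(labels);                     // reuse the form's drawing helper
        }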
Example no. 4
        internal void Classify()
        {
            // Assign each point of the current projection to its most likely mixture component
            classes = gmm.Classify(currentProjection);
        }