Example #1
        public void GaussianMixtureModelExample()
        {
            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 },
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            double[] sample = samples[0];


            // Create a new Gaussian mixture with 2 components
            var gmm = new GaussianMixtureModel(components: 2);

            // Estimate the model parameters from the samples
            var clusters = gmm.Learn(samples);

            // Log-likelihood of the fitted model
            double logLikelihood = gmm.LogLikelihood;

            // Classify a single sample
            int c0 = clusters.Decide(samples[0]);
            int c1 = clusters.Decide(samples[1]);

            int c7 = clusters.Decide(samples[7]);
            int c8 = clusters.Decide(samples[8]);

            Assert.AreEqual(c0, c1);
            Assert.AreEqual(c7, c8);
            Assert.AreNotEqual(c0, c8);


            // Extract the mixture of multivariate Normal distributions from the model
            MultivariateMixture<MultivariateNormalDistribution> mixture =
                gmm.ToMixtureDistribution();

            Assert.AreEqual(2, mixture.Dimension);
            Assert.AreEqual(2, mixture.Components.Length);
            Assert.AreEqual(2, mixture.Coefficients.Length);
        }
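Once learned, the cluster collection and the extracted mixture can be queried directly. The short sketch below is illustrative only: it assumes gmm.Learn returns a GaussianClusterCollection, as in Accord.NET 3.x, and reuses the MultivariateMixture type from the example above.

        // Illustrative sketch only (not part of the original example): inspecting the
        // objects produced in Example #1. GaussianClusterCollection is assumed to be
        // the type returned by gmm.Learn in Accord.NET 3.x.
        public void InspectFittedMixture(
            GaussianClusterCollection clusters,
            MultivariateMixture<MultivariateNormalDistribution> mixture)
        {
            // Mixing coefficients: one weight per component, summing to one
            double[] weights = mixture.Coefficients;

            // Mean vector of the first Gaussian component
            double[] firstMean = mixture.Components[0].Mean;

            // The mixture behaves like any multivariate distribution, so its
            // density can be evaluated at an arbitrary point
            double density = mixture.ProbabilityDensityFunction(new double[] { 0, 1 });

            // Cluster labels still come from the learned cluster collection
            int label = clusters.Decide(new double[] { 0, 1 });
        }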
Example #2
        public void GaussianMixtureModelExample()
        {
            // Test Samples
            double[][] samples =
            {
                new double[] { 0, 1 },
                new double[] { 1, 2 }, 
                new double[] { 1, 1 },
                new double[] { 0, 7 },
                new double[] { 1, 1 },
                new double[] { 6, 2 },
                new double[] { 6, 5 },
                new double[] { 5, 1 },
                new double[] { 7, 1 },
                new double[] { 5, 1 }
            };

            double[] sample = samples[0];


            // Create a new Gaussian mixture with 2 components
            var gmm = new GaussianMixtureModel(components: 2);

            // Compute the model (estimate)
            double error = gmm.Compute(samples);

            // Classify a single sample
            int c0 = gmm.Gaussians.Nearest(samples[0]);
            int c1 = gmm.Gaussians.Nearest(samples[1]);

            int c7 = gmm.Gaussians.Nearest(samples[7]);
            int c8 = gmm.Gaussians.Nearest(samples[8]);

            Assert.AreEqual(c0, c1);
            Assert.AreEqual(c7, c8);
            Assert.AreNotEqual(c0, c8);


            // Extract the mixture of multivariate Normal distributions from the model
            MultivariateMixture<MultivariateNormalDistribution> mixture =
                gmm.ToMixtureDistribution();

            Assert.AreEqual(2, mixture.Dimension);
            Assert.AreEqual(2, mixture.Components.Length);
            Assert.AreEqual(2, mixture.Coefficients.Length);
        }
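Example #2 shows the pre-3.x API, where Compute fits the model in place and the fitted clusters are exposed through gmm.Gaussians. If the per-component parameters are needed with that API, they can presumably be read from the cluster collection as sketched below; the Mean, Covariance and Proportion property names are assumptions about the older GaussianCluster type, not taken from the example itself.

        // Hedged sketch for the older API of Example #2: after gmm.Compute(samples),
        // per-component parameters are assumed to be exposed on gmm.Gaussians[i]
        // (Mean, Covariance and Proportion are assumed property names).
        public void InspectOldApiClusters(GaussianMixtureModel gmm)
        {
            for (int i = 0; i < gmm.Gaussians.Count; i++)
            {
                double[] mean = gmm.Gaussians[i].Mean;               // component mean
                double[,] covariance = gmm.Gaussians[i].Covariance;  // component covariance
                double proportion = gmm.Gaussians[i].Proportion;     // mixing weight
            }
        }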
Example #3
        /// <summary>
        /// Generates a training set consisting of both feasible and infeasible examples
        /// </summary>
        public void GenerateTrainingData()
        {
            Feasibles = GenerateFeasibleExamples(GlobalVariables.FeasibleExamplesCount);

            // Fill the training data: each feasible example is extended with the value 1.0
            TrainingData = Feasibles.AddColumnWithValues(1.0);

            // Mark every feasible training row with the positive output label
            Output.AddRange(Enumerable.Repeat(1, TrainingData.Count));

            _dal.TrainingFeasibleExamples = Feasibles.ToArray();

            // Configure the Gaussian mixture estimator
            var gmm = new GaussianMixtureModel(GlobalVariables.Components)
            {
                Initializations = 100,
                MaxIterations   = 10000,
                ParallelOptions = new System.Threading.Tasks.ParallelOptions()
                {
                    MaxDegreeOfParallelism = 1
                },
                Tolerance = 10E-11,
                Options   = new Accord.Statistics.Distributions.Fitting.NormalOptions()
                {
                    Regularization = double.Epsilon
                }
            };

            // Estimate the Gaussian Mixture
            gmm.Learn(_dal.TrainingFeasibleExamples);
            var iterations   = gmm.Iterations;
            var distribution = gmm.ToMixtureDistribution();

            // Find the smallest probability density value among the feasible examples (the 0th percentile)
            var minimalProbability = _dal.TrainingFeasibleExamples
                                     .Select(item => distribution.ProbabilityDensityFunction(item))
                                     .Min();

            // Rescale data range for infeasible example creation
            NewBoundries = BoundryRescaler.Rescale(Feasibles);

            // Generate infeasible examples
            var infeasibles = new List<double[]>();

            while (infeasibles.Count < GlobalVariables.InfeasibleExamplesCount)
            {
                // Generate a candidate point within the rescaled boundary
                var x = GenerateLimitedInputs(GlobalVariables.Dimensions, NewBoundries);

                // Calculate probability density function value for given input
                var probability = distribution.ProbabilityDensityFunction(x);

                // Reject the candidate if its density is not below the smallest density of the feasible examples
                if (probability > minimalProbability)
                {
                    continue;
                }

                infeasibles.Add(x);

                TrainingData.Add(x.ExtendArrayWithValue(0.0));

                Output.Add(0);
            }

            _dal.TrainingInfeasibleExamples = infeasibles.ToArray();
            _dal.TrainingData = TrainingData.ToArray();
        }
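GenerateLimitedInputs, BoundryRescaler and NewBoundries are project-specific helpers that are not shown in this example. For readers following the rejection loop, one plausible shape for the point generator is a uniform draw inside the rescaled per-dimension bounds; the sketch below is purely hypothetical and uses its own (double Min, double Max)[] bounds parameter rather than the real NewBoundries type.

        // Purely hypothetical sketch of a GenerateLimitedInputs-style helper:
        // draws one point uniformly at random inside the given per-dimension bounds.
        // The (Min, Max) tuple array stands in for the real NewBoundries type.
        private static readonly Random _random = new Random();

        public static double[] GenerateLimitedInputsSketch(int dimensions, (double Min, double Max)[] bounds)
        {
            var point = new double[dimensions];
            for (int d = 0; d < dimensions; d++)
            {
                // Uniform sample within the rescaled range of dimension d
                point[d] = bounds[d].Min + _random.NextDouble() * (bounds[d].Max - bounds[d].Min);
            }
            return point;
        }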