Example #1
        /// <summary>
        ///   Fits the underlying distribution to a given set of observations.
        /// </summary>
        ///
        /// <param name="observations">The array of observations to fit the model against. The array
        /// elements can be either of type double (for univariate data) or
        /// type double[] (for multivariate data).</param>
        /// <param name="weights">The weight vector containing the weight for each of the samples.</param>
        /// <param name="options">Optional arguments which may be used during fitting, such
        /// as regularization constants and additional parameters.</param>
        ///
        public void Fit(double[] observations, double[] weights, MixtureOptions options)
        {
            var pdf = new IFittableDistribution<double>[coefficients.Length];

            for (int i = 0; i < components.Length; i++)
            {
                pdf[i] = (IFittableDistribution<double>)components[i];
            }

            bool log = (options != null && options.Logarithm);

            if (log)
            {
                if (weights != null)
                {
                    throw new ArgumentException("The model fitting algorithm does not"
                                                + " currently support different weights when the logarithm option"
                                                + " is enabled. To avoid this exception, pass 'null' as the second"
                                                + " parameter's value when calling this method.");
                }

                var em = new LogExpectationMaximization<double>(coefficients, pdf);

                if (options != null)
                {
                    em.InnerOptions           = options.InnerOptions;
                    em.Convergence.Iterations = options.Iterations;
                    em.Convergence.Tolerance  = options.Threshold;
                }

                em.Compute(observations);
            }
            else
            {
                var em = new ExpectationMaximization<double>(coefficients, pdf);

                if (options != null)
                {
                    em.InnerOptions           = options.InnerOptions;
                    em.Convergence.Iterations = options.Iterations;
                    em.Convergence.Tolerance  = options.Threshold;
                }

                em.Compute(observations, weights);
            }

            for (int i = 0; i < components.Length; i++)
            {
                cache[i] = components[i] = (T)pdf[i];
            }

            this.initialize();
        }
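For reference, a minimal usage sketch of this overload. It assumes the Accord.NET-style Mixture<NormalDistribution>, NormalDistribution and MixtureOptions types that surround this method; the sample data, component parameters and option values are purely illustrative.

// Assumed usings (Accord.NET): Accord.Statistics.Distributions.Univariate,
// Accord.Statistics.Distributions.Fitting.
double[] samples = { 0.1, 0.3, 0.2, 3.9, 4.1, 4.0 };

// Two-component starting guess for the mixture.
var mixture = new Mixture<NormalDistribution>(
    new NormalDistribution(0, 1),   // mean 0, std. dev. 1
    new NormalDistribution(4, 1));  // mean 4, std. dev. 1

var options = new MixtureOptions
{
    Iterations = 100,   // maximum number of EM iterations
    Threshold  = 1e-5   // convergence tolerance
};

// Uniform per-sample weights; the logarithmic branch above instead
// expects 'null' to be passed for the weights.
double[] weights = new double[samples.Length];
for (int i = 0; i < weights.Length; i++)
    weights[i] = 1.0 / samples.Length;

mixture.Fit(samples, weights, options);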
Example #2
        public void Learn(List<DataElement> data)
        {
            kMeans _kMeans = new kMeans(d);

            _gaussianDistributions = _kMeans.GetGroupRepresentatives(data, threshold);

            List<double[][]> mfccVectors = new List<double[][]>();

            for (int i = 0; i < data.Count; i++)
            {
                mfccVectors.Add(data[i].MfccCoefficients);
            }

            em = new ExpectationMaximization(_gaussianDistributions.ToArray(), mfccVectors);

            int iterations = 0;

            while (iterations < maxIterations)
            {
                List<DataElement> prevMeans = new List<DataElement>();

                foreach (var gd in _gaussianDistributions)
                {
                    prevMeans.Add(new DataElement(gd.Mean));
                }

                em.Compute();

                List<DataElement> currMeans = new List<DataElement>();

                foreach (var gd in _gaussianDistributions)
                {
                    currMeans.Add(new DataElement(gd.Mean));
                }

                List<double> meansDistances = MathHelper.CountMeansDistances(prevMeans, currMeans);

                if (!MathHelper.meansDistancesChanged(threshold, meansDistances))
                {
                    break;
                }

                iterations++;
            }

            AfterLearning = true;
        }
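The loop above re-runs EM until the component means stop moving by more than threshold between passes. Below is a minimal, self-contained sketch of that kind of mean-shift convergence test; it only illustrates the check implied by MathHelper.CountMeansDistances and MathHelper.meansDistancesChanged and is not their actual implementation (the means are assumed to be plain double[] vectors here).

using System;
using System.Collections.Generic;

static class ConvergenceSketch
{
    // Returns true while at least one component mean has moved farther than
    // 'threshold' (Euclidean distance) since the previous EM iteration.
    public static bool MeansStillMoving(double threshold,
                                        IList<double[]> prevMeans,
                                        IList<double[]> currMeans)
    {
        for (int i = 0; i < prevMeans.Count; i++)
        {
            double sum = 0;
            for (int d = 0; d < prevMeans[i].Length; d++)
            {
                double diff = prevMeans[i][d] - currMeans[i][d];
                sum += diff * diff;
            }

            if (Math.Sqrt(sum) > threshold)
                return true;   // this component has not settled yet
        }

        return false;          // all means stable: the EM loop can stop
    }
}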
Example #3
 /// <summary>
 ///   Computes the log-likelihood of the distribution
 ///   for a given set of observations.
 /// </summary>
 ///
 public double LogLikelihood(double[] observations)
 {
     return ExpectationMaximization<double>.LogLikelihood(coefficients, cache, observations);
 }
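This overload returns the sum, over the observations, of the log of the coefficient-weighted component densities. A hand-rolled equivalent for the univariate case is sketched below; it assumes Accord.NET-style NormalDistribution components with a ProbabilityDensityFunction method and is for illustration only.

// Illustrative only: computes sum_i log( sum_j coefficients[j] * p_j(x_i) ),
// which is the quantity the mixture log-likelihood above evaluates.
static double MixtureLogLikelihood(double[] coefficients,
                                   NormalDistribution[] components,
                                   double[] observations)
{
    double total = 0;
    foreach (double x in observations)
    {
        double density = 0;
        for (int j = 0; j < components.Length; j++)
            density += coefficients[j] * components[j].ProbabilityDensityFunction(x);

        total += Math.Log(density);
    }
    return total;
}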
Example #4
 /// <summary>
 ///   Computes the log-likelihood of the distribution
 ///   for a given set of weighted observations.
 /// </summary>
 ///
 public double LogLikelihood(double[][] observations, double[] weights)
 {
     return ExpectationMaximization<double[]>.LogLikelihood(coefficients, cache,
                                                            observations, weights, weights.Sum());
 }
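A short usage sketch for this weighted, multivariate overload. The 'model' variable, the two-dimensional samples and the uniform weights are hypothetical; one weight is supplied per observation, and their sum is forwarded as the last argument of the static helper above.

// Hypothetical call on an already-fitted multivariate mixture 'model'
// that exposes the LogLikelihood overload above.
double[][] samples =
{
    new[] { 1.0, 2.0 },
    new[] { 1.1, 1.9 },
    new[] { 5.0, 5.2 }
};

// One weight per observation; uniform weights give every sample equal influence.
double[] weights = { 1.0 / 3, 1.0 / 3, 1.0 / 3 };

double logLikelihood = model.LogLikelihood(samples, weights);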