Example #1
0
        /// <summary>
        ///     Predicts the next observation vector: finds the training observation whose
        ///     log-likelihood under the mixture is closest (within the request tolerance)
        ///     to that of the last observation ("yesterday"), then extrapolates yesterday
        ///     by the delta between that observation and its successor.
        /// </summary>
        /// <param name="model">Trained Gaussian mixture model state.</param>
        /// <param name="request">Prediction request carrying the likelihood tolerance.</param>
        /// <param name="trainingSet">Sequence of observation vectors; assumed non-empty with uniform width.</param>
        /// <returns>Predicted next observation vector (all zeros when no usable successor exists).</returns>
        private double[] PredictNextValue(IGaussianMixtureModelState model, IPredictionRequest request, double[][] trainingSet)
        {
            var N      = trainingSet.Length;
            var K      = trainingSet[0].Length;
            var result = new double[K];

            var yesterday           = trainingSet[N - 1];
            var yesterdayLikelihood = LogLikelihood.Calculate(new[] { yesterday }, model.Mixture.Coefficients, GetCovariances(model.Mixture.Components), GetMeans(model.Mixture.Components));

            Debug.WriteLine("Yesterday Likelihood : " + new Vector(yesterday) + " : " + yesterdayLikelihood + " ");

            var guessess       = FindMostSimilarObservations(model, trainingSet, yesterdayLikelihood, request.Tolerance);
            var bestGuessPlace = FindBestGuess(request, guessess);

            // BUGFIX: the successor index must be validated BEFORE indexing into the
            // training set. The original code indexed trainingSet[PlaceInSequence + 1]
            // first and only then checked the bound (inside the loop, where the check
            // was also loop-invariant), so the guard could never fire — an out-of-range
            // index threw beforehand. When no valid successor exists, the zero-filled
            // result is returned, matching the evident intent of the original guard.
            if (bestGuessPlace.PlaceInSequence + 1 < N)
            {
                var tomorrow    = trainingSet[bestGuessPlace.PlaceInSequence + 1];
                var mostSimilar = trainingSet[bestGuessPlace.PlaceInSequence];

                for (var k = 0; k < K; k++)
                {
                    // Extrapolate: yesterday plus the observed step from the most
                    // similar historical observation to its successor.
                    result[k] = yesterday[k] + (tomorrow[k] - mostSimilar[k]);
                }
            }

            Debug.WriteLine("Predicted : " + new Vector(result) + " : " + LogLikelihood.Calculate(new[] { result }, model.Mixture.Coefficients, GetCovariances(model.Mixture.Components), GetMeans(model.Mixture.Components)));

            return(result);
        }
Example #2
0
        /// <summary>
        ///     Fits Distribution parameters to given observation vector
        /// </summary>
        /// <param name="observations">Observation vectors to fit against.</param>
        /// <param name="weights">Per-observation weights.</param>
        /// <param name="likelihood">Log-likelihood of the observations under the fitted distribution.</param>
        /// <returns>The fitted normal distribution.</returns>
        public override IDistribution Evaluate(double[][] observations, double[] weights, out double likelihood)
        {
            // Estimate the weighted sample mean first, then the covariance around it,
            // and wrap both in a normal distribution.
            var fittedMean       = Utils.Mean(observations, weights);
            var fittedCovariance = Utils.Covariance(observations, fittedMean, weights);

            var distribution = new NormalDistribution(fittedMean, fittedCovariance);
            likelihood = LogLikelihood.Calculate(observations, distribution.Covariance, distribution.Mean);

            return distribution;
        }
Example #3
0
File: Mixture.cs  Project: xg86/HmmDotNet
        /// <summary>
        ///     Fits the mixture parameters (mixing coefficients, means, covariances) to the
        ///     observations with the Expectation-Maximization algorithm, iterating until the
        ///     log-likelihood improvement drops to the given threshold.
        /// </summary>
        /// <param name="observations">Observation vectors.</param>
        /// <param name="weights">Per-observation weights; scaled by the observation count before use.</param>
        /// <param name="treshold">Convergence threshold on the log-likelihood improvement per iteration.</param>
        /// <param name="likelihood">Log-likelihood of the observations under the final parameters.</param>
        /// <returns>Mixture distribution built from the converged parameters.</returns>
        /// <exception cref="ApplicationException">Thrown when the likelihood becomes NaN or infinite (EM diverged).</exception>
        public IDistribution Evaluate(double[][] observations, double[] weights, double treshold, out double likelihood)
        {
            // Calculate Maximization of the Likelihood
            _stepsTillConvirgence = 0;
            var convirged = false;
            var pdf       = (IMultivariateDistribution[])_components.Clone();
            var K         = _coefficients.Length;
            var N         = observations.Length;

            //// Initialize pi, covariance matrix and mean
            var pi = (double[])_coefficients.Clone();

            weights = weights.Product(N);

            //// Initial likelihood under the starting parameters (pi is a clone of _coefficients here).
            likelihood = LogLikelihood.Calculate(observations, pi, GetCovariances(pdf), GetMeans(pdf));

            while (!convirged)
            {
                //// Expectation : responsibilities of each component for each observation
                var gamma = CalculateGamma(observations, weights, pdf, pi);
                //// Maximization : re-estimate mixing coefficients
                pi = CalculatePi(gamma);
                //// For each component train new distribution function
                for (var k = 0; k < K; k++)
                {
                    var mean       = CalculateMean(gamma, observations, k);
                    var covariance = CalculateCovariance(gamma, observations, mean, k);
                    pdf[k] = new NormalDistribution(mean, covariance);
                }

                //// Check treshold.
                //// BUGFIX: the likelihood must be evaluated with the UPDATED mixing
                //// coefficients (pi) produced by the M-step, not the stale field
                //// _coefficients — otherwise every iteration scores the re-estimated
                //// components against the original weights and the convergence test
                //// (and the returned likelihood) is wrong.
                var newLikelihood = LogLikelihood.Calculate(observations, pi, GetCovariances(pdf), GetMeans(pdf));
                if (double.IsNaN(newLikelihood) || double.IsInfinity(newLikelihood))
                {
                    throw new ApplicationException("EM algorithm does not convirged");
                }

                convirged  = (newLikelihood - likelihood) <= treshold;
                likelihood = newLikelihood;
                _stepsTillConvirgence++;
            }

            return(new Mixture <IMultivariateDistribution>(pi, pdf));
        }
Example #4
0
        /// <summary>
        ///     Walks the training set backwards (from the second-to-last observation down
        ///     to index 1) and collects every observation whose absolute log-likelihood
        ///     under the mixture lies strictly within <paramref name="tolerance"/> of the
        ///     absolute log-likelihood of yesterday's observation.
        /// </summary>
        /// <param name="model">Trained Gaussian mixture model state.</param>
        /// <param name="trainingSet">Sequence of observation vectors.</param>
        /// <param name="yesterdayLikelihood">Log-likelihood of the most recent observation.</param>
        /// <param name="tolerance">Half-width of the acceptance window.</param>
        /// <returns>Candidate observations with their likelihoods and sequence positions.</returns>
        private IList <ObservationWithLikelihood <double[]> > FindMostSimilarObservations(IGaussianMixtureModelState model, double[][] trainingSet, double yesterdayLikelihood, double tolerance)
        {
            var candidates = new List <ObservationWithLikelihood <double[]> >();
            var target     = Math.Abs(yesterdayLikelihood);

            for (var n = trainingSet.Length - 2; n > 0; n--)
            {
                var observation = trainingSet[n];
                var likelihood  = LogLikelihood.Calculate(new[] { observation }, model.Mixture.Coefficients, GetCovariances(model.Mixture.Components), GetMeans(model.Mixture.Components));
                //Debug.Write((new Vector(observations[n])).ToString() + " : " + likelihood + " " + Environment.NewLine);

                // Strict window: target - tolerance < |likelihood| < target + tolerance.
                var magnitude = Math.Abs(likelihood);
                if (magnitude <= target - tolerance || magnitude >= target + tolerance)
                {
                    continue;
                }

                // NOTE(review): PlaceInSequence is recorded as n - 1 while the stored
                // Observation is trainingSet[n]; callers index the training set with
                // PlaceInSequence, so this looks like an off-by-one — confirm against
                // PredictNextValue before changing. Preserved as-is here.
                candidates.Add(new ObservationWithLikelihood <double[]>()
                {
                    LogLikelihood = likelihood, Observation = observation, PlaceInSequence = n - 1
                });
            }

            return candidates;
        }