Example #1
        public void Gamma_RightLeftNotNormalized_EachEntryMatrixIsSummedToOne()
        {
            var delta = 3;
            var numberOfStatesRightLeft = 4;
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = numberOfStatesRightLeft, Delta = delta, Emissions = CreateEmissions(observations, numberOfStatesRightLeft)
            }); // new HiddenMarkovModelState<NormalDistribution>(numberOfStatesRightLeft, delta, CreateEmissions(observations, numberOfStatesRightLeft)) { LogNormalized = false };

            model.Normalized = false;
            var baseParameters = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = Helper.Convert(observations), Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);
            var @params        = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = Helper.Convert(observations),
                Model        = model,
                Normalized   = model.Normalized
            };
            var estimator = new GammaEstimator <NormalDistribution>();
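            // Gamma is the state posterior: gamma[t][i] is proportional to alpha[t][i] * beta[t][i] and is
            // normalized over states, so with Normalized = false each row is a probability vector summing to 1.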

            var gamma = estimator.Estimate(@params);
            for (int i = 0; i < observations.Length; i++)
            {
                Assert.AreEqual(1.0d, Math.Round(gamma[i].Sum(), 5), string.Format("Failed Gamma Component [{1}] : {0}", gamma[i], i));
            }
        }
Example #2
        public void Sigma_ErgodicAndObservationAndLogNormalized_SigmaCalculated()
        {
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var sequence     = Helper.Convert(observations);
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = NumberOfStates, Emissions = CreateEmissions(observations, NumberOfStates)
            }); // new HiddenMarkovModelState<NormalDistribution>(NumberOfStates, CreateEmissions(observations, NumberOfStates)) { LogNormalized = true };

            model.Normalized = true;
            var baseParameters = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = sequence, Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);

            var @params = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = sequence,
                Model        = model
            };
            var gammaEstimator = new GammaEstimator <NormalDistribution>();
            var muEstimator    = new MuMultivariateEstimator <NormalDistribution>();
            var estimator      = new SigmaMultivariateEstimator <NormalDistribution>();
            var muParams       = new MuEstimationParameters <NormalDistribution>
            {
                Gamma        = gammaEstimator.Estimate(@params),
                Model        = model,
                Normalized   = model.Normalized,
                Observations = Helper.Convert(observations)
            };

            Assert.IsNotNull(estimator);
            var sigma = estimator.Estimate(new SigmaEstimationParameters <NormalDistribution, double[][]>(muParams)
            {
                Mean = muEstimator.Estimate(muParams)
            });

            for (int n = 0; n < NumberOfStates; n++)
            {
                for (int i = 0; i < sequence[0].Dimention; i++)
                {
                    for (int j = 0; j < sequence[0].Dimention; j++)
                    {
                        Assert.IsTrue(sigma[n][i, j] > 0, string.Format("Failed Sigma {0}", sigma[n][i, j]));
                    }
                }
            }
        }
Example #3
        public void Estimate_KsiGammaParameters_TransitionProbabilityMatrixCalculatedAndReturned()
        {
            const int numberOfStates = 2;

            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2010, 12, 18), new DateTime(2011, 12, 18));
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = numberOfStates, Emissions = CreateEmissions(observations, numberOfStates)
            });

            model.Normalized = true;
            var observationsList = new List <IObservation>();

            for (var i = 0; i < observations.Length; i++)
            {
                observationsList.Add(new Observation(observations[i], i.ToString()));
            }
            var baseEstimator = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = Helper.Convert(observations), Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseEstimator);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseEstimator);
            var @params        = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = observationsList,
                Model        = model,
                Normalized   = model.Normalized
            };
            var gammaEstimator = new GammaEstimator <NormalDistribution>();
            var ksiEstimator   = new KsiEstimator <NormalDistribution>();
            var gamma          = gammaEstimator.Estimate(@params);
            var ksi            = ksiEstimator.Estimate(@params);
            var estimator      = new TransitionProbabilityEstimator <NormalDistribution>();
            var parameters     = new KsiGammaTransitionProbabilityMatrixParameters <NormalDistribution>
            {
                Model      = model,
                Ksi        = ksi,
                Gamma      = gamma,
                T          = observations.Length,
                Normalized = model.Normalized
            };

            var estimatedTransitionProbabilityMatrix = estimator.Estimate(parameters);

            Assert.AreEqual(1d, Math.Round(estimatedTransitionProbabilityMatrix[0][0] + estimatedTransitionProbabilityMatrix[0][1], 5));
            Assert.AreEqual(1d, Math.Round(estimatedTransitionProbabilityMatrix[1][0] + estimatedTransitionProbabilityMatrix[1][1], 5));
        }
Example #4
        public void GammaEstimator_ParametersAndNormalized_GammaEstimatorCreated()
        {
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = NumberOfStates, Emissions = CreateEmissions(observations, NumberOfStates)
            }); // new HiddenMarkovModelState<NormalDistribution>(NumberOfStates) { LogNormalized = true };

            model.Normalized = true;

            var estimator = new GammaEstimator <NormalDistribution>();

            Assert.IsNotNull(estimator);
        }
Example #5
        public void Mu_MultivariateAndRightLeftAndNotNormalized_MuCalculated()
        {
            var delta        = 3;
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var sequence     = Helper.Convert(observations);
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = NumberOfStatesRightLeft, Delta = delta, Emissions = CreateEmissions(observations, NumberOfStatesRightLeft)
            }); // new HiddenMarkovModelState<NormalDistribution>(NumberOfStatesRightLeft, delta, CreateEmissions(observations, NumberOfStatesRightLeft)) { LogNormalized = false };

            model.Normalized = false;
            var baseParameters = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = sequence, Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);
            var @params        = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = sequence,
                Model        = model
            };

            var gammaEstimator = new GammaEstimator <NormalDistribution>();
            var estimator      = new MuMultivariateEstimator <NormalDistribution>();
            var muParams       = new MuEstimationParameters <NormalDistribution>
            {
                Gamma        = gammaEstimator.Estimate(@params),
                Model        = model,
                Normalized   = model.Normalized,
                Observations = sequence
            };

            Assert.IsNotNull(estimator);
            var mu = estimator.Estimate(muParams);

            for (int i = 0; i < NumberOfStatesRightLeft; i++)
            {
                for (int j = 0; j < sequence[0].Dimention; j++)
                {
                    Assert.IsTrue(mu[i][j] > 0, string.Format("Failed Mu {0}", mu[i][j]));
                }
            }
        }
Example #6
        // Quote an estimator expression
        public static IExpression QuoteEstimator(object value)
        {
            // todo: remove and use Construction attributes
            IExpression expr = null;

            if (value is BernoulliEstimator)
            {
                BernoulliEstimator g = (BernoulliEstimator)value;
                expr = Builder.NewObject(value.GetType());
            }
            else if (value is DirichletEstimator)
            {
                DirichletEstimator g = (DirichletEstimator)value;
                expr = Builder.NewObject(value.GetType(), Quote((g.Dimension)));
            }
            else if (value is DiscreteEstimator)
            {
                DiscreteEstimator g = (DiscreteEstimator)value;
                expr = Builder.NewObject(value.GetType(), Quote((g.Dimension)));
            }
            else if (value is GammaEstimator)
            {
                GammaEstimator g = (GammaEstimator)value;
                expr = Builder.NewObject(value.GetType());
            }
            else if (value is GaussianEstimator)
            {
                GaussianEstimator g = (GaussianEstimator)value;
                expr = Builder.NewObject(value.GetType());
            }
            else if (value is VectorGaussianEstimator)
            {
                VectorGaussianEstimator g = (VectorGaussianEstimator)value;
                expr = Builder.NewObject(value.GetType(), Quote(g.Dimension));
            }
            else if (value is WishartEstimator)
            {
                WishartEstimator g = (WishartEstimator)value;
                expr = Builder.NewObject(value.GetType(), Quote(g.Dimension));
            }

            return(expr);
        }
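A hypothetical call site (not taken from the source), showing what the helper produces for a parameterless estimator:

            // Builds a construction expression equivalent to "new GammaEstimator()".
            IExpression quoted = QuoteEstimator(new GammaEstimator());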
Example #7
        public void Gamma_RightLeftAndNotNormalized_GammaCalculated()
        {
            var delta = 3;
            var numberOfStatesRightLeft = 4;
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = numberOfStatesRightLeft, Delta = delta, Emissions = CreateEmissions(observations, numberOfStatesRightLeft)
            }); // new HiddenMarkovModelState<NormalDistribution>(numberOfStatesRightLeft, delta, CreateEmissions(observations, numberOfStatesRightLeft)) { LogNormalized = false };

            model.Normalized = false;
            var baseParameters = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = Helper.Convert(observations), Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);

            var @params = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = Helper.Convert(observations),
                Model        = model,
                Normalized   = model.Normalized
            };
            var estimator = new GammaEstimator <NormalDistribution>();

            Assert.IsNotNull(estimator);
            var gamma = estimator.Estimate(@params);
            for (int i = 0; i < observations.Length; i++)
            {
                for (int j = 0; j < numberOfStatesRightLeft; j++)
                {
                    Assert.IsTrue(gamma[i][j] >= 0 && gamma[i][j] <= 1, string.Format("Failed Gamma [{1}][{2}] : {0}", gamma[i][j], i, j));
                }
            }
        }
Example #8
        public void Gamma_ErgodicAndLogNormalized_GammaCalculated()
        {
            var util         = new TestDataUtils();
            var observations = util.GetSvcData(util.FTSEFilePath, new DateTime(2011, 11, 18), new DateTime(2011, 12, 18));
            var model        = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <NormalDistribution>()
            {
                NumberOfStates = NumberOfStates, Emissions = CreateEmissions(observations, NumberOfStates)
            }); // new HiddenMarkovModelState<NormalDistribution>(NumberOfStates) { LogNormalized = true };

            model.Normalized = true;
            var baseParameters = new BasicEstimationParameters <NormalDistribution> {
                Model = model, Observations = Helper.Convert(observations), Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <NormalDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <NormalDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);

            var @params = new AdvancedEstimationParameters <NormalDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = Helper.Convert(observations),
                Model        = model,
                Normalized   = model.Normalized
            };
            var estimator = new GammaEstimator <NormalDistribution>();
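            // With Normalized = true the estimator works in log space, so every gamma entry is a
            // log-probability and is therefore expected to be negative.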

            Assert.IsNotNull(estimator);
            var gamma = estimator.Estimate(@params);
            for (int i = 0; i < observations.Length; i++)
            {
                for (int j = 0; j < NumberOfStates; j++)
                {
                    Assert.IsTrue(gamma[i][j] < 0, string.Format("Failed Gamma {0}", gamma[i][j]));
                }
            }
        }
Example #9
        // Commented out since this test is very slow and does not have any assertions
        //[Fact]
        internal void GaussianFromMeanAndVarianceVaryTruePrec()
        {
            double mean = 2.0;

            var Nvar = Variable.Observed <int>(50);
            var n    = new Range(Nvar);

            var noisePrecision = 10;

            var variancePriors = new double[] { 1, 5, 10 }.Select(i => Gamma.FromShapeAndRate(i, i)).ToArray();

            var varPriorVar = Variable.Observed(new Gamma());

            var variance_EP = Variable <double> .Random(varPriorVar);

            var x_ep  = GaussianFromMeanAndVarianceSampleSizeModel(variance_EP, n, true, noisePrecision);
            var ie_EP = new InferenceEngine();

            ie_EP.ShowWarnings = false;
            ie_EP.ShowProgress = false;

            var ev          = Variable.Bernoulli(.5);
            var modelBlock  = Variable.If(ev);
            var variance_MC = Variable <double> .Random(varPriorVar);

            var x_mc = GaussianFromMeanAndVarianceSampleSizeModel(variance_MC, n, true, noisePrecision);

            modelBlock.CloseBlock();
            var ie_MC = new InferenceEngine();

            ie_MC.ShowWarnings = false;
            ie_MC.ShowProgress = false;

            var precision_VMP = Variable.GammaFromShapeAndRate(1, 1);
            var x_vmp         = GaussianFromMeanAndVarianceSampleSizeModel(precision_VMP, n, false, noisePrecision);
            var ie_VMP        = new InferenceEngine(new VariationalMessagePassing());

            ie_VMP.ShowWarnings = false;
            ie_VMP.ShowProgress = false;

            var ie_EP_prec = new InferenceEngine();

            ie_EP_prec.ShowWarnings = false;
            ie_EP_prec.ShowProgress = false;

            Console.WriteLine("var " + variancePriors.Select(i => "EP" + i.Shape).Aggregate((i, j) => i + " " + j) + " "
                              + variancePriors.Select(i => "MC" + i.Shape).Aggregate((i, j) => i + " " + j) + " EPp VMP EMP");
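            // Sweep the true variance over e^-5 .. e^4 and compare the variance estimates recovered by
            // EP (one column per variance prior), slice-sampling MC (one per prior), EP on the precision
            // parameterization, VMP, and the empirical variance of the data.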
            for (int j = 0; j < 10; j++)
            {
                Rand.Restart(2);
                //int N = (int)Math.Pow(10, j + 1);
                double variance = System.Math.Exp(-5 + j);

                var data = Enumerable.Range(0, Nvar.ObservedValue).Select(i => Gaussian.Sample(mean, 1.0 / variance) + Gaussian.Sample(0, noisePrecision)).ToArray();

                x_ep.ObservedValue  = data;
                x_mc.ObservedValue  = data;
                x_vmp.ObservedValue = data;

                var epMeans = variancePriors.Select(i =>
                {
                    double res = double.NaN;
                    varPriorVar.ObservedValue = i;
                    try
                    {
                        res = ie_EP.Infer <Gamma>(variance_EP).GetMean();
                    }
                    catch
                    {
                    }
                    return(res);
                }).ToArray();

                var mcMeans = variancePriors.Select(p =>
                {
                    double varianceSample = 2;
                    var ge = new GammaEstimator();
                    varPriorVar.ObservedValue    = p;
                    Converter <double, double> f = vari =>
                    {
                        variance_MC.ObservedValue = vari;
                        return(ie_MC.Infer <Bernoulli>(ev).LogOdds);
                    };
                    int burnin  = 1000, thin = 5, numSamples = 1000;
                    var samples = new double[numSamples];
                    for (int i = 0; i < burnin; i++)
                    {
                        varianceSample = NonconjugateVMP2Tests.SliceSampleUnivariate(varianceSample, f, lower_bound: 0);
                    }
                    for (int i = 0; i < numSamples; i++)
                    {
                        for (int k = 0; k < thin; k++)
                        {
                            varianceSample = NonconjugateVMP2Tests.SliceSampleUnivariate(varianceSample, f, lower_bound: 0);
                        }
                        samples[i] = varianceSample;
                        ge.Add(varianceSample);
                    }
                    return(samples.Sum() / numSamples);
                }).ToArray();


                double epMean2 = double.NaN;
                try
                {
                    epMean2 = ie_EP_prec.Infer <Gamma>(precision_VMP).GetMeanInverse();
                }
                catch
                {
                }

                double vmpMean = ie_VMP.Infer <Gamma>(precision_VMP).GetMeanInverse();

                var empiricalMean = data.Sum() / data.Length;
                var empVar        = data.Sum(o => o * o) / data.Length - empiricalMean * empiricalMean;
                Console.Write("{0:G3} ", variance);
                foreach (var i in epMeans)
                {
                    Console.Write("{0:G3} ", i);
                }
                foreach (var i in mcMeans)
                {
                    Console.Write("{0:G3} ", i);
                }
                Console.WriteLine("{0:G3} {1:G3} {2:G3}", epMean2, vmpMean, empVar);
            }
        }
Example #10
        // Commented out since this test is very slow and does not have any assertions
        //[Fact]
        internal void GaussianFromMeanAndVarianceVaryNoise()
        {
            double mean = 2.0, variance = 2.0;

            var Nvar = Variable.Observed <int>(1);
            var n    = new Range(Nvar);

            var noisePrecision = Variable.Array <double>(n);

            var variance_EP = Variable.GammaFromShapeAndRate(1, 1);
            var x_ep        = GaussianPlusNoiseModel(variance_EP, n, true, noisePrecision);
            var ie_EP       = new InferenceEngine();

            ie_EP.ShowWarnings = false;
            ie_EP.ShowProgress = false;

            var ev          = Variable.Bernoulli(.5);
            var modelBlock  = Variable.If(ev);
            var variance_MC = Variable.GammaFromShapeAndRate(1, 1);
            var x_mc        = GaussianPlusNoiseModel(variance_MC, n, true, noisePrecision);

            modelBlock.CloseBlock();
            var ie_MC = new InferenceEngine();

            ie_MC.ShowWarnings = false;
            ie_MC.ShowProgress = false;

            var precision_VMP = Variable.GammaFromShapeAndRate(1, 1);
            var x_vmp         = GaussianPlusNoiseModel(precision_VMP, n, false, noisePrecision);
            var ie_VMP        = new InferenceEngine(new VariationalMessagePassing());

            ie_VMP.ShowWarnings = false;
            ie_VMP.ShowProgress = false;

            Console.WriteLine("N EP MC VMP EMP");
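            // Sweep the sample size N = 10..100 and compare the EP, slice-sampling MC, and VMP estimates
            // of the variance against the empirical variance of the data.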
            for (int j = 0; j < 10; j++)
            {
                Rand.Restart(2);
                //int N = (int)Math.Pow(10, j + 1);
                int N = 10 * (j + 1);

                var noiseP = Enumerable.Range(0, N).Select(i =>
                {
                    if (i % 3 == 0)
                    {
                        return(.1);
                    }
                    if (i % 3 == 1)
                    {
                        return(1);
                    }
                    else
                    {
                        return(10);
                    }
                }).ToArray();

                var data = noiseP.Select(i => Gaussian.Sample(mean, 1.0 / variance) + Gaussian.Sample(0, i)).ToArray();

                Nvar.ObservedValue           = N;
                noisePrecision.ObservedValue = noiseP;
                x_ep.ObservedValue           = data;
                x_mc.ObservedValue           = data;
                x_vmp.ObservedValue          = data;

                double epMean = double.NaN;
                try
                {
                    epMean = ie_EP.Infer <Gamma>(variance_EP).GetMean();
                }
                catch
                {
                }

                double vmpMean        = ie_VMP.Infer <Gamma>(precision_VMP).GetMeanInverse();
                double varianceSample = 2;
                var    ge             = new GammaEstimator();

                Converter <double, double> f = vari =>
                {
                    variance_MC.ObservedValue = vari;
                    return(ie_MC.Infer <Bernoulli>(ev).LogOdds);
                };
                int burnin = 1000, thin = 5, numSamples = 1000;
                var samples = new double[numSamples];
                for (int i = 0; i < burnin; i++)
                {
                    varianceSample = NonconjugateVMP2Tests.SliceSampleUnivariate(varianceSample, f, lower_bound: 0);
                }
                for (int i = 0; i < numSamples; i++)
                {
                    for (int k = 0; k < thin; k++)
                    {
                        varianceSample = NonconjugateVMP2Tests.SliceSampleUnivariate(varianceSample, f, lower_bound: 0);
                    }
                    samples[i] = varianceSample;
                    ge.Add(varianceSample);
                }
                double mcMean        = samples.Sum() / numSamples;
                var    empiricalMean = data.Sum() / data.Length;
                var    empVar        = data.Sum(o => o * o) / data.Length - empiricalMean * empiricalMean;
                Console.WriteLine(N + " " + epMean + " " + mcMean + " " + vmpMean + " " + empVar);
            }
        }
Example #11
        private void GaussianFromMeanAndVarianceTest(double mm, double vm, double mx, double vx, double a, double b)
        {
            Variable <bool>   evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock           block    = Variable.If(evidence);
            Variable <double> mean     = Variable.GaussianFromMeanAndVariance(mm, vm).Named("mean");
            Variable <double> variance = Variable.GammaFromShapeAndRate(a, b).Named("variance");
            Variable <double> x        = Variable.GaussianFromMeanAndVariance(mean, variance).Named("x");

            Variable.ConstrainEqualRandom(x, new Gaussian(mx, vx));
            block.CloseBlock();

            InferenceEngine engine = new InferenceEngine();

            engine.Compiler.RecommendedQuality = QualityBand.Experimental;
            double   evExpected;
            Gaussian xExpected;
            Gamma    vExpected;

            if (a == 1 || a == 2)
            {
                double c = System.Math.Sqrt(2 * b);
                double m = c * (mx - mm);
                double v = c * c * (vx + vm);
                double Z, mu, m2u;
                VarianceGammaTimesGaussianMoments(a, m, v, out Z, out mu, out m2u);
                evExpected = System.Math.Log(Z * c);
                double vu = m2u - mu * mu;
                double r  = Double.IsPositiveInfinity(vx) ? 1.0 : vx / (vx + vm);
                double mp = r * (mu / c + mm) + (1 - r) * mx;
                double vp = r * r * vu / (c * c) + r * vm;
                xExpected = new Gaussian(mp, vp);
                double Zplus1, Zplus2;
                VarianceGammaTimesGaussianMoments(a + 1, m, v, out Zplus1, out mu, out m2u);
                VarianceGammaTimesGaussianMoments(a + 2, m, v, out Zplus2, out mu, out m2u);
                double vmp  = a / b * Zplus1 / Z;
                double vm2p = a * (a + 1) / (b * b) * Zplus2 / Z;
                double vvp  = vm2p - vmp * vmp;
                vExpected = Gamma.FromMeanAndVariance(vmp, vvp);
            }
            else
            {
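                // No closed form for general shape a, so approximate the ground truth by importance
                // sampling: draw (mean, variance, x) from the prior and weight by the Gaussian factor on x.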
                int n = 1000000;
                GaussianEstimator est   = new GaussianEstimator();
                GammaEstimator    vEst  = new GammaEstimator();
                Gaussian          xLike = new Gaussian(mx, vx);
                for (int i = 0; i < n; i++)
                {
                    double m       = Gaussian.Sample(mm, 1 / vm);
                    double v       = Rand.Gamma(a) / b;
                    double xSample = Gaussian.Sample(m, 1 / v);
                    double weight  = System.Math.Exp(xLike.GetLogProb(xSample));
                    est.Add(xSample, weight);
                    vEst.Add(v, weight);
                }
                evExpected = System.Math.Log(est.mva.Count / n);
                xExpected  = est.GetDistribution(new Gaussian());
                vExpected  = vEst.GetDistribution(new Gamma());
            }
            double evActual = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Gaussian xActual = engine.Infer <Gaussian>(x);

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            Gamma vActual = engine.Infer <Gamma>(variance);

            Console.WriteLine("variance = {0} should be {1}", vActual, vExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-10) < 1e-4);
            Assert.True(xExpected.MaxDiff(xActual) < 1e-4);
            Assert.True(vExpected.MaxDiff(vActual) < 1e-4);
        }
Example #12
		public static Gamma RateAverageConditional([SkipIfUniform] Gamma y, double shape, Gamma rate)
		{
			if (y.IsPointMass) return RateAverageConditional(y.Point, shape);
			// q(y) = y^(a1-1) exp(-b1 y)
			// q(b) = b^(a2-1) exp(-b2 b)
			// f(y,b) = y^(a-1) b^a exp(-by)
			// q(y) q(b) f(y,b) = y^(a+a1-2) b^(a+a2-1) exp(-b2 b - b1 y - b y)
			// int over y = b^(a+a2-1) exp(-b2 b) / (b + b1)^(a+a1-1)
			// this is dist of ratio Ga(a+1,1)/Ga(a1-2,1) times Ga(a2,b2) pdf
			double max = shape/y.GetMean()*10;
			int n = 1000;
			double inc = max/n;
			GammaEstimator est = new GammaEstimator();
			for (int i = 0; i < n; i++) {
				double b = (i+1)*inc;
				double logp = (shape + rate.Shape - 1)*Math.Log(b) - b*rate.Rate - (shape + y.Shape - 1)*Math.Log(b + y.Rate);
				est.Add(b, Math.Exp(logp));
			}
			Gamma post = est.GetDistribution(new Gamma());
			//Console.WriteLine("y = {0}, shape = {1}, rate = {2}, post = {3}", y, shape, rate, post);
			return post / rate;
			//throw new NotSupportedException(NotSupportedMessage);
		}
Example #13
		public static Gamma YAverageConditional(Gamma y, double shape, [SkipIfUniform] Gamma rate)
		{
			if (rate.IsPointMass) return YAverageConditional(shape, rate.Point);
			// q(b) = b^(a2-1) exp(-b2 b)
			// f(y,b) = y^(a-1) b^a exp(-by)
			// q(b) f(y,b) = y^(a-1) b^(a+a2-1) exp(-b (y+b2))
			// int over b = y^(a-1) / (y + b2)^(a+a2) = ratio of Ga(a,1)/Ga(a2,1/b2)
			double max = shape/rate.GetMean()*10;
			int n = 1000;
			double inc = max/n;
			GammaEstimator est = new GammaEstimator();
			for (int i = 0; i < n; i++) {
				double x = (i+1)*inc;
				double logp = (shape + y.Shape - 2)*Math.Log(x) - x*y.Rate - (shape + rate.Shape)*Math.Log(x + rate.Rate);
				est.Add(x, Math.Exp(logp));
			}
			Gamma post = est.GetDistribution(new Gamma());
			return post / rate;
			//throw new NotSupportedException(NotSupportedMessage);
		}
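Examples #12 and #13 share one pattern: evaluate the unnormalized posterior on a grid, add each grid point to a GammaEstimator with its density as the weight, and let the estimator moment-match a Gamma. A minimal standalone sketch of that pattern follows; the grid bound and the target density (an unnormalized Gamma(3, 2)) are assumptions chosen only for illustration, not values taken from the operators above.

        // Illustrative sketch, not library code: fit a Gamma to a density known only pointwise
        // by weighted moment matching over a grid, using the same Add/GetDistribution calls as above.
        public static Gamma FitGammaOnGrid()
        {
            var est = new GammaEstimator();
            int n = 1000;
            double max = 10.0;                            // assumed grid upper bound
            double inc = max / n;
            for (int i = 0; i < n; i++)
            {
                double x = (i + 1) * inc;
                double logp = 2 * Math.Log(x) - 2 * x;    // log of x^2 * exp(-2x), an unnormalized Gamma(3, 2)
                est.Add(x, Math.Exp(logp));
            }
            return est.GetDistribution(new Gamma());      // matching mean and variance gives roughly Gamma(3, 2)
        }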
Example #14
        public IHiddenMarkovModel <Mixture <IMultivariateDistribution> > Run(int maxIterations, double likelihoodTolerance)
        {
            // Initialize response object
            var forwardBackward = new ForwardBackward(Normalized);

            do
            {
                maxIterations--;
                if (!_estimatedModel.Likelihood.EqualsTo(0))
                {
                    _currentModel = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <Mixture <IMultivariateDistribution> > {
                        Pi = _estimatedPi, TransitionProbabilityMatrix = _estimatedTransitionProbabilityMatrix, Emissions = _estimatedEmissions
                    }); // new HiddenMarkovModelState<Mixture<IMultivariateDistribution>>(_estimatedPi, _estimatedTransitionProbabilityMatrix, _estimatedEmissions) { LogNormalized = _estimatedModel.LogNormalized };
                    _currentModel.Normalized = Normalized;
                    _currentModel.Likelihood = _estimatedModel.Likelihood;
                }
                // Run Forward-Backward procedure
                forwardBackward.RunForward(_observations, _currentModel);
                forwardBackward.RunBackward(_observations, _currentModel);
                // Calculate Gamma and Xi
                var @params = new MixtureSigmaEstimationParameters <Mixture <IMultivariateDistribution> >
                {
                    Alpha              = forwardBackward.Alpha,
                    Beta               = forwardBackward.Beta,
                    Observations       = _observations,
                    Model              = _currentModel,
                    Normalized         = _currentModel.Normalized,
                    L                  = _currentModel.Emission[0].Components.Length,
                    ObservationWeights = _observationWeights
                };
                _gammaEstimator = new GammaEstimator <Mixture <IMultivariateDistribution> >();
                _ksiEstimator   = new KsiEstimator <Mixture <IMultivariateDistribution> >();
                var mixtureCoefficientsEstimator = new MixtureCoefficientsEstimator <Mixture <IMultivariateDistribution> >();
                var mixtureMuEstimator           = new MixtureMuEstimator <Mixture <IMultivariateDistribution> >();    // Mean
                var mixtureSigmaEstimator        = new MixtureSigmaEstimator <Mixture <IMultivariateDistribution> >(); // Covariance
                var mixtureGammaEstimator        = new MixtureGammaEstimator <Mixture <IMultivariateDistribution> >();
                @params.Gamma           = _gammaEstimator.Estimate(@params);
                @params.GammaComponents = mixtureGammaEstimator.Estimate(@params);


                EstimatePi(_gammaEstimator.Estimate(@params));
                // TODO : weights for A
                EstimateTransitionProbabilityMatrix(_gammaEstimator.Estimate(@params), _ksiEstimator.Estimate(@params), _observationWeights, _observations.Count);

                for (var n = 0; n < _currentModel.N; n++)
                {
                    var mixturesComponents = _currentModel.Emission[n].Coefficients.Length;
                    var distributions      = new IMultivariateDistribution[mixturesComponents];
                    // Calculate coefficients for state n
                    // TODO : weights for W
                    var coefficients = mixtureCoefficientsEstimator.Estimate(@params)[n];
                    if (Normalized)
                    {
                        mixtureCoefficientsEstimator.Denormalize();
                    }
                    // TODO : weights Mu
                    @params.Mu = mixtureMuEstimator.Estimate(@params);
                    for (var l = 0; l < mixturesComponents; l++)
                    {
                        // TODO : weights Sigma
                        distributions[l] = new NormalDistribution(mixtureMuEstimator.Estimate(@params)[n, l], mixtureSigmaEstimator.Estimate(@params)[n, l]);
                    }
                    _estimatedEmissions[n] = new Mixture <IMultivariateDistribution>(coefficients, distributions);
                }
                _estimatedModel = HiddenMarkovModelStateFactory.GetState(new ModelCreationParameters <Mixture <IMultivariateDistribution> > {
                    Pi = _estimatedPi, TransitionProbabilityMatrix = _estimatedTransitionProbabilityMatrix, Emissions = _estimatedEmissions
                });
                _estimatedModel.Normalized = Normalized;
                _estimatedModel.Likelihood = forwardBackward.RunForward(_observations, _estimatedModel);
                _likelihoodDelta           = Math.Abs(Math.Abs(_currentModel.Likelihood) - Math.Abs(_estimatedModel.Likelihood));
                Debug.WriteLine("Iteration {3} , Current {0}, Estimate {1} Likelihood delta {2}", _currentModel.Likelihood, _estimatedModel.Likelihood, _likelihoodDelta, maxIterations);
            } while (_currentModel != _estimatedModel && maxIterations > 0 && _likelihoodDelta > likelihoodTolerance);

            return(_estimatedModel);
        }
Example #15
        public void Sample(Options options, Matrix data)
        {
            if (options.numParams > 2)
            {
                throw new Exception("numParams > 2");
            }
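            // Gibbs sampler for an IRT-style model: each response is driven by a latent
            // x ~ N(discrimination * (ability - difficulty), 1/prec) thresholded at zero, and the sampler
            // alternates over x and prec, abilities, difficulties, discriminations, and their hyperparameters.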
            int numStudents  = data.Rows;
            int numQuestions = data.Cols;
            // initialize the sampler at the mean of the priors (not sampling from the priors)
            double abilityMean        = abilityMeanPrior.GetMean();
            double abilityPrec        = abilityPrecPrior.GetMean();
            double difficultyMean     = difficultyMeanPrior.GetMean();
            double difficultyPrec     = difficultyPrecPrior.GetMean();
            double discriminationMean = discriminationMeanPrior.GetMean();
            double discriminationPrec = discriminationPrecPrior.GetMean();

            double[]            ability             = new double[numStudents];
            double[]            difficulty          = new double[numQuestions];
            List <double>[]     difficultySamples   = new List <double> [numQuestions];
            GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                difficultyEstimator[question] = new GaussianEstimator();
                difficultySamples[question]   = new List <double>();
                if (difficultyObserved != null)
                {
                    difficulty[question] = difficultyObserved[question];
                    difficultyEstimator[question].Add(difficultyObserved[question]);
                    difficultySamples[question].Add(difficultyObserved[question]);
                }
            }
            List <double>[]     abilitySamples   = new List <double> [numStudents];
            GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
            for (int student = 0; student < abilityEstimator.Length; student++)
            {
                abilityEstimator[student] = new GaussianEstimator();
                abilitySamples[student]   = new List <double>();
                if (abilityObserved != null)
                {
                    ability[student] = abilityObserved[student];
                    abilityEstimator[student].Add(abilityObserved[student]);
                    abilitySamples[student].Add(abilityObserved[student]);
                }
            }
            double[]         discrimination          = new double[numQuestions];
            List <double>[]  discriminationSamples   = new List <double> [numQuestions];
            GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                discriminationEstimator[question] = new GammaEstimator();
                discriminationSamples[question]   = new List <double>();
                discrimination[question]          = 1;
                if (discriminationObserved != null)
                {
                    discrimination[question] = discriminationObserved[question];
                    discriminationEstimator[question].Add(discriminationObserved[question]);
                    discriminationSamples[question].Add(discriminationObserved[question]);
                }
            }
            responseProbMean = new Matrix(numStudents, numQuestions);
            int    niters           = options.numberOfSamples;
            int    burnin           = options.burnIn;
            double logisticVariance = Math.PI * Math.PI / 3;
            double shape            = 4.5;
            Gamma  precPrior        = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);

            precPrior      = Gamma.PointMass(1);
            double[,] prec = new double[numStudents, numQuestions];
            double[,] x    = new double[numStudents, numQuestions];
            int numRejected = 0, numAttempts = 0;

            for (int iter = 0; iter < niters; iter++)
            {
                for (int student = 0; student < numStudents; student++)
                {
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // sample prec given ability, difficulty, x
                        // N(x; ability-difficulty, 1/prec) = Gamma(prec; 1.5, (x-ability+difficulty)^2/2)
                        Gamma  precPost = precPrior;
                        double xMean    = (ability[student] - difficulty[question]) * discrimination[question];
                        double delta    = x[student, question] - xMean;
                        Gamma  like     = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                        precPost.SetToProduct(precPost, like);
                        prec[student, question] = precPost.Sample();
                        // sample x given ability, difficulty, prec, data
                        // using an independence chain MH
                        bool     y      = (data[student, question] > 0);
                        double   sign   = y ? 1.0 : -1.0;
                        Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                        // we want to sample from xPrior*I(x>0)
                        // instead we sample from xPost
                        Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                        double   oldx  = x[student, question];
                        double   newx  = xPost.Sample();
                        numAttempts++;
                        if (newx * sign < 0)
                        {
                            newx = oldx; // rejected
                            numRejected++;
                        }
                        else
                        {
                            // importance weights
                            double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                            double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                            // acceptance ratio
                            double paccept = Math.Exp(neww - oldw);
                            if (paccept < 1 && Rand.Double() > paccept)
                            {
                                newx = oldx; // rejected
                                numRejected++;
                            }
                        }
                        x[student, question] = newx;
                        if (iter >= burnin)
                        {
                            double responseProb = MMath.Logistic(xMean);
                            responseProbMean[student, question] += responseProb;
                        }
                    }
                }
                if (abilityObserved == null)
                {
                    // sample ability given difficulty, prec, x
                    for (int student = 0; student < numStudents; student++)
                    {
                        Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                        for (int question = 0; question < numQuestions; question++)
                        {
                            // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                            Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, abilityLike);
                        }
                        ability[student] = post.Sample();
                        if (iter >= burnin)
                        {
                            abilityEstimator[student].Add(post);
                            abilitySamples[student].Add(ability[student]);
                        }
                    }
                }
                // sample difficulty given ability, prec, x
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
                    for (int student = 0; student < numStudents; student++)
                    {
                        // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                        if (discrimination[question] > 0)
                        {
                            Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, like);
                        }
                    }
                    difficulty[question] = post.Sample();
                    if (iter >= burnin)
                    {
                        //if (difficulty[question] > 100)
                        //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                        difficultyEstimator[question].Add(post);
                        difficultySamples[question].Add(difficulty[question]);
                    }
                }
                if (options.numParams > 1 && discriminationObserved == null)
                {
                    // sample discrimination given ability, difficulty, prec, x
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // moment-matching on the prior
                        Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                        Gaussian post        = approxPrior;
                        for (int student = 0; student < numStudents; student++)
                        {
                            // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                            double delta = ability[student] - difficulty[question];
                            if (delta > 0)
                            {
                                Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                                post.SetToProduct(post, like);
                            }
                        }
                        TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                        double            olddisc   = discrimination[question];
                        double            newdisc   = postTrunc.Sample();
                        // importance weights
                        Func <double, double> priorLogProb = delegate(double d)
                        {
                            double logd = Math.Log(d);
                            return(Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                        };
                        double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                        double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                        // acceptance ratio
                        double paccept = Math.Exp(neww - oldw);
                        if (paccept < 1 && Rand.Double() > paccept)
                        {
                            // rejected
                        }
                        else
                        {
                            discrimination[question] = newdisc;
                        }
                        if (iter >= burnin)
                        {
                            discriminationEstimator[question].Add(discrimination[question]);
                            discriminationSamples[question].Add(discrimination[question]);
                        }
                    }
                }
                // sample abilityMean given ability, abilityPrec
                Gaussian abilityMeanPost = abilityMeanPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
                    abilityMeanPost *= like;
                }
                abilityMean = abilityMeanPost.Sample();
                // sample abilityPrec given ability, abilityMean
                Gamma abilityPrecPost = abilityPrecPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
                    abilityPrecPost *= like;
                }
                abilityPrec = abilityPrecPost.Sample();
                // sample difficultyMean given difficulty, difficultyPrec
                Gaussian difficultyMeanPost = difficultyMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
                    difficultyMeanPost *= like;
                }
                difficultyMean = difficultyMeanPost.Sample();
                // sample difficultyPrec given difficulty, difficultyMean
                Gamma difficultyPrecPost = difficultyPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
                    difficultyPrecPost *= like;
                }
                difficultyPrec = difficultyPrecPost.Sample();
                // sample discriminationMean given discrimination, discriminationPrec
                Gaussian discriminationMeanPost = discriminationMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
                    discriminationMeanPost *= like;
                }
                discriminationMean = discriminationMeanPost.Sample();
                // sample discriminationPrec given discrimination, discriminationMean
                Gamma discriminationPrecPost = discriminationPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
                    discriminationPrecPost *= like;
                }
                discriminationPrec = discriminationPrecPost.Sample();
                //if (iter % 1 == 0)
                //    Console.WriteLine("iter = {0}", iter);
            }
            //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
            //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
            int numSamplesUsed = niters - burnin;

            responseProbMean.Scale(1.0 / numSamplesUsed);
            //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
            difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            abilityPost    = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            if (options.numParams > 1)
            {
                discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
            }
            abilityCred    = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
            difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
            bool saveSamples = false;

            if (saveSamples)
            {
                using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
                {
                    int q = 11;
                    writer.Write("difficulty", difficultySamples[q]);
                    writer.Write("discrimination", discriminationSamples[q]);
                }
            }
        }
Example #16
        public void GammaEstimator_ABBAObservations_NotNormalizedTest()
        {
            var startDistribution = new[] { 0.85, 0.15 };
            // s = 0, t = 1
            var tpm = new double[2][];

            tpm[0] = new[] { 0.3, 0.7 };
            tpm[1] = new[] { 0.1, 0.9 };

            var observations = new List <IObservation>
            {
                new Observation(new double[] { 0 }, "A"),
                new Observation(new double[] { 1 }, "B"),
                new Observation(new double[] { 1 }, "B"),
                new Observation(new double[] { 0 }, "A")
            };

            var emissions = new DiscreteDistribution[2];

            emissions[0] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.4, 0.6 });
            emissions[1] = new DiscreteDistribution(new double[] { 0, 1 }, new[] { 0.5, 0.5 });

            var model = HiddenMarkovModelFactory.GetModel(new ModelCreationParameters <DiscreteDistribution>()
            {
                Pi = startDistribution, TransitionProbabilityMatrix = tpm, Emissions = emissions
            }); // new HiddenMarkovModel(startDistribution, tpm, emissions) { LogNormalized = false };

            model.Normalized = false;
            var baseParameters = new BasicEstimationParameters <DiscreteDistribution> {
                Model = model, Observations = observations, Normalized = model.Normalized
            };
            var alphaEstimator = new AlphaEstimator <DiscreteDistribution>();
            var alpha          = alphaEstimator.Estimate(baseParameters);
            var betaEstimator  = new BetaEstimator <DiscreteDistribution>();
            var beta           = betaEstimator.Estimate(baseParameters);

            var @params = new AdvancedEstimationParameters <DiscreteDistribution>
            {
                Alpha        = alpha,
                Beta         = beta,
                Observations = observations,
                Model        = model
            };
            var gammaEstimator = new GammaEstimator <DiscreteDistribution>();
            var gamma          = gammaEstimator.Estimate(@params);

            Assert.AreEqual(0.8258482510939813, gamma[0][0]);
            Assert.AreEqual(0.17415174890601867, gamma[0][1]);
            Assert.AreEqual(1d, gamma[0].Sum());

            Assert.AreEqual(0.3069572858154187, gamma[1][0]);
            Assert.AreEqual(0.69304271418458141, gamma[1][1]);
            Assert.AreEqual(1d, gamma[1].Sum());

            Assert.AreEqual(0.17998403530294202, gamma[2][0]);
            Assert.AreEqual(0.82001596469705806, gamma[2][1]);
            Assert.AreEqual(1d, gamma[2].Sum());

            Assert.AreEqual(0.112893449466425, gamma[3][0]);
            Assert.AreEqual(0.887106550533575, gamma[3][1]);
            Assert.AreEqual(1d, gamma[3].Sum());
        }