Exemplo n.º 1
0
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoubleOp"]/message_doc[@name="DoubleAverageConditional(Gaussian, Discrete)"]/*'/>
        public static Gaussian DoubleAverageConditional(Gaussian Double, Discrete Integer)
        {
            if (Integer.IsPointMass)
            {
                return Gaussian.PointMass(Factor.Double(Integer.Point));
            }
            // Z = sum_i int_x q(x) delta(x - i) q(i) dx
            //   = sum_i q(x=i) q(i)
            // First pass: find the largest log-density over the support so the
            // weights below can be rescaled into a numerically safe range.
            double maxLogProb = double.NegativeInfinity;
            for (int i = 0; i < Integer.Dimension; i++)
            {
                double logProb = Double.GetLogProb(i);
                if (logProb > maxLogProb)
                {
                    maxLogProb = logProb;
                }
            }
            if (double.IsNegativeInfinity(maxLogProb))
            {
                throw new AllZeroException();
            }
            // Second pass: add each integer value as a weighted point and
            // project the resulting mixture onto a single Gaussian.
            GaussianEstimator estimator = new GaussianEstimator();
            for (int i = 0; i < Integer.Dimension; i++)
            {
                double weight = Integer[i] * System.Math.Exp(Double.GetLogProb(i) - maxLogProb);
                estimator.Add(i, weight);
            }
            Gaussian posterior = estimator.GetDistribution(new Gaussian());
            // Divide out the incoming message to obtain the outgoing EP message.
            posterior.SetToRatio(posterior, Double, ForceProper);
            return posterior;
        }
Exemplo n.º 2
0
        /// <summary>
        /// Computes the standardized mean squared error (SMSE) and the mean
        /// standardized log loss (MSLL) of the predictions in <paramref name="pred"/>
        /// against <paramref name="test"/>, normalising per dimension by Gaussians
        /// fitted to the test data (for SMSE) and the training data (for MSLL).
        /// </summary>
        /// <param name="test">Test targets, indexed [dimension, point].</param>
        /// <param name="train">Training targets, indexed [dimension, point].</param>
        /// <param name="pred">Predictive Gaussians, indexed [dimension, point].</param>
        /// <param name="SMSE">On return, the standardized mean squared error.</param>
        /// <param name="MSLL">On return, the mean standardized log loss.</param>
        public static void CalculateSMSEandMSLL(double[,] test, double[,] train, Gaussian[,] pred, out double SMSE, out double MSLL)
        {
            int numDims = test.GetLength(0);
            int numPoints = test.GetLength(1);
            SMSE = 0;
            MSLL = 0;
            for (int d = 0; d < numDims; d++)
            {
                // Fit a Gaussian to this dimension's test data and training data.
                var testEst = new GaussianEstimator();
                var trainEst = new GaussianEstimator();
                for (int n = 0; n < numPoints; n++)
                {
                    testEst.Add(test[d, n]);
                    trainEst.Add(train[d, n]);
                }
                Gaussian testFit = testEst.GetDistribution(new Gaussian());
                Gaussian trainFit = trainEst.GetDistribution(new Gaussian());
                double squaredError = 0;
                for (int n = 0; n < numPoints; n++)
                {
                    // Log loss of the prediction, baselined by the training-data Gaussian.
                    MSLL += trainFit.GetLogProb(test[d, n]) - pred[d, n].GetLogProb(test[d, n]);
                    double residual = pred[d, n].GetMean() - test[d, n];
                    squaredError += residual * residual;
                }
                // Standardize the squared error by the test-data variance.
                SMSE += squaredError / testFit.GetVariance();
            }
            MSLL /= (double)(numDims * numPoints);
            SMSE /= (double)(numDims * numPoints);
        }
Exemplo n.º 3
0
        /// <summary>
        /// Initialises the parameters from data
        /// </summary>
        /// <param name="X">X data - initialises lengths</param>
        /// <param name="y">y data - initialises signal standard deviation</param>
        public void InitialiseFromData(IList<Vector> X, Vector y)
        {
            if (X.Count != y.Count)
            {
                throw new ArgumentException("X and y data counts don't match");
            }

            int numData = X.Count;
            int numLengths = X[0].Count;
            double[] logLengths = new double[numLengths];

            // One estimator per input dimension, plus one for the targets.
            GaussianEstimator[] xEstimators = new GaussianEstimator[numLengths];
            GaussianEstimator yEstimator = new GaussianEstimator();
            for (int l = 0; l < numLengths; l++)
            {
                xEstimators[l] = new GaussianEstimator();
            }

            for (int d = 0; d < numData; d++)
            {
                Vector point = X[d];
                for (int l = 0; l < numLengths; l++)
                {
                    xEstimators[l].Add(point[l]);
                }
                yEstimator.Add(y[d]);
            }

            double dimMult = Math.Sqrt((double)numLengths);
            for (int l = 0; l < numLengths; l++)
            {
                Gaussian fit = xEstimators[l].GetDistribution(new Gaussian());
                double length = 0.5 * dimMult * Math.Sqrt(fit.GetVariance());

                // If the variance is zero, fall back to a nominal length.
                // This is different from the situation where the length
                // is merely very small.
                if (length < 0.000000001)
                {
                    length = 1.0;
                }
                logLengths[l] = Math.Log(length);
            }

            double sd = Math.Sqrt(yEstimator.GetDistribution(new Gaussian()).GetVariance());
            if (sd < 0.000000001)
            {
                sd = 1.0;
            }
            SetupParams(logLengths, Math.Log(sd));
        }
Exemplo n.º 4
0
        /// <summary>
        /// Checks that a GaussianEstimator fed only point masses at +infinity or
        /// -infinity reproduces that same point mass as its estimated distribution.
        /// </summary>
        public void GaussianEstimatorInfinityTest()
        {
            double[] infinities = { double.PositiveInfinity, double.NegativeInfinity };
            foreach (double infinity in infinities)
            {
                Gaussian pointMass = Gaussian.PointMass(infinity);
                var est = new GaussianEstimator();
                est.Add(pointMass);
                est.Add(pointMass);
                Gaussian estimated = est.GetDistribution(new Gaussian());
                Assert.Equal(pointMass, estimated);
            }
        }
        /// <summary>
        /// Importance-sampling estimate of the posterior over 'a' under the
        /// factor max = Max(a, b): samples of 'a' are weighted by the density
        /// of <paramref name="max"/> at Max(aSample, bSample).
        /// </summary>
        public static Gaussian MaxAPosterior(Gaussian max, Gaussian a, Gaussian b)
        {
            const int sampleCount = 10000000;
            var estimator = new GaussianEstimator();
            for (int i = 0; i < sampleCount; i++)
            {
                double sampleA = a.Sample();
                double sampleB = b.Sample();
                // Importance weight from the likelihood of the observed max.
                double weight = System.Math.Exp(max.GetLogProb(System.Math.Max(sampleA, sampleB)));
                estimator.Add(sampleA, weight);
            }
            return estimator.GetDistribution(new Gaussian());
        }
Exemplo n.º 6
0
        /// <summary>
        /// Initialises the parameters from data
        /// </summary>
        /// <param name="X">X data - initialises weight variances</param>
        public void InitialiseFromData(IList<Vector> X)
        {
            int numData = X.Count;
            int numInputs = X[0].Count;
            double[] logWeightVariances = new double[numInputs];

            // Fit a Gaussian to each input dimension.
            GaussianEstimator[] estimators = new GaussianEstimator[numInputs];
            for (int l = 0; l < numInputs; l++)
            {
                estimators[l] = new GaussianEstimator();
            }
            for (int d = 0; d < numData; d++)
            {
                Vector point = X[d];
                for (int l = 0; l < numInputs; l++)
                {
                    estimators[l].Add(point[l]);
                }
            }

            double dimMult = System.Math.Sqrt((double)numInputs);
            for (int l = 0; l < numInputs; l++)
            {
                Gaussian fit = estimators[l].GetDistribution(new Gaussian());
                double length = 0.5 * dimMult * System.Math.Sqrt(fit.GetVariance());

                // If the variance is zero, set to some nominal value.
                // This is different from the situation where length
                // is very small.
                if (length < 0.000000001)
                {
                    length = 1.0;
                }

                // Weight standard deviation is the inverse length; store
                // 2*log(sd), i.e. the log of the weight variance.
                double weightSDev = 1.0 / length;
                logWeightVariances[l] = 2.0 * System.Math.Log(weightSDev);
            }
            double logBiasWeightVariance = -System.Math.Log((double)numInputs);
            SetupParams(logWeightVariances, logBiasWeightVariance);
        }
Exemplo n.º 7
0
        // Quote an estimator expression
        /// <summary>
        /// Builds an expression that constructs an estimator equivalent to
        /// <paramref name="value"/>. Estimators that carry a Dimension pass it
        /// to the constructor; parameterless estimators are constructed bare.
        /// Returns null for unrecognised types.
        /// </summary>
        public static IExpression QuoteEstimator(object value)
        {
            // todo: remove and use Construction attributes
            IExpression expr = null;

            // These estimator types take no constructor arguments, so only the
            // runtime type is needed. (The original code declared an unused
            // local cast in each of these branches.)
            if (value is BernoulliEstimator ||
                value is GammaEstimator ||
                value is GaussianEstimator)
            {
                expr = Builder.NewObject(value.GetType());
            }
            else if (value is DirichletEstimator)
            {
                expr = Builder.NewObject(value.GetType(), Quote(((DirichletEstimator)value).Dimension));
            }
            else if (value is DiscreteEstimator)
            {
                expr = Builder.NewObject(value.GetType(), Quote(((DiscreteEstimator)value).Dimension));
            }
            else if (value is VectorGaussianEstimator)
            {
                expr = Builder.NewObject(value.GetType(), Quote(((VectorGaussianEstimator)value).Dimension));
            }
            else if (value is WishartEstimator)
            {
                expr = Builder.NewObject(value.GetType(), Quote(((WishartEstimator)value).Dimension));
            }

            return expr;
        }
        /// <summary>
        /// Initialises the parameters from data. The variance is
        /// set as the square of the inverse of the 'length' of the
        /// input feature. Note that the variance we are trying to set
        /// up here corresponds to the variance of the weight parameters
        /// in a linear model, not to the variance of the input feature.
        /// </summary>
        /// <param name="X">X data - initialises variances</param>
        public void InitialiseFromData(IList<Vector> X)
        {
            int numData = X.Count;
            int numInputs = X[0].Count;
            double[] logVars = new double[numInputs];

            // One estimator per input feature.
            GaussianEstimator[] estimators = new GaussianEstimator[numInputs];
            for (int l = 0; l < numInputs; l++)
            {
                estimators[l] = new GaussianEstimator();
            }
            for (int d = 0; d < numData; d++)
            {
                Vector point = X[d];
                for (int l = 0; l < numInputs; l++)
                {
                    estimators[l].Add(point[l]);
                }
            }

            double dimMult = Math.Sqrt((double)numInputs);
            for (int l = 0; l < numInputs; l++)
            {
                Gaussian fit = estimators[l].GetDistribution(new Gaussian());
                double length = 0.5 * dimMult * Math.Sqrt(fit.GetVariance());

                // If the variance is zero, set to some nominal value.
                // This is different from the situation where length
                // is very small.
                if (length < 0.000000001)
                {
                    length = 1.0;
                }

                // variance = inv(sq(length))
                logVars[l] = -2.0 * Math.Log(length);
            }
            SetupParams(logVars);
        }
Exemplo n.º 9
0
        /// <summary>
        /// Importance-sampling estimate of the posterior over x restricted to
        /// x &gt; 0, where x ~ N(mean, 1/prec) and prec is drawn from
        /// <paramref name="precPrior"/>. Also estimates the evidence for the
        /// IsPositive constraint (averaged over samples).
        /// </summary>
        private Gaussian StudentIsPositiveExact(double mean, Gamma precPrior, out double evidence)
        {
            // importance sampling for true answer
            const int sampleCount = 1000000;
            var estimator = new GaussianEstimator();
            evidence = 0;
            for (int iter = 0; iter < sampleCount; iter++)
            {
                double precSample = precPrior.Sample();
                Gaussian prior = Gaussian.FromMeanAndPrecision(mean, precSample);
                double logWeight = IsPositiveOp.LogAverageFactor(true, prior);
                evidence += System.Math.Exp(logWeight);
                double draw = prior.Sample();
                // Only positive draws contribute to the constrained posterior.
                if (draw > 0)
                {
                    estimator.Add(draw);
                }
            }
            evidence /= sampleCount;
            return estimator.GetDistribution(new Gaussian());
        }
Exemplo n.º 10
0
        // (0.5,0.5):
        // weight distribution = Gaussian(-0.02787, 0.2454)
        // error rate = 0.0527452589744697 = 1221/23149
        // (1,1):
        // weight distribution = Gaussian(-0.03117, 0.3967)
        // error rate = 0.0522268780508877 = 1209/23149
        // (2,2):
        // weight distribution = Gaussian(-0.03522, 0.6794)
        // error rate = 0.0530476478465593 = 1228/23149
        // (10,10):
        // weight distribution = Gaussian(-0.05455, 2.96)
        // error rate = 0.0580586634411854 = 1344/23149

#if SUPPRESS_UNREACHABLE_CODE_WARNINGS
#pragma warning restore 162
#endif

        /// <summary>
        /// Loads previously-trained weight and bias posteriors from disk,
        /// prints the Gaussian fitted to the weight means, then measures the
        /// prediction error rate over the rcv1 test set.
        /// </summary>
        public static void Rcv1Test2()
        {
            GaussianArray   wPost;
            Gaussian        biasPost;
            // NOTE(review): BinaryFormatter is insecure and removed in .NET 9;
            // acceptable for this local experiment, but it must never be used
            // on untrusted data.
            BinaryFormatter serializer = new BinaryFormatter();

            //TODO: change path
            using (Stream stream = File.OpenRead(@"c:\Users\minka\Downloads\rcv1\weights.bin"))
            {
                // The stream holds the weight array followed by the bias, in order.
                wPost    = (GaussianArray)serializer.Deserialize(stream);
                biasPost = (Gaussian)serializer.Deserialize(stream);
            }
            if (true)
            {
                // Summarize the weight posterior means with a single Gaussian.
                GaussianEstimator est = new GaussianEstimator();
                foreach (Gaussian item in wPost)
                {
                    est.Add(item.GetMean());
                }
                Console.WriteLine("weight distribution = {0}", est.GetDistribution(new Gaussian()));
            }
            var predict = new BpmPredict2();

            predict.SetPriors(wPost, biasPost);
            int count  = 0;
            int errors = 0;

            //TODO: change path
            // Stream the test set and count misclassifications.
            foreach (Instance instance in new VwReader(@"c:\Users\minka\Downloads\rcv1\rcv1.test.vw.gz"))
            {
                bool yPred = predict.Predict(instance);
                if (yPred != instance.label)
                {
                    errors++;
                }
                count++;
            }
            Console.WriteLine("error rate = {0} = {1}/{2}", (double)errors / count, errors, count);
        }
Exemplo n.º 11
0
        /// <summary>
        /// Initialises the parameters from data. The variance is
        /// set as the square of the inverse of the 'length' of the
        /// input feature. Note that the variance we are trying to set
        /// up here corresponds to the variance of the weight parameters
        /// in a linear model, not to the variance of the input feature.
        /// </summary>
        /// <param name="X">X data - initialises variances</param>
        public void InitialiseFromData(IList<Vector> X)
        {
            int dims = X[0].Count;
            double[] logVariances = new double[dims];

            // Accumulate per-dimension statistics over all data points.
            GaussianEstimator[] stats = new GaussianEstimator[dims];
            for (int j = 0; j < dims; j++)
            {
                stats[j] = new GaussianEstimator();
            }
            foreach (Vector sample in X)
            {
                for (int j = 0; j < dims; j++)
                {
                    stats[j].Add(sample[j]);
                }
            }

            double scale = Math.Sqrt((double)dims);
            for (int j = 0; j < dims; j++)
            {
                double variance = stats[j].GetDistribution(new Gaussian()).GetVariance();
                double length = 0.5 * scale * Math.Sqrt(variance);

                // Zero-variance features get a nominal length; this is
                // different from the situation where length is very small.
                if (length < 0.000000001)
                {
                    length = 1.0;
                }

                // variance = inv(sq(length))
                logVariances[j] = -2.0 * Math.Log(length);
            }
            SetupParams(logVariances);
        }
Exemplo n.º 12
0
        /// <summary>
        /// Initialises the parameters from data
        /// </summary>
        /// <param name="X">X data - initialises lengths</param>
        /// <param name="y">y data - initialises signal standard deviation</param>
        public void InitialiseFromData(IList<Vector> X, Vector y)
        {
            if (X.Count != y.Count)
            {
                throw new ArgumentException("X and y data counts don't match");
            }

            int count = X.Count;
            int dims = X[0].Count;
            double[] logLengths = new double[dims];

            // Per-dimension estimators for the inputs, one estimator for y.
            GaussianEstimator[] xStats = new GaussianEstimator[dims];
            GaussianEstimator yStats = new GaussianEstimator();
            for (int j = 0; j < dims; j++)
            {
                xStats[j] = new GaussianEstimator();
            }
            for (int i = 0; i < count; i++)
            {
                Vector sample = X[i];
                for (int j = 0; j < dims; j++)
                {
                    xStats[j].Add(sample[j]);
                }
                yStats.Add(y[i]);
            }

            double scale = Math.Sqrt((double)dims);
            for (int j = 0; j < dims; j++)
            {
                double variance = xStats[j].GetDistribution(new Gaussian()).GetVariance();
                double length = 0.5 * scale * Math.Sqrt(variance);

                // Zero-variance inputs get a nominal length; this is different
                // from the situation where the length is merely very small.
                if (length < 0.000000001)
                {
                    length = 1.0;
                }
                logLengths[j] = Math.Log(length);
            }

            double sd = Math.Sqrt(yStats.GetDistribution(new Gaussian()).GetVariance());
            if (sd < 0.000000001)
            {
                sd = 1.0;
            }
            SetupParams(logLengths, Math.Log(sd));
        }
Exemplo n.º 13
0
		/// <summary>
		/// EP message to 'sample'
		/// </summary>
		/// <param name="sample">Incoming message from 'sample'.</param>
		/// <param name="mean">Incoming message from 'mean'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="precision">Incoming message from 'precision'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="to_precision">Previous outgoing message to 'precision', used to build the quadrature proposal.</param>
		/// <returns>The outgoing EP message to the 'sample' argument</returns>
		/// <remarks><para>
		/// The outgoing message is a distribution matching the moments of 'sample' as the random arguments are varied.
		/// The formula is <c>proj[p(sample) sum_(mean,precision) p(mean,precision) factor(sample,mean,precision)]/p(sample)</c>.
		/// </para></remarks>
		/// <exception cref="ImproperMessageException"><paramref name="mean"/> is not a proper distribution</exception>
		/// <exception cref="ImproperMessageException"><paramref name="precision"/> is not a proper distribution</exception>
		public static Gaussian SampleAverageConditional(Gaussian sample, [SkipIfUniform] Gaussian mean, [SkipIfUniform] Gamma precision, Gamma to_precision)
		{
			if (sample.IsUniform() && precision.Shape <= 1.0) sample = Gaussian.FromNatural(1e-20, 1e-20);
			if (precision.IsPointMass) {
				return SampleAverageConditional(mean, precision.Point);
			} else if (sample.IsUniform()) {
				// for large vx, Z =approx N(mx; mm, vx+vm+E[1/prec])
				double mm,mv;
				mean.GetMeanAndVariance(out mm, out mv);
				// NOTE: this error may happen because sample didn't receive any message yet under the schedule.
				// Need to make the scheduler smarter to avoid this.
				if (precision.Shape <= 1.0) throw new ArgumentException("The posterior has infinite variance due to precision distributed as "+precision+" (shape <= 1).  Try using a different prior for the precision, with shape > 1.");
				return Gaussian.FromMeanAndVariance(mm, mv + precision.GetMeanInverse());
			} else if (mean.IsUniform() || precision.IsUniform()) {
				return Gaussian.Uniform();
			} else if (sample.IsPointMass) {
				// The correct answer here is not uniform, but rather a limit.  
				// However it doesn't really matter what we return since multiplication by a point mass 
				// always yields a point mass.
				return Gaussian.Uniform();
			} else if (!precision.IsProper()) {
				throw new ImproperMessageException(precision);
			} else {
				// The formula is int_prec int_mean N(x;mean,1/prec) p(x) p(mean) p(prec) =
				// int_prec N(x; mm, mv + 1/prec) p(x) p(prec) =
				// int_prec N(x; new xm, new xv) N(xm; mm, mv + xv + 1/prec) p(prec)
				// Let R = Prec/(Prec + mean.Prec)
				// new xv = inv(R*mean.Prec + sample.Prec)
				// new xm = xv*(R*mean.PM + sample.PM)

				// In the case where sample and mean are improper distributions, 
				// we must only consider values of prec for which (new xv > 0).
				// This happens when R*mean.Prec > -sample.Prec
				// As a function of Prec, R*mean.Prec has a singularity at Prec=-mean.Prec
				// This function is greater than a threshold when Prec is sufficiently small or sufficiently large.
				// Therefore we construct an interval of Precs to exclude from the integration.
				double xm, xv, mm, mv;
				sample.GetMeanAndVarianceImproper(out xm, out xv);
				mean.GetMeanAndVarianceImproper(out mm, out mv);
				double lowerBound = 0;
				double upperBound = Double.PositiveInfinity;
				bool precisionIsBetween = true;
				if (mean.Precision >= 0) {
					if (sample.Precision < -mean.Precision) throw new ImproperMessageException(sample);
					//lowerBound = -mean.Precision * sample.Precision / (mean.Precision + sample.Precision);
					lowerBound = -1.0 / (xv + mv);
				} else {  // mean.Precision < 0
					if (sample.Precision < 0) {
						precisionIsBetween = true;
						lowerBound = -1.0 / (xv + mv);
						upperBound = -mean.Precision;
					} else if (sample.Precision < -mean.Precision) {
						precisionIsBetween = true;
						lowerBound = 0;
						upperBound = -mean.Precision;
					} else {
						// in this case, the precision should NOT be in this interval.
						precisionIsBetween = false;
						lowerBound = -mean.Precision;
						// BUG FIX: the original assigned lowerBound twice here (a dead
						// store), leaving upperBound at infinity so nearly all nodes were
						// excluded.  The second bound of the excluded interval is
						// -1/(xv+mv), mirroring the sibling branches above.
						upperBound = -1.0 / (xv + mv);
					}
				}
				double[] nodes = new double[QuadratureNodeCount];
				double[] logWeights = new double[nodes.Length];
				Gamma precMarginal = precision*to_precision;
				QuadratureNodesAndWeights(precMarginal, nodes, logWeights);
				if (!to_precision.IsUniform()) {
					// modify the weights
					for (int i = 0; i < logWeights.Length; i++) {
						logWeights[i] += precision.GetLogProb(nodes[i]) - precMarginal.GetLogProb(nodes[i]);
					}
				}
				GaussianEstimator est = new GaussianEstimator();
				double shift = 0;
				for (int i = 0; i < nodes.Length; i++) {
					double newVar, newMean;
					Assert.IsTrue(nodes[i] > 0);
					if ((nodes[i] > lowerBound && nodes[i] < upperBound) != precisionIsBetween) continue;
					// the following works even if sample is uniform. (sample.Precision == 0)
					if (mean.IsPointMass) {
						// take limit mean.Precision -> Inf
						newVar = 1.0 / (nodes[i] + sample.Precision);
						newMean = newVar * (nodes[i] * mean.Point + sample.MeanTimesPrecision);
					} else {
						// mean.Precision < Inf
						double R = nodes[i] / (nodes[i] + mean.Precision);
						newVar = 1.0 / (R * mean.Precision + sample.Precision);
						newMean = newVar * (R * mean.MeanTimesPrecision + sample.MeanTimesPrecision);
					}
					double lp = Gaussian.GetLogProb(xm, mm, xv + mv + 1.0 / nodes[i]);
					if (i == 0) shift = lp;
					double f = Math.Exp(logWeights[i] + lp - shift);
					est.Add(Gaussian.FromMeanAndVariance(newMean, newVar), f);
				}
				double Z = est.mva.Count;
				if (double.IsNaN(Z)) throw new Exception("Z is nan");
				if (Z == 0.0) {
					throw new Exception("Quadrature found zero mass");
				}
				Gaussian result = est.GetDistribution(new Gaussian());
				if (modified && !sample.IsUniform()) {
					// heuristic method to avoid improper messages:
					// the message's mean must be E[mean] (regardless of context) and the variance is chosen to match the posterior mean when multiplied by context
					double sampleMean = result.GetMean();
					if (sampleMean != mm) {
						result.Precision = (sample.MeanTimesPrecision-sampleMean*sample.Precision)/(sampleMean - mm);
						if (result.Precision < 0) throw new Exception("internal: sampleMean is not between sample.Mean and mean.Mean");
						result.MeanTimesPrecision = result.Precision*mm;
					}
				} else {
					if (result.IsPointMass) throw new Exception("Quadrature found zero variance");
					result.SetToRatio(result, sample, ForceProper);
				}
				return result;
			}
		}
Exemplo n.º 14
0
        public void Sample(Options options, Matrix data)
        {
            if (options.numParams > 2)
            {
                throw new Exception("numParams > 2");
            }
            int numStudents  = data.Rows;
            int numQuestions = data.Cols;
            // initialize the sampler at the mean of the priors (not sampling from the priors)
            double abilityMean        = abilityMeanPrior.GetMean();
            double abilityPrec        = abilityPrecPrior.GetMean();
            double difficultyMean     = difficultyMeanPrior.GetMean();
            double difficultyPrec     = difficultyPrecPrior.GetMean();
            double discriminationMean = discriminationMeanPrior.GetMean();
            double discriminationPrec = discriminationPrecPrior.GetMean();

            double[]            ability             = new double[numStudents];
            double[]            difficulty          = new double[numQuestions];
            List <double>[]     difficultySamples   = new List <double> [numQuestions];
            GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                difficultyEstimator[question] = new GaussianEstimator();
                difficultySamples[question]   = new List <double>();
                if (difficultyObserved != null)
                {
                    difficulty[question] = difficultyObserved[question];
                    difficultyEstimator[question].Add(difficultyObserved[question]);
                    difficultySamples[question].Add(difficultyObserved[question]);
                }
            }
            List <double>[]     abilitySamples   = new List <double> [numStudents];
            GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
            for (int student = 0; student < abilityEstimator.Length; student++)
            {
                abilityEstimator[student] = new GaussianEstimator();
                abilitySamples[student]   = new List <double>();
                if (abilityObserved != null)
                {
                    ability[student] = abilityObserved[student];
                    abilityEstimator[student].Add(abilityObserved[student]);
                    abilitySamples[student].Add(abilityObserved[student]);
                }
            }
            double[]         discrimination          = new double[numQuestions];
            List <double>[]  discriminationSamples   = new List <double> [numQuestions];
            GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                discriminationEstimator[question] = new GammaEstimator();
                discriminationSamples[question]   = new List <double>();
                discrimination[question]          = 1;
                if (discriminationObserved != null)
                {
                    discrimination[question] = discriminationObserved[question];
                    discriminationEstimator[question].Add(discriminationObserved[question]);
                    discriminationSamples[question].Add(discriminationObserved[question]);
                }
            }
            responseProbMean = new Matrix(numStudents, numQuestions);
            int    niters           = options.numberOfSamples;
            int    burnin           = options.burnIn;
            double logisticVariance = Math.PI * Math.PI / 3;
            double shape            = 4.5;
            Gamma  precPrior        = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);

            precPrior      = Gamma.PointMass(1);
            double[,] prec = new double[numStudents, numQuestions];
            double[,] x    = new double[numStudents, numQuestions];
            int numRejected = 0, numAttempts = 0;

            for (int iter = 0; iter < niters; iter++)
            {
                for (int student = 0; student < numStudents; student++)
                {
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // sample prec given ability, difficulty, x
                        // N(x; ability-difficulty, 1/prec) = Gamma(prec; 1.5, (x-ability+difficulty)^2/2)
                        Gamma  precPost = precPrior;
                        double xMean    = (ability[student] - difficulty[question]) * discrimination[question];
                        double delta    = x[student, question] - xMean;
                        Gamma  like     = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                        precPost.SetToProduct(precPost, like);
                        prec[student, question] = precPost.Sample();
                        // sample x given ability, difficulty, prec, data
                        // using an independence chain MH
                        bool     y      = (data[student, question] > 0);
                        double   sign   = y ? 1.0 : -1.0;
                        Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                        // we want to sample from xPrior*I(x>0)
                        // instead we sample from xPost
                        Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                        double   oldx  = x[student, question];
                        double   newx  = xPost.Sample();
                        numAttempts++;
                        if (newx * sign < 0)
                        {
                            newx = oldx; // rejected
                            numRejected++;
                        }
                        else
                        {
                            // importance weights
                            double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                            double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                            // acceptance ratio
                            double paccept = Math.Exp(neww - oldw);
                            if (paccept < 1 && Rand.Double() > paccept)
                            {
                                newx = oldx; // rejected
                                numRejected++;
                            }
                        }
                        x[student, question] = newx;
                        if (iter >= burnin)
                        {
                            double responseProb = MMath.Logistic(xMean);
                            responseProbMean[student, question] += responseProb;
                        }
                    }
                }
                if (abilityObserved == null)
                {
                    // sample ability given difficulty, prec, x
                    for (int student = 0; student < numStudents; student++)
                    {
                        Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                        for (int question = 0; question < numQuestions; question++)
                        {
                            // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                            Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, abilityLike);
                        }
                        ability[student] = post.Sample();
                        if (iter >= burnin)
                        {
                            abilityEstimator[student].Add(post);
                            abilitySamples[student].Add(ability[student]);
                        }
                    }
                }
                // sample difficulty given ability, prec, x
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
                    for (int student = 0; student < numStudents; student++)
                    {
                        // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                        if (discrimination[question] > 0)
                        {
                            Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, like);
                        }
                    }
                    difficulty[question] = post.Sample();
                    if (iter >= burnin)
                    {
                        //if (difficulty[question] > 100)
                        //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                        difficultyEstimator[question].Add(post);
                        difficultySamples[question].Add(difficulty[question]);
                    }
                }
                if (options.numParams > 1 && discriminationObserved == null)
                {
                    // sample discrimination given ability, difficulty, prec, x
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // moment-matching on the prior
                        Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                        Gaussian post        = approxPrior;
                        for (int student = 0; student < numStudents; student++)
                        {
                            // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                            double delta = ability[student] - difficulty[question];
                            if (delta > 0)
                            {
                                Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                                post.SetToProduct(post, like);
                            }
                        }
                        TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                        double            olddisc   = discrimination[question];
                        double            newdisc   = postTrunc.Sample();
                        // importance weights
                        Func <double, double> priorLogProb = delegate(double d)
                        {
                            double logd = Math.Log(d);
                            return(Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                        };
                        double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                        double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                        // acceptance ratio
                        double paccept = Math.Exp(neww - oldw);
                        if (paccept < 1 && Rand.Double() > paccept)
                        {
                            // rejected
                        }
                        else
                        {
                            discrimination[question] = newdisc;
                        }
                        if (iter >= burnin)
                        {
                            discriminationEstimator[question].Add(discrimination[question]);
                            discriminationSamples[question].Add(discrimination[question]);
                        }
                    }
                }
                // sample abilityMean given ability, abilityPrec
                Gaussian abilityMeanPost = abilityMeanPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
                    abilityMeanPost *= like;
                }
                abilityMean = abilityMeanPost.Sample();
                // sample abilityPrec given ability, abilityMean
                Gamma abilityPrecPost = abilityPrecPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
                    abilityPrecPost *= like;
                }
                abilityPrec = abilityPrecPost.Sample();
                // sample difficultyMean given difficulty, difficultyPrec
                Gaussian difficultyMeanPost = difficultyMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
                    difficultyMeanPost *= like;
                }
                difficultyMean = difficultyMeanPost.Sample();
                // sample difficultyPrec given difficulty, difficultyMean
                Gamma difficultyPrecPost = difficultyPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
                    difficultyPrecPost *= like;
                }
                difficultyPrec = difficultyPrecPost.Sample();
                // sample discriminationMean given discrimination, discriminationPrec
                Gaussian discriminationMeanPost = discriminationMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
                    discriminationMeanPost *= like;
                }
                discriminationMean = discriminationMeanPost.Sample();
                // sample discriminationPrec given discrimination, discriminationMean
                Gamma discriminationPrecPost = discriminationPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
                    discriminationPrecPost *= like;
                }
                discriminationPrec = discriminationPrecPost.Sample();
                //if (iter % 1 == 0)
                //    Console.WriteLine("iter = {0}", iter);
            }
            //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
            //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
            int numSamplesUsed = niters - burnin;

            responseProbMean.Scale(1.0 / numSamplesUsed);
            //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
            difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            abilityPost    = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            if (options.numParams > 1)
            {
                discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
            }
            abilityCred    = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
            difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
            bool saveSamples = false;

            if (saveSamples)
            {
                using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
                {
                    int q = 11;
                    writer.Write("difficulty", difficultySamples[q]);
                    writer.Write("discrimination", discriminationSamples[q]);
                }
            }
        }
Exemplo n.º 15
0
        /// <summary>
        /// Tests inference for x ~ N(mean, variance) where the mean is Gaussian-distributed and
        /// the variance is Gamma(shape=a, rate=b), with a Gaussian observation constraint on x.
        /// Compares the engine's evidence and the posteriors over x and variance against either
        /// a semi-analytic computation (a == 1 or 2) or importance sampling (otherwise).
        /// </summary>
        /// <param name="mm">Mean of the Gaussian prior on the mean.</param>
        /// <param name="vm">Variance of the Gaussian prior on the mean.</param>
        /// <param name="mx">Mean of the Gaussian constraint on x.</param>
        /// <param name="vx">Variance of the Gaussian constraint on x (may be infinite).</param>
        /// <param name="a">Shape of the Gamma prior on the variance.</param>
        /// <param name="b">Rate of the Gamma prior on the variance.</param>
        private void GaussianFromMeanAndVarianceTest(double mm, double vm, double mx, double vx, double a, double b)
        {
            // Wrap the model in an evidence gate so engine.Infer(evidence) yields the model evidence.
            Variable <bool>   evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock           block    = Variable.If(evidence);
            Variable <double> mean     = Variable.GaussianFromMeanAndVariance(mm, vm).Named("mean");
            Variable <double> variance = Variable.GammaFromShapeAndRate(a, b).Named("variance");
            Variable <double> x        = Variable.GaussianFromMeanAndVariance(mean, variance).Named("x");

            Variable.ConstrainEqualRandom(x, new Gaussian(mx, vx));
            block.CloseBlock();

            InferenceEngine engine = new InferenceEngine();

            engine.Compiler.RecommendedQuality = QualityBand.Experimental;
            double   evExpected;
            Gaussian xExpected;
            Gamma    vExpected;

            if (a == 1 || a == 2)
            {
                // For integer shapes 1 and 2 the required moments can be computed semi-analytically
                // via VarianceGammaTimesGaussianMoments, after standardizing by c = sqrt(2*b).
                double c = System.Math.Sqrt(2 * b);
                double m = c * (mx - mm);
                double v = c * c * (vx + vm);
                double Z, mu, m2u;
                VarianceGammaTimesGaussianMoments(a, m, v, out Z, out mu, out m2u);
                evExpected = System.Math.Log(Z * c);
                double vu = m2u - mu * mu;
                // Undo the standardization and blend with the constraint to obtain the posterior on x.
                // r is the fraction of posterior mass attributed to the prior side (r -> 1 as vx -> Inf).
                double r  = Double.IsPositiveInfinity(vx) ? 1.0 : vx / (vx + vm);
                double mp = r * (mu / c + mm) + (1 - r) * mx;
                double vp = r * r * vu / (c * c) + r * vm;
                xExpected = new Gaussian(mp, vp);
                // Posterior moments of the variance follow from ratios of normalizers at shifted shapes a+1, a+2.
                double Zplus1, Zplus2;
                VarianceGammaTimesGaussianMoments(a + 1, m, v, out Zplus1, out mu, out m2u);
                VarianceGammaTimesGaussianMoments(a + 2, m, v, out Zplus2, out mu, out m2u);
                double vmp  = a / b * Zplus1 / Z;
                double vm2p = a * (a + 1) / (b * b) * Zplus2 / Z;
                double vvp  = vm2p - vmp * vmp;
                vExpected = Gamma.FromMeanAndVariance(vmp, vvp);
            }
            else
            {
                // General shapes: estimate the posteriors by importance sampling from the prior,
                // weighting each draw by the likelihood of x under the constraint.
                int n = 1000000;
                GaussianEstimator est   = new GaussianEstimator();
                GammaEstimator    vEst  = new GammaEstimator();
                Gaussian          xLike = new Gaussian(mx, vx);
                for (int i = 0; i < n; i++)
                {
                    double m       = Gaussian.Sample(mm, 1 / vm);
                    // v ~ Gamma(shape=a, rate=b)
                    double v       = Rand.Gamma(a) / b;
                    double xSample = Gaussian.Sample(m, 1 / v);
                    double weight  = System.Math.Exp(xLike.GetLogProb(xSample));
                    est.Add(xSample, weight);
                    vEst.Add(v, weight);
                }
                // Evidence = average importance weight (est.mva.Count appears to accumulate the
                // total weight — see its use as "mass" elsewhere in this file).
                evExpected = System.Math.Log(est.mva.Count / n);
                xExpected  = est.GetDistribution(new Gaussian());
                vExpected  = vEst.GetDistribution(new Gamma());
            }
            double evActual = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Gaussian xActual = engine.Infer <Gaussian>(x);

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            Gamma vActual = engine.Infer <Gamma>(variance);

            Console.WriteLine("variance = {0} should be {1}", vActual, vExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-10) < 1e-4);
            Assert.True(xExpected.MaxDiff(xActual) < 1e-4);
            Assert.True(vExpected.MaxDiff(vActual) < 1e-4);
        }
Exemplo n.º 16
0
        /// <summary>
        /// Dyes: variance components model from the BUGS examples
        /// (http://users.aims.ac.za/~mackay/BUGS/Examples/Dyes.html).
        /// Batch means m[i] ~ N(theta, 1/tauBetween); observations y[i,j] ~ N(m[i], 1/tauWithin).
        /// Fits by Gibbs sampling and checks theta and both variance posteriors against BUGS.
        /// </summary>
        public void BugsDyes()
        {
            Rand.Restart(12347);
            // Dyestuff yields: 6 batches of 5 samples each.
            double[,] yields =
            {
                { 1545, 1440, 1440, 1520, 1580 },
                { 1540, 1555, 1490, 1560, 1495 },
                { 1595, 1550, 1605, 1510, 1560 },
                { 1445, 1440, 1595, 1465, 1545 },
                { 1595, 1630, 1515, 1635, 1625 },
                { 1520, 1455, 1450, 1480, 1445 }
            };

            // Diffuse priors, matching the BUGS model specification.
            Variable<double> theta = Variable.GaussianFromMeanAndPrecision(0.0, 1.0E-10).Named("theta");
            Variable<double> tauWithin = Variable.GammaFromShapeAndRate(0.001, 0.001).Named("tauWithin");
            Variable<double> tauBetween = Variable.GammaFromShapeAndRate(0.001, 0.001).Named("tauBetween");
            Range batch = new Range(yields.GetLength(0)).Named("i");
            Range sample = new Range(yields.GetLength(1)).Named("j");
            VariableArray<double> m = Variable.Array<double>(batch).Named("m");
            VariableArray2D<double> y = Variable.Array<double>(batch, sample).Named("y");
            using (Variable.ForEach(batch))
            {
                m[batch] = Variable.GaussianFromMeanAndPrecision(theta, tauBetween);
                using (Variable.ForEach(sample))
                {
                    y[batch, sample] = Variable.GaussianFromMeanAndPrecision(m[batch], tauWithin);
                }
            }
            y.ObservedValue = yields;

            InferenceEngine engine = new InferenceEngine(new GibbsSampling());
            engine.NumberOfIterations = 50000;
            engine.ShowProgress       = false;

            // Start the chain away from the diffuse priors' degenerate region.
            tauWithin.InitialiseTo(Gamma.FromShapeAndRate(1.0, 1.0));
            theta.InitialiseTo(Gaussian.FromMeanAndPrecision(1500, 10.0));

            Gaussian thetaPost = engine.Infer<Gaussian>(theta);
            // The mean and variance of (varianceWithin, varianceBetween) = (1/tauWithin, 1/tauBetween)
            // can only be obtained from the raw precision samples.
            IList<double> tauWithinSamples  = engine.Infer<IList<double>>(tauWithin, QueryTypes.Samples);
            IList<double> tauBetweenSamples = engine.Infer<IList<double>>(tauBetween, QueryTypes.Samples);

            // Moment-match a Gaussian to the reciprocals of the sampled precisions.
            Func<IList<double>, Gaussian> varianceFromPrecisions = delegate(IList<double> precisionSamples)
            {
                GaussianEstimator estimator = new GaussianEstimator();
                foreach (double precision in precisionSamples)
                {
                    estimator.Add(1 / precision);
                }
                return estimator.GetDistribution(new Gaussian());
            };
            Gaussian varianceWithinPost  = varianceFromPrecisions(tauWithinSamples);
            Gaussian varianceBetweenPost = varianceFromPrecisions(tauBetweenSamples);

            // Reference posteriors from BUGS.
            Gaussian thetaExpected           = Gaussian.FromMeanAndVariance(1528, 478.5);
            Gaussian varianceWithinExpected  = Gaussian.FromMeanAndVariance(3010, 1.203e+06);
            Gaussian varianceBetweenExpected = Gaussian.FromMeanAndVariance(2272, 2.069e+07);

            Console.WriteLine("theta = {0} should be {1}", thetaPost, thetaExpected);
            Console.WriteLine("varianceWithin = {0} should be {1}", varianceWithinPost, varianceWithinExpected);
            Console.WriteLine("varianceBetween = {0} should be {1}", varianceBetweenPost, varianceBetweenExpected);
            Assert.True(thetaExpected.MaxDiff(thetaPost) < 1e-1);
            Assert.True(varianceWithinExpected.MaxDiff(varianceWithinPost) < 2e-4);
            Assert.True(varianceBetweenExpected.MaxDiff(varianceBetweenPost) < 2e-4);
        }
Exemplo n.º 17
0
        /// <summary>
        /// BUGS "Rats" example: a hierarchical normal growth-curve model fit by Gibbs sampling
        /// and checked against the published BUGS results.  Each rat's weight y[i,t] is Gaussian
        /// around alpha[i] + beta[i]*(x[t] - xbar) with shared noise precision tauC.
        /// </summary>
        /// <param name="initialiseAlpha">Whether to explicitly initialise the per-rat intercepts
        /// (the per-rat slopes are initialised whenever this is true; see comment below).</param>
        /// <param name="initialiseAlphaC">Whether to explicitly initialise the population intercept
        /// mean (the population slope mean follows the same flag).</param>
        private void BugsRats(bool initialiseAlpha, bool initialiseAlphaC)
        {
            Rand.Restart(0);
            double precOfGaussianPrior   = 1.0E-6;
            double shapeRateOfGammaPrior = 0.02; // smallest choice that will avoid zeros

            double meanOfBetaPrior  = 0.0;
            double meanOfAlphaPrior = 0.0;

            // The model
            int    N    = RatsHeightData.GetLength(0);
            int    T    = RatsHeightData.GetLength(1);
            double xbar = 22.0;

            // Centre the measurement times so intercepts refer to time xbar.
            double[] xDataZeroMean = new double[RatsXData.Length];
            for (int i = 0; i < RatsXData.Length; i++)
            {
                xDataZeroMean[i] = RatsXData[i] - xbar;
            }
            Range r = new Range(N).Named("N");
            Range w = new Range(T).Named("T");
            VariableArray2D <double> y        = Variable.Observed <double>(RatsHeightData, r, w).Named("y");
            VariableArray <double>   x        = Variable.Observed <double>(xDataZeroMean, w).Named("x");
            Variable <double>        tauC     = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("tauC");
            Variable <double>        alphaC   = Variable.GaussianFromMeanAndPrecision(meanOfAlphaPrior, precOfGaussianPrior).Named("alphaC");
            Variable <double>        alphaTau = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("alphaTau");
            Variable <double>        betaC    = Variable.GaussianFromMeanAndPrecision(meanOfBetaPrior, precOfGaussianPrior).Named("betaC");
            Variable <double>        betaTau  = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("betaTau");
            VariableArray <double>   alpha    = Variable.Array <double>(r).Named("alpha");

            alpha[r] = Variable.GaussianFromMeanAndPrecision(alphaC, alphaTau).ForEach(r);
            VariableArray <double> beta = Variable.Array <double>(r).Named("beta");

            beta[r] = Variable.GaussianFromMeanAndPrecision(betaC, betaTau).ForEach(r);
            VariableArray2D <double> mu    = Variable.Array <double>(r, w).Named("mu");
            VariableArray2D <double> betaX = Variable.Array <double>(r, w).Named("betax");

            betaX[r, w] = beta[r] * x[w];
            mu[r, w]    = alpha[r] + betaX[r, w];
            y[r, w]     = Variable.GaussianFromMeanAndPrecision(mu[r, w], tauC);
            // alpha0 is the intercept at time zero (un-centred), the quantity reported by BUGS.
            Variable <double> alpha0 = (alphaC - xbar * betaC).Named("alpha0");

            InferenceEngine ie;
            GibbsSampling   gs = new GibbsSampling();
            // Initialise both alpha and beta together.
            // Initialising only alpha (or only beta) is not reliable because you could by chance get a large betaTau and small tauC to start,
            // at which point beta and alphaC become garbage, leading to alpha becoming garbage on the next iteration.
            bool initialiseBeta  = initialiseAlpha;
            bool initialiseBetaC = initialiseAlphaC;

            if (initialiseAlpha)
            {
                Gaussian[] alphaInit = new Gaussian[N];
                for (int i = 0; i < N; i++)
                {
                    alphaInit[i] = Gaussian.FromMeanAndPrecision(250.0, 1.0);
                }
                alpha.InitialiseTo(Distribution <double> .Array(alphaInit));
            }
            if (initialiseBeta)
            {
                Gaussian[] betaInit = new Gaussian[N];
                for (int i = 0; i < N; i++)
                {
                    betaInit[i] = Gaussian.FromMeanAndPrecision(6.0, 1.0);
                }
                beta.InitialiseTo(Distribution <double> .Array(betaInit));
            }
            if (initialiseAlphaC)
            {
                alphaC.InitialiseTo(Gaussian.FromMeanAndVariance(250.0, 1.0));
            }
            if (initialiseBetaC)
            {
                betaC.InitialiseTo(Gaussian.FromMeanAndVariance(6.0, 1.0));
            }
            // Disabled scratch code for initialising the precisions, kept for reference.
            if (false)
            {
                //tauC.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
                //alphaTau.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
                //betaTau.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
            }
            // With no explicit initialisation at all, rely on a long burn-in instead.
            if (!initialiseAlpha && !initialiseBeta && !initialiseAlphaC && !initialiseBetaC)
            {
                gs.BurnIn = 1000;
            }
            ie = new InferenceEngine(gs);
            ie.ShowProgress         = false;
            ie.ModelName            = "BugsRats";
            ie.NumberOfIterations   = 4000;
            ie.OptimiseForVariables = new List <IVariable>()
            {
                alphaC, betaC, alpha0, tauC
            };
            // Request both the marginal and the raw samples for the checked quantities.
            betaC.AddAttribute(QueryTypes.Marginal);
            betaC.AddAttribute(QueryTypes.Samples);
            alpha0.AddAttribute(QueryTypes.Marginal);
            alpha0.AddAttribute(QueryTypes.Samples);
            tauC.AddAttribute(QueryTypes.Marginal);
            tauC.AddAttribute(QueryTypes.Samples);

            // Inference
            object   alphaCActual = ie.Infer(alphaC);
            Gaussian betaCMarg    = ie.Infer <Gaussian>(betaC);
            Gaussian alpha0Marg   = ie.Infer <Gaussian>(alpha0);
            Gamma    tauCMarg     = ie.Infer <Gamma>(tauC);

            // Check results against BUGS
            Gaussian betaCExpected     = new Gaussian(6.185, System.Math.Pow(0.1068, 2));
            Gaussian alpha0Expected    = new Gaussian(106.6, System.Math.Pow(3.625, 2));
            double   sigmaMeanExpected = 6.082;
            double   sigmaMean         = System.Math.Sqrt(1.0 / tauCMarg.GetMean());

            if (!initialiseAlpha && !initialiseAlphaC)
            {
                Debug.WriteLine("betaC = {0} should be {1}", betaCMarg, betaCExpected);
                Debug.WriteLine("alpha0 = {0} should be {1}", alpha0Marg, alpha0Expected);
            }
            Assert.True(GaussianDiff(betaCExpected, betaCMarg) < 0.1);
            Assert.True(GaussianDiff(alpha0Expected, alpha0Marg) < 0.1);
            Assert.True(MMath.AbsDiff(sigmaMeanExpected, sigmaMean, 0.1) < 0.1);

            IList <double> betaCSamples  = ie.Infer <IList <double> >(betaC, QueryTypes.Samples);
            IList <double> alpha0Samples = ie.Infer <IList <double> >(alpha0, QueryTypes.Samples);
            IList <double> tauCSamples   = ie.Infer <IList <double> >(tauC, QueryTypes.Samples);

            // Cross-check: the marginal reported by the engine should agree with one
            // moment-matched from the raw samples.
            GaussianEstimator est = new GaussianEstimator();

            foreach (double sample in betaCSamples)
            {
                est.Add(sample);
            }
            Gaussian betaCMarg2 = est.GetDistribution(new Gaussian());

            Assert.True(GaussianDiff(betaCMarg, betaCMarg2) < 0.1);
        }
		/// <summary>
		/// Computes the coefficients of the best linear least-squares approximation
		/// g(a,b) = w[0]*a + w[1]*b + w[2] to the product f(a,b) = a*b, where a and b are
		/// independently distributed as the marginals (A * to_A) and (B * to_B).
		/// </summary>
		/// <param name="A">Incoming message from the first factor argument.</param>
		/// <param name="B">Incoming message from the second factor argument.</param>
		/// <param name="to_A">Outgoing message to the first argument, multiplied in to form its marginal.</param>
		/// <param name="to_B">Outgoing message to the second argument, multiplied in to form its marginal.</param>
		/// <returns>A 4-element vector: the weight of a, the weight of b, the constant term,
		/// and the residual variance E[(f - g)^2] (clamped at zero).</returns>
		public static Vector Weights(Gaussian A, Gaussian B, Gaussian to_A, Gaussian to_B)
		{
			// Point masses are handled exactly by the deterministic overloads.
			if (A.IsPointMass) return Weights(A.Point, B);
			if (B.IsPointMass) return Weights(A, B.Point);
			// Form the marginals of a and b.
			A *= to_A;
			B *= to_B;
			double ma, va, mb, vb;
			A.GetMeanAndVariance(out ma, out va);
			B.GetMeanAndVariance(out mb, out vb);
			double ma2 = va + ma*ma;  // E[a^2]
			double mb2 = vb + mb*mb;  // E[b^2]
			// Normal equations M*w = rhs over the basis (a, b, 1):
			// rhs = E[a*b*(a, b, 1)], using independence of a and b.
			Vector w = Vector.Zero(3);
			w[0] = ma2*mb;
			w[1] = mb2*ma;
			w[2] = ma*mb;
			// M = second-moment matrix of (a, b, 1).
			PositiveDefiniteMatrix M = new PositiveDefiniteMatrix(3, 3);
			M[0, 0] = ma2;
			M[0, 1] = ma*mb;
			M[0, 2] = ma;
			M[1, 0] = ma*mb;
			M[1, 1] = mb2;
			M[1, 2] = mb;
			M[2, 0] = ma;
			M[2, 1] = mb;
			M[2, 2] = 1;
			w = w.PredivideBy(M);
			Vector weights = Vector.Zero(4);
			weights[0] = w[0];
			weights[1] = w[1];
			weights[2] = w[2];
			// Residual variance E[(a*b)^2] - w.rhs; round-off can push this slightly
			// negative, so clamp at zero.
			weights[3] = ma2*mb2 - w[0]*ma2*mb - w[1]*mb2*ma - w[2]*ma*mb;
			if (weights[3] < 0) weights[3] = 0;
			// NOTE: removed an unreachable `if (false)` Monte-Carlo debugging block that
			// sampled f - g into a GaussianEstimator; it was dead code (compiler warning CS0162).
			return weights;
		}
Exemplo n.º 19
0
		/// <summary>
		/// EP message to 'sample' for the factor N(sample; mean, 1/precision) when the
		/// precision is Gamma-distributed, computed by numerical quadrature over the precision.
		/// </summary>
		/// <param name="sample">Incoming message from 'sample'.</param>
		/// <param name="mean">Incoming message from 'mean'.  Must be a proper distribution.</param>
		/// <param name="precision">Incoming message from 'precision'.  Must be a proper distribution.</param>
		/// <returns>The outgoing message to 'sample'.</returns>
		/// <exception cref="ArgumentException">If sample is uniform and the precision has shape &lt;= 1,
		/// making the posterior variance infinite.</exception>
		public static Gaussian SampleAverageConditional(Gaussian sample, [SkipIfUniform] Gaussian mean, [SkipIfUniform] Gamma precision)
		{
			// If sample is uniform and the precision is heavy-tailed (shape <= 1), substitute a
			// nearly-uniform proper message so the computation below stays finite.
			if (sample.IsUniform() && precision.Shape <= 1.0) sample = Gaussian.FromNatural(1e-20, 1e-20);
			if (precision.IsPointMass) {
				// Known precision: delegate to the exact fixed-precision overload.
				return SampleAverageConditional(mean, precision.Point);
			} else if (sample.IsUniform()) {
				// for large vx, Z =approx N(mx; mm, vx+vm+E[1/prec])
				double mm,mv;
				mean.GetMeanAndVariance(out mm, out mv);
				// NOTE: this error may happen because sample didn't receive any message yet under the schedule.
				// Need to make the scheduler smarter to avoid this.
				if (precision.Shape <= 1.0) throw new ArgumentException("The posterior has infinite variance due to precision distributed as "+precision+" (shape <= 1).  Try using a different prior for the precision, with shape > 1.");
				return Gaussian.FromMeanAndVariance(mm, mv + precision.GetMeanInverse());
			} else if (mean.IsUniform() || precision.IsUniform()) {
				return Gaussian.Uniform();
			} else if (sample.IsPointMass) {
				// The correct answer here is not uniform, but rather a limit.  
				// However it doesn't really matter what we return since multiplication by a point mass 
				// always yields a point mass.
				return Gaussian.Uniform();
			} else if (!precision.IsProper()) {
				throw new ImproperMessageException(precision);
			} else {
				// General case: integrate out the precision numerically on a log-spaced grid.
				double mx, vx;
				sample.GetMeanAndVariance(out mx, out vx);
				double mm, vm;
				mean.GetMeanAndVariance(out mm, out vm);
				double m = mx-mm;
				double v = vx+vm;
				if (double.IsPositiveInfinity(v)) return Gaussian.Uniform();
				double m2 = m*m;
				// Laplace-style approximation q to the precision posterior, used only to
				// centre the integration bounds around its mean.
				Gamma q = GaussianOp_Laplace.Q(sample, mean, precision, GaussianOp_Laplace.QInit());
				double a = precision.Shape;
				double b = precision.Rate;
				double r = q.GetMean();
				double rmin, rmax;
				GetIntegrationBoundsForPrecision(r, m, v, a, b, out rmin, out rmax);
				// n grid points, equally spaced in log(precision).
				int n = 20000;
				double inc = (Math.Log(rmax)-Math.Log(rmin))/(n-1);
				GaussianEstimator est = new GaussianEstimator();
				double shift = 0;
				for (int i = 0; i < n; i++) {
					double prec = rmin*Math.Exp(i*inc);
					double newVar, newMean;
					// Conditional posterior of sample given this precision value.
					// the following works even if sample is uniform. (sample.Precision == 0)
					if (mean.IsPointMass) {
						// take limit mean.Precision -> Inf
						newVar = 1.0 / (prec + sample.Precision);
						newMean = newVar * (prec * mean.Point + sample.MeanTimesPrecision);
					} else {
						// mean.Precision < Inf
						double R = prec / (prec + mean.Precision);
						newVar = 1.0 / (R * mean.Precision + sample.Precision);
						newMean = newVar * (R * mean.MeanTimesPrecision + sample.MeanTimesPrecision);
					}
					// Unnormalized log-weight of this precision value; subtracting 'shift'
					// (the first log-weight) keeps exp() from overflowing or underflowing.
					double logp = -0.5*Math.Log(v+1/prec) -0.5*m2/(v+1/prec) + a*Math.Log(prec) - b*prec;
					if (i == 0) shift = logp;
					est.Add(Gaussian.FromMeanAndVariance(newMean, newVar), Math.Exp(logp-shift));
				}
				if (est.mva.Count == 0) throw new Exception("Quadrature found zero mass");
				if (double.IsNaN(est.mva.Count)) throw new Exception("count is nan");
				Gaussian sampleMarginal = est.GetDistribution(new Gaussian());
				// EP message = moment-matched marginal divided by the incoming sample message.
				Gaussian result = new Gaussian();
				result.SetToRatio(sampleMarginal, sample, GaussianOp.ForceProper);
				if (double.IsNaN(result.Precision)) throw new Exception("result is nan");
				return result;
			}
		}