Example #1
        // /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogisticOp"]/message_doc[@name="LogisticAverageConditional(Beta, Gaussian, Gaussian)"]/*'/>
        public static Beta LogisticAverageConditional(Beta logistic, [Proper] Gaussian x, Gaussian falseMsg, Gaussian to_x)
        {
            if (x.IsPointMass)
            {
                return(Beta.PointMass(MMath.Logistic(x.Point)));
            }

            if (logistic.IsPointMass || x.IsUniform())
            {
                return(Beta.Uniform());
            }

            Gaussian post = to_x * x;
            double   m, v;

            post.GetMeanAndVariance(out m, out v);
            double mean        = MMath.LogisticGaussian(m, v);
            bool   useVariance = logistic.IsUniform(); // useVariance gives lower accuracy on tests, but is required for the uniform case

            if (useVariance)
            {
                // meanTF = E[p] - E[p^2]
                double meanTF     = MMath.LogisticGaussianDerivative(m, v);
                double meanSquare = mean - meanTF;
                Beta   result     = Beta.FromMeanAndVariance(mean, meanSquare - mean * mean);
                result.SetToRatio(result, logistic, true);
                return(result);
            }
            else
            {
                double logZ = LogAverageFactor(logistic, x, falseMsg) + logistic.GetLogNormalizer(); // log int_x logistic(sigma(x)) N(x;m,v) dx
                double tc1  = logistic.TrueCount - 1;
                double fc1  = logistic.FalseCount - 1;
                return(BetaFromMeanAndIntegral(mean, logZ, tc1, fc1));
            }
        }
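The variance-matching branch above can be exercised on its own. A minimal sketch, assuming the Infer.NET (Microsoft.ML.Probabilistic) namespaces are available; m and v are illustrative posterior moments, and the division by the incoming Beta message (SetToRatio) is omitted:

        // Sketch of the useVariance branch: moment-match a Beta to sigma(x) under N(m, v).
        public static Beta LogisticMomentMatch(double m, double v)
        {
            double mean       = MMath.LogisticGaussian(m, v);           // E[sigma(x)]
            double meanTF     = MMath.LogisticGaussianDerivative(m, v); // E[sigma(x)] - E[sigma(x)^2]
            double meanSquare = mean - meanTF;                          // E[sigma(x)^2]
            return(Beta.FromMeanAndVariance(mean, meanSquare - mean * mean));
        }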
Example #2
        /// <summary>
        /// VMP message to LogOdds
        /// </summary>
        /// <param name="sample">Incoming message from sample</param>
        /// <param name="logOdds">Incoming message from logOdds</param>
        /// <returns><c>sum_x marginal(x)*log(factor(x))</c></returns>
        /// <remarks><para>
        /// The outgoing message is the exponential of the integral of the log-factor times incoming messages, over all arguments except 'logOdds'.
        /// The formula is <c>int log(f(logOdds,x)) q(x) dx</c> where <c>x = (sample)</c>.
        /// </para></remarks>
        public static Gaussian LogOddsAverageLogarithm(bool sample, Gaussian logOdds)
        {
            // This is the non-conjugate VMP update using the Saul and Jordan (1999) bound.
            double m, v;

            logOdds.GetMeanAndVariance(out m, out v);
            double a = 0.5;

            // TODO: use a buffer to store the value of 'a', so it doesn't need to be re-optimised each time.
            for (int iter = 0; iter < 10; iter++)
            {
                double aOld = a;
                a = MMath.Logistic(m + (1 - 2 * a) * v * 0.5);
                if (Math.Abs(a - aOld) < 1e-8)
                {
                    break;
                }
            }
            double sa = MMath.Logistic(m + (1 - 2 * a) * v * 0.5);
            double vf = 1 / (a * a + (1 - 2 * a) * sa);
            double mf = m + vf * (sample ? 1 - sa : sa);

            return(Gaussian.FromMeanAndVariance(mf, vf));
        }
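The fixed-point update for the variational parameter 'a' can be run standalone. A self-contained sketch with a local logistic function, so it does not need Infer.NET; m and v are illustrative values:

        // Standalone sketch of the SJ99 fixed-point iteration for 'a'.
        public static double OptimizeA(double m, double v)
        {
            Func<double, double> logistic = t => 1.0 / (1.0 + Math.Exp(-t));
            double a = 0.5; // starting point, as in LogOddsAverageLogarithm
            for (int iter = 0; iter < 10; iter++)
            {
                double aOld = a;
                a = logistic(m + (1 - 2 * a) * v * 0.5);
                if (Math.Abs(a - aOld) < 1e-8)
                {
                    break;
                }
            }
            return(a);
        }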
Example #3
 public static bool BernoulliFromLogOdds(double logOdds)
 {
     return(Bernoulli(MMath.Logistic(logOdds)));
 }
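`Bernoulli(probTrue)` above is presumably a sibling sampling factor in the same class; under that assumption, the method is equivalent to this inline sketch (Rand is Infer.NET's random source):

 // Equivalent inline sketch, assuming Bernoulli(p) draws true with probability p.
 public static bool BernoulliFromLogOddsInline(double logOdds)
 {
     return(Rand.Double() < MMath.Logistic(logOdds));
 }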
Example #4
 /// <summary>
 /// Gets the probability of the binary variable being false
 /// </summary>
 /// <returns>p(x=false)</returns>
 public double GetProbFalse()
 {
     return(MMath.Logistic(-LogOdds));
 }
Example #5
 /// <summary>
 /// Gets the probability of the binary variable being true
 /// </summary>
 /// <returns>p(x=true)</returns>
 public double GetProbTrue()
 {
     return(MMath.Logistic(LogOdds));
 }
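These two getters are complementary because sigma(-t) = 1 - sigma(t). A quick check, assuming Infer.NET's MMath:

 // sigma(t) + sigma(-t) == 1 (up to rounding), the identity behind the two getters.
 public static void CheckComplementary(double logOdds)
 {
     double pTrue  = MMath.Logistic(logOdds);  // GetProbTrue
     double pFalse = MMath.Logistic(-logOdds); // GetProbFalse
     System.Diagnostics.Debug.Assert(Math.Abs(pTrue + pFalse - 1) < 1e-12);
 }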
Example #6
        /// <summary>
        /// VMP message to 'x'
        /// </summary>
        /// <param name="logistic">Incoming message from 'logistic'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <param name="x">Incoming message from 'x'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <param name="to_x">Previous outgoing message to 'X'.</param>
        /// <param name="a">Buffer 'a'.</param>
        /// <returns>The outgoing VMP message to the 'x' argument</returns>
        /// <remarks><para>
        /// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'x'.
        /// The formula is <c>exp(sum_(logistic) p(logistic) log(factor(logistic,x)))</c>.
        /// </para></remarks>
        /// <exception cref="ImproperMessageException"><paramref name="logistic"/> is not a proper distribution</exception>
        /// <exception cref="ImproperMessageException"><paramref name="x"/> is not a proper distribution</exception>
        public static Gaussian XAverageLogarithm([SkipIfUniform] Beta logistic, /*[Proper, SkipIfUniform]*/ Gaussian x, Gaussian to_x, double a)
        {
            if (logistic.IsPointMass)
            {
                return(LogisticOp.XAverageLogarithm(logistic.Point));
            }
            // f(x) = sigma(x)^(a-1) sigma(-x)^(b-1)
            //      = sigma(x)^(a+b-2) exp(-x(b-1))
            // since sigma(-x) = sigma(x) exp(-x)

            double scale = logistic.TrueCount + logistic.FalseCount - 2;

            if (scale == 0.0)
            {
                return(Gaussian.Uniform());
            }
            double shift = -(logistic.FalseCount - 1);
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            double sa;

            if (double.IsPositiveInfinity(v))
            {
                a  = 0.5;
                sa = MMath.Logistic(m);
            }
            else
            {
                sa = MMath.Logistic(m + (1 - 2 * a) * v * 0.5);
            }
            double precision = a * a + (1 - 2 * a) * sa;
            // meanTimesPrecision = m*a*a + 1-2*a*sa;
            double meanTimesPrecision = m * precision + 1 - sa;
            //double vf = 1/(a*a + (1-2*a)*sa);
            //double mf = m + vf*(true ? 1-sa : sa);
            //double precision = 1/vf;
            //double meanTimesPrecision = mf*precision;
            Gaussian result = Gaussian.FromNatural(scale * meanTimesPrecision + shift, scale * precision);
            double   step   = (LogisticOp_SJ99.global_step == 0.0) ? 1.0 : (Rand.Double() * LogisticOp_SJ99.global_step);         // random damping helps convergence, especially with parallel updates

            if (false && !x.IsPointMass) // disabled (dead code): experimental sign-preserving step search, kept for reference
            {
                // if the update would change the sign of 1-2*sa, send a message to make sa=0.5
                double newPrec = x.Precision - to_x.Precision + result.Precision;
                double newv    = 1 / newPrec;
                double newm    = newv * (x.MeanTimesPrecision - to_x.MeanTimesPrecision + result.MeanTimesPrecision);
                double newarg  = newm + (1 - 2 * a) * newv * 0.5;
                if ((sa < 0.5 && newarg > 0) || (sa > 0.5 && newarg < 0))
                {
                    // send a message to make newarg=0
                    // it is sufficient to make (x.MeanTimesPrecision + step*(result.MeanTimesPrecision - to_x.MeanTimesPrecision) + 0.5-a) = 0
                    double mpOffset   = x.MeanTimesPrecision + 0.5 - a;
                    double precOffset = x.Precision;
                    double mpScale    = result.MeanTimesPrecision - to_x.MeanTimesPrecision;
                    double precScale  = result.Precision - to_x.Precision;
                    double arg        = m + (1 - 2 * a) * v * 0.5;
                    //arg = 0;
                    step = (arg * precOffset - mpOffset) / (mpScale - arg * precScale);
                    //step = (a-0.5-x.MeanTimesPrecision)/(result.MeanTimesPrecision - to_x.MeanTimesPrecision);
                    //Console.WriteLine(step);
                }
            }
            if (step != 1.0)
            {
                result.Precision          = step * result.Precision + (1 - step) * to_x.Precision;
                result.MeanTimesPrecision = step * result.MeanTimesPrecision + (1 - step) * to_x.MeanTimesPrecision;
            }
            return(result);
        }
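The final damping step blends the new and previous messages in natural parameters. A sketch of that step in isolation, assuming Infer.NET's Gaussian:

        // Damping in natural parameters: step = 1 keeps the new message,
        // step = 0 keeps the previous one.
        public static Gaussian Damp(Gaussian newMsg, Gaussian oldMsg, double step)
        {
            return(Gaussian.FromNatural(
                       step * newMsg.MeanTimesPrecision + (1 - step) * oldMsg.MeanTimesPrecision,
                       step * newMsg.Precision + (1 - step) * oldMsg.Precision));
        }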
Example #7
 /// <summary>
 /// Evidence message for EP
 /// </summary>
 /// <param name="logistic">Incoming message from 'logistic'.</param>
 /// <param name="x">Constant value for 'x'.</param>
 /// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
 /// <remarks><para>
 /// The formula for the result is <c>log(sum_(logistic) p(logistic) factor(logistic,x))</c>.
 /// </para></remarks>
 public static double LogAverageFactor(Beta logistic, double x)
 {
     return(logistic.GetLogProb(MMath.Logistic(x)));
 }
Example #8
 /// <summary>
 /// Evidence message for EP
 /// </summary>
 /// <param name="logistic">Constant value for 'logistic'.</param>
 /// <param name="x">Constant value for 'x'.</param>
 /// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
 /// <remarks><para>
 /// The formula for the result is <c>log(factor(logistic,x))</c>.
 /// </para></remarks>
 public static double LogAverageFactor(double logistic, double x)
 {
     return((logistic == MMath.Logistic(x)) ? 0.0 : Double.NegativeInfinity);
 }
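A usage sketch for the two evidence overloads, assuming Infer.NET types; the point-mass overload returns 0 exactly when the values agree:

 public static void EvidenceDemo()
 {
     Beta logistic = new Beta(2, 3);
     double x = 0.7;
     double logEvidence = LogAverageFactor(logistic, x);          // log density of sigma(x) under Beta(2,3)
     double pointCase   = LogAverageFactor(MMath.Logistic(x), x); // 0.0: the values match exactly
 }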
Example #9
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogisticOp"]/message_doc[@name="FalseMsg(Beta, Gaussian, Gaussian)"]/*'/>
        public static Gaussian FalseMsg([SkipIfUniform] Beta logistic, [Proper] Gaussian x, Gaussian falseMsg)
        {
            // falseMsg approximates sigma(-x)
            // logistic(sigma(x)) N(x;m,v)
            // = sigma(x)^(a-1) sigma(-x)^(b-1) N(x;m,v)
            // = e^((a-1)x) sigma(-x)^(a+b-2) N(x;m,v)
            // = sigma(-x)^(a+b-2) N(x;m+(a-1)v,v) exp((a-1)m + (a-1)^2 v/2)
            // = sigma(-x) (prior)
            // where prior = sigma(-x)^(a+b-3) N(x;m+(a-1)v,v)
            double tc1 = logistic.TrueCount - 1;
            double fc1 = logistic.FalseCount - 1;
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            if (tc1 + fc1 == 0)
            {
                falseMsg.SetToUniform();
                return(falseMsg);
            }
            else if (tc1 + fc1 < 0)
            {
                // power EP update, using 1/sigma(-x) as the factor
                Gaussian prior = new Gaussian(m + tc1 * v, v) * (falseMsg ^ (tc1 + fc1 + 1));
                double   mprior, vprior;
                prior.GetMeanAndVariance(out mprior, out vprior);
                // posterior moments can be computed exactly
                double   w    = MMath.Logistic(mprior + 0.5 * vprior);
                Gaussian post = new Gaussian(mprior + w * vprior, vprior * (1 + w * (1 - w) * vprior));
                return(prior / post);
            }
            else
            {
                // power EP update
                Gaussian prior  = new Gaussian(m + tc1 * v, v) * (falseMsg ^ (tc1 + fc1 - 1));
                Gaussian newMsg = BernoulliFromLogOddsOp.LogOddsAverageConditional(false, prior);
                //Console.WriteLine("prior = {0}, falseMsg = {1}, newMsg = {2}", prior, falseMsg, newMsg);
                if (string.Empty.Length == 0) // always true: use the adaptive damping scheme; the else branch is a disabled fixed-damping alternative
                {
                    // adaptive damping scheme
                    Gaussian ratio = newMsg / falseMsg;
                    if ((ratio.MeanTimesPrecision < 0 && prior.MeanTimesPrecision > 0) ||
                        (ratio.MeanTimesPrecision > 0 && prior.MeanTimesPrecision < 0))
                    {
                        // if the update would change the sign of the mean, take a fractional step so that the new prior has exactly zero mean
                        // newMsg = falseMsg * (ratio^step)
                        // newPrior = prior * (ratio^step)^(tc1+fc1-1)
                        // 0 = prior.mp + ratio.mp*step*(tc1+fc1-1)
                        double step = -prior.MeanTimesPrecision / (ratio.MeanTimesPrecision * (tc1 + fc1 - 1));
                        if (step > 0 && step < 1)
                        {
                            newMsg = falseMsg * (ratio ^ step);
                            // check that newPrior has zero mean
                            //Gaussian newPrior = prior * ((ratio^step)^(tc1+fc1-1));
                            //Console.WriteLine(newPrior);
                        }
                    }
                }
                else
                {
                    for (int iter = 0; iter < 10; iter++)
                    {
                        newMsg   = falseMsg * ((newMsg / falseMsg) ^ 0.5);
                        falseMsg = newMsg;
                        //Console.WriteLine("prior = {0}, falseMsg = {1}, newMsg = {2}", prior, falseMsg, newMsg);
                        prior  = new Gaussian(m + tc1 * v, v) * (falseMsg ^ (tc1 + fc1 - 1));
                        newMsg = BernoulliFromLogOddsOp.LogOddsAverageConditional(false, prior);
                    }
                }
                return(newMsg);
            }
        }
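FalseMsg is meant to be iterated to a fixed point before the result feeds LogisticAverageConditional. A hedged usage sketch, assuming Infer.NET types; the iteration budget of 20 is an illustrative choice:

        // Usage sketch: iterate FalseMsg to (approximate) convergence.
        public static Gaussian IterateFalseMsg(Beta logistic, Gaussian x)
        {
            Gaussian falseMsg = Gaussian.Uniform();
            for (int iter = 0; iter < 20; iter++)
            {
                falseMsg = FalseMsg(logistic, x, falseMsg);
            }
            return(falseMsg);
        }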
Example #10
 /// <summary>
 /// Gets the probability of the binary variable being false
 /// </summary>
 /// <returns>p(x=false)</returns>
 public double GetProbFalse(int index)
 {
     return(MMath.Logistic(-LogOddsVector[index]));
 }
Example #11
        /// <summary>
        /// The expected logarithm of that distribution under this distribution.
        /// </summary>
        /// <param name="that">The distribution to take the logarithm of.</param>
        /// <returns><c>sum_x this.Evaluate(x)*Math.Log(that.Evaluate(x))</c></returns>
        /// <remarks>This is also known as the cross entropy.</remarks>
        public double GetAverageLog(BernoulliIntegerSubset that)
        {
            var res  = SparseVector.Zero(LogOddsVector.Count);
            var res2 = SparseVector.Zero(LogOddsVector.Count);

            res.SetToFunction(LogOddsVector, that.GetLogProbTrueVector(), (logpr, logProbTrue) => double.IsNegativeInfinity(logpr) ? 0 : MMath.Logistic(logpr) * logProbTrue);
            res2.SetToFunction(LogOddsVector, that.GetLogProbFalseVector(), (logpr, logProbFalse) => double.IsPositiveInfinity(logpr) ? 0 : MMath.Logistic(-logpr) * logProbFalse);
            return(res.Sum() + res2.Sum());
        }
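Per element, this is the scalar cross entropy p log q + (1 - p) log(1 - q), with guards for deterministic elements. A scalar sketch, assuming Infer.NET's MMath:

        // Scalar version of the per-element term in GetAverageLog, with the same
        // guards for elements whose log-odds are infinite (deterministic values).
        public static double CrossEntropyTerm(double logOdds, double thatLogOdds)
        {
            double termTrue  = double.IsNegativeInfinity(logOdds)
                ? 0 : MMath.Logistic(logOdds) * Math.Log(MMath.Logistic(thatLogOdds));
            double termFalse = double.IsPositiveInfinity(logOdds)
                ? 0 : MMath.Logistic(-logOdds) * Math.Log(MMath.Logistic(-thatLogOdds));
            return(termTrue + termFalse);
        }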
Example #12
        public void Sample(Options options, Matrix data)
        {
            if (options.numParams > 2)
            {
                throw new Exception("numParams > 2 is not supported");
            }
            int numStudents  = data.Rows;
            int numQuestions = data.Cols;
            // initialize the sampler at the mean of the priors (not sampling from the priors)
            double abilityMean        = abilityMeanPrior.GetMean();
            double abilityPrec        = abilityPrecPrior.GetMean();
            double difficultyMean     = difficultyMeanPrior.GetMean();
            double difficultyPrec     = difficultyPrecPrior.GetMean();
            double discriminationMean = discriminationMeanPrior.GetMean();
            double discriminationPrec = discriminationPrecPrior.GetMean();

            double[]            ability             = new double[numStudents];
            double[]            difficulty          = new double[numQuestions];
            List <double>[]     difficultySamples   = new List <double> [numQuestions];
            GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                difficultyEstimator[question] = new GaussianEstimator();
                difficultySamples[question]   = new List <double>();
                if (difficultyObserved != null)
                {
                    difficulty[question] = difficultyObserved[question];
                    difficultyEstimator[question].Add(difficultyObserved[question]);
                    difficultySamples[question].Add(difficultyObserved[question]);
                }
            }
            List <double>[]     abilitySamples   = new List <double> [numStudents];
            GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
            for (int student = 0; student < abilityEstimator.Length; student++)
            {
                abilityEstimator[student] = new GaussianEstimator();
                abilitySamples[student]   = new List <double>();
                if (abilityObserved != null)
                {
                    ability[student] = abilityObserved[student];
                    abilityEstimator[student].Add(abilityObserved[student]);
                    abilitySamples[student].Add(abilityObserved[student]);
                }
            }
            double[]         discrimination          = new double[numQuestions];
            List <double>[]  discriminationSamples   = new List <double> [numQuestions];
            GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                discriminationEstimator[question] = new GammaEstimator();
                discriminationSamples[question]   = new List <double>();
                discrimination[question]          = 1;
                if (discriminationObserved != null)
                {
                    discrimination[question] = discriminationObserved[question];
                    discriminationEstimator[question].Add(discriminationObserved[question]);
                    discriminationSamples[question].Add(discriminationObserved[question]);
                }
            }
            responseProbMean = new Matrix(numStudents, numQuestions);
            int    niters           = options.numberOfSamples;
            int    burnin           = options.burnIn;
            double logisticVariance = Math.PI * Math.PI / 3;
            double shape            = 4.5;
            Gamma  precPrior        = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);
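            // The Gamma prior on prec makes N(x; mean, 1/prec) a scale mixture whose
            // implied variance E[1/prec] = rate/(shape-1) = logisticVariance matches the
            // variance pi^2/3 of the standard logistic distribution.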

            // NOTE: the point mass below overrides the scale-mixture prior, fixing the precision at 1.
            precPrior      = Gamma.PointMass(1);
            double[,] prec = new double[numStudents, numQuestions];
            double[,] x    = new double[numStudents, numQuestions];
            int numRejected = 0, numAttempts = 0;

            for (int iter = 0; iter < niters; iter++)
            {
                for (int student = 0; student < numStudents; student++)
                {
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // sample prec given ability, difficulty, discrimination, x
                        // N(x; xMean, 1/prec) is proportional (in prec) to Gamma(prec; 1.5, (x-xMean)^2/2)
                        Gamma  precPost = precPrior;
                        double xMean    = (ability[student] - difficulty[question]) * discrimination[question];
                        double delta    = x[student, question] - xMean;
                        Gamma  like     = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                        precPost.SetToProduct(precPost, like);
                        prec[student, question] = precPost.Sample();
                        // sample x given ability, difficulty, prec, data
                        // using an independence chain MH
                        bool     y      = (data[student, question] > 0);
                        double   sign   = y ? 1.0 : -1.0;
                        Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                        // we want to sample from xPrior restricted to the side implied by y (x>0 if y, else x<0)
                        // instead we sample from the Gaussian approximation xPost and correct with the MH step below
                        Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                        double   oldx  = x[student, question];
                        double   newx  = xPost.Sample();
                        numAttempts++;
                        if (newx * sign < 0)
                        {
                            newx = oldx; // rejected
                            numRejected++;
                        }
                        else
                        {
                            // importance weights
                            double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                            double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                            // acceptance ratio
                            double paccept = Math.Exp(neww - oldw);
                            if (paccept < 1 && Rand.Double() > paccept)
                            {
                                newx = oldx; // rejected
                                numRejected++;
                            }
                        }
                        x[student, question] = newx;
                        if (iter >= burnin)
                        {
                            double responseProb = MMath.Logistic(xMean);
                            responseProbMean[student, question] += responseProb;
                        }
                    }
                }
                if (abilityObserved == null)
                {
                    // sample ability given difficulty, prec, x
                    for (int student = 0; student < numStudents; student++)
                    {
                        Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                        for (int question = 0; question < numQuestions; question++)
                        {
                            // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                            Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, abilityLike);
                        }
                        ability[student] = post.Sample();
                        if (iter >= burnin)
                        {
                            abilityEstimator[student].Add(post);
                            abilitySamples[student].Add(ability[student]);
                        }
                    }
                }
                // sample difficulty given ability, prec, x
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
                    for (int student = 0; student < numStudents; student++)
                    {
                        // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                        if (discrimination[question] > 0)
                        {
                            Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, like);
                        }
                    }
                    difficulty[question] = post.Sample();
                    if (iter >= burnin)
                    {
                        //if (difficulty[question] > 100)
                        //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                        difficultyEstimator[question].Add(post);
                        difficultySamples[question].Add(difficulty[question]);
                    }
                }
                if (options.numParams > 1 && discriminationObserved == null)
                {
                    // sample discrimination given ability, difficulty, prec, x
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // moment-matching on the prior: approximate the lognormal prior on discrimination by a Gaussian with the same mean and variance
                        Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                        Gaussian post        = approxPrior;
                        for (int student = 0; student < numStudents; student++)
                        {
                            // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                            double delta = ability[student] - difficulty[question];
                            if (delta > 0)
                            {
                                Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                                post.SetToProduct(post, like);
                            }
                        }
                        TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                        double            olddisc   = discrimination[question];
                        double            newdisc   = postTrunc.Sample();
                        // importance weights
                        Func <double, double> priorLogProb = delegate(double d)
                        {
                            double logd = Math.Log(d);
                            return(Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                        };
                        double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                        double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                        // acceptance ratio
                        double paccept = Math.Exp(neww - oldw);
                        if (paccept < 1 && Rand.Double() > paccept)
                        {
                            // rejected
                        }
                        else
                        {
                            discrimination[question] = newdisc;
                        }
                        if (iter >= burnin)
                        {
                            discriminationEstimator[question].Add(discrimination[question]);
                            discriminationSamples[question].Add(discrimination[question]);
                        }
                    }
                }
                // sample abilityMean given ability, abilityPrec
                Gaussian abilityMeanPost = abilityMeanPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
                    abilityMeanPost *= like;
                }
                abilityMean = abilityMeanPost.Sample();
                // sample abilityPrec given ability, abilityMean
                Gamma abilityPrecPost = abilityPrecPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
                    abilityPrecPost *= like;
                }
                abilityPrec = abilityPrecPost.Sample();
                // sample difficultyMean given difficulty, difficultyPrec
                Gaussian difficultyMeanPost = difficultyMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
                    difficultyMeanPost *= like;
                }
                difficultyMean = difficultyMeanPost.Sample();
                // sample difficultyPrec given difficulty, difficultyMean
                Gamma difficultyPrecPost = difficultyPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
                    difficultyPrecPost *= like;
                }
                difficultyPrec = difficultyPrecPost.Sample();
                // sample discriminationMean given discrimination, discriminationPrec
                Gaussian discriminationMeanPost = discriminationMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
                    discriminationMeanPost *= like;
                }
                discriminationMean = discriminationMeanPost.Sample();
                // sample discriminationPrec given discrimination, discriminationMean
                Gamma discriminationPrecPost = discriminationPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
                    discriminationPrecPost *= like;
                }
                discriminationPrec = discriminationPrecPost.Sample();
                //if (iter % 1 == 0)
                //    Console.WriteLine("iter = {0}", iter);
            }
            //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
            //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
            int numSamplesUsed = niters - burnin;

            responseProbMean.Scale(1.0 / numSamplesUsed);
            //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
            difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            abilityPost    = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            if (options.numParams > 1)
            {
                discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
            }
            abilityCred    = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
            difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
            bool saveSamples = false;

            if (saveSamples)
            {
                using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
                {
                    int q = 11; // index of the question whose samples are exported
                    writer.Write("difficulty", difficultySamples[q]);
                    writer.Write("discrimination", discriminationSamples[q]);
                }
            }
        }
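GetCredibleIntervals is referenced above but not shown in this excerpt. A hypothetical sketch of such a helper, computing equal-tailed intervals from the stored samples; the name, signature, and return type here are assumptions, not the actual implementation:

        // Hypothetical sketch of a credible-interval helper (not the one used above):
        // equal-tailed interval from sorted samples.
        // Requires System, System.Collections.Generic, System.Linq.
        public static (double lower, double upper)[] GetCredibleIntervalsSketch(
            double probability, List<double>[] samples)
        {
            var result = new (double lower, double upper)[samples.Length];
            for (int i = 0; i < samples.Length; i++)
            {
                var sorted = samples[i].OrderBy(s => s).ToList();
                int lo = (int)Math.Floor((1 - probability) / 2 * (sorted.Count - 1));
                int hi = (int)Math.Ceiling((1 + probability) / 2 * (sorted.Count - 1));
                result[i] = (sorted[lo], sorted[hi]);
            }
            return(result);
        }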