Example #1
        // Stress test: repeatedly calls PrecisionAverageConditional with random natural parameters
        // and prints any inputs that trigger a "not converging" exception.  Runs until interrupted.
        internal void GaussianOpRandomTest()
        {
            int count = 0;

            while (true)
            {
                var mean      = Gaussian.FromNatural(Rand.Double(), Rand.Double());
                var sample    = Gaussian.FromNatural(0, Rand.Double());
                var precision = Gamma.FromNatural(Rand.Double() - 1, Rand.Double());
                try
                {
                    GaussianOp.PrecisionAverageConditional(sample, mean, precision);
                }
                catch (Exception e)
                {
                    if (e.Message == "not converging")
                    {
                        Console.WriteLine("sample {0} {1}", sample.MeanTimesPrecision, sample.Precision);
                        Console.WriteLine("mean {0} {1}", mean.MeanTimesPrecision, mean.Precision);
                        Console.WriteLine("prec {0} {1}", precision.Shape, precision.Rate);
                        Console.WriteLine();
                    }
                    else
                    {
                        Console.WriteLine(e);
                    }
                }
                count++;
                if (count % 100 == 0)
                {
                    Console.WriteLine("{0}", count);
                }
            }
        }
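For context, here is a minimal self-contained sketch of the same call, assuming the Microsoft.ML.Probabilistic.Distributions and .Factors namespaces used by these tests; the parameter values are illustrative only and not taken from the test above.

using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Factors;

// Incoming messages to a Gaussian factor N(sample; mean, 1/precision).
Gaussian sample    = Gaussian.FromMeanAndPrecision(0.3, 2.0);
Gaussian mean      = Gaussian.FromMeanAndPrecision(0.0, 1.0);
Gamma    precision = Gamma.FromShapeAndRate(2.0, 1.0);

// EP message from the factor to its precision argument.
Gamma precMsg = GaussianOp.PrecisionAverageConditional(sample, mean, precision);

// Multiplying the message by the incoming precision distribution gives the
// approximate posterior over the precision (the same pattern appears in examples #2 and #4 below).
Gamma precPosterior = precMsg * precision;
Console.WriteLine("message = {0}, posterior = {1}", precMsg, precPosterior);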
Example #2
 public void GaussianOpPrecision_IsMonotonicInSampleVariance()
 {
     using (TestUtils.TemporarilyAllowGaussianImproperMessages)
     {
         Gaussian mean = Gaussian.PointMass(0);
         for (int logRate = 0; logRate < 310; logRate++)
         {
             Gamma  precision    = Gamma.FromShapeAndRate(300, System.Math.Pow(10, logRate));
             double previousRate = double.PositiveInfinity;
             for (int i = 0; i < 310; i++)
             {
                 Gaussian sample  = Gaussian.FromMeanAndPrecision(0, System.Math.Pow(10, -i));
                 Gamma    precMsg = GaussianOp.PrecisionAverageConditional(sample, mean, precision);
                 //precMsg = GaussianOp_Laplace.PrecisionAverageConditional_slow(sample, mean, precision);
                 //Gamma precMsg2 = GaussianOp_Slow.PrecisionAverageConditional(sample, mean, precision);
                 //Console.WriteLine("{0}: {1} should be {2}", sample, precMsg, precMsg2);
                 Gamma post = precMsg * precision;
                 //Trace.WriteLine($"{sample}: {precMsg.Rate} post = {post.Rate}");
                 if (i >= logRate)
                 {
                     Assert.True(precMsg.Rate <= previousRate);
                 }
                 previousRate = precMsg.Rate;
             }
         }
     }
 }
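The product precMsg * precision used above combines the EP message with the incoming precision distribution; for Gamma distributions in shape/rate form this is the standard identity (not specific to this code)

    \mathrm{Ga}(\tau; a_1, b_1)\,\mathrm{Ga}(\tau; a_2, b_2) \;\propto\; \mathrm{Ga}(\tau;\, a_1 + a_2 - 1,\; b_1 + b_2),

so within each inner loop post.Rate = precMsg.Rate + precision.Rate, and the assertion on precMsg.Rate is equivalent to requiring that the rate of the posterior be non-increasing in the sample variance.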
Example #3
        /// <summary>Computations that depend on the observed value of vy</summary>
        private void Changed_vy()
        {
            if (this.Changed_vy_isDone)
            {
                return;
            }
            Gaussian vyMean_F = default(Gaussian);

            this.vyMean_marginal_F = Gaussian.Uniform();
            Gaussian vyMean_use_B = default(Gaussian);

            // Message to 'vyMean' from GaussianFromMeanAndVariance factor
            vyMean_F = GaussianFromMeanAndVarianceOp.SampleAverageConditional(0.0, 10.0);
            Gamma vySigma_F = default(Gamma);

            // Message to 'vySigma' from Sample factor
            vySigma_F = GammaFromShapeAndScaleOp.SampleAverageConditional(1.0, 1.0);
            Gamma vySigma_use_B = default(Gamma);

            // Message to 'vySigma_use' from Gaussian factor
            vySigma_use_B = GaussianOp.PrecisionAverageConditional(Gaussian.PointMass(this.Vy), vyMean_F, vySigma_F);
            // Message to 'vyMean_use' from Gaussian factor
            vyMean_use_B = GaussianOp.MeanAverageConditional(this.Vy, vyMean_F, vySigma_F, vySigma_use_B);
            // Message to 'vyMean_marginal' from Variable factor
            this.vyMean_marginal_F  = VariableOp.MarginalAverageConditional <Gaussian>(vyMean_use_B, vyMean_F, this.vyMean_marginal_F);
            this.vySigma_marginal_F = Gamma.Uniform();
            // Message to 'vySigma_marginal' from Variable factor
            this.vySigma_marginal_F = VariableOp.MarginalAverageConditional <Gamma>(vySigma_use_B, vySigma_F, this.vySigma_marginal_F);
            this.vy_marginal_F      = Gaussian.Uniform();
            // Message to 'vy_marginal' from DerivedVariable factor
            this.vy_marginal_F     = DerivedVariableOp.MarginalAverageConditional <Gaussian, double>(this.Vy, this.vy_marginal_F);
            this.Changed_vy_isDone = true;
        }
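This is compiled message-passing code of the kind the Infer.NET model compiler emits. Below is a rough, hypothetical model definition that would give rise to updates of this shape; the variable names follow the generated code, while the observed value and namespaces are assumptions, not taken from the original model.

using Microsoft.ML.Probabilistic.Models;
using Microsoft.ML.Probabilistic.Distributions;

// vyMean ~ N(0, 10), vySigma ~ Gamma(shape 1, scale 1) used as a precision,
// vy ~ N(vyMean, 1/vySigma) with vy observed.
Variable<double> vyMean  = Variable.GaussianFromMeanAndVariance(0, 10).Named("vyMean");
Variable<double> vySigma = Variable.GammaFromShapeAndScale(1, 1).Named("vySigma");
Variable<double> vy      = Variable.GaussianFromMeanAndPrecision(vyMean, vySigma).Named("vy");
vy.ObservedValue = 1.5;   // illustrative stand-in for this.Vy

InferenceEngine engine   = new InferenceEngine();
Gaussian vyMeanMarginal  = engine.Infer<Gaussian>(vyMean);
Gamma    vySigmaMarginal = engine.Infer<Gamma>(vySigma);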
Example #4
 // Prints the precision message and the rate of the resulting posterior over a sweep of sample variances.
 internal void GaussianOpPrecision3()
 {
     using (TestUtils.TemporarilyAllowGaussianImproperMessages)
     {
         Gaussian mean      = Gaussian.PointMass(0);
         Gamma    precision = Gamma.FromShapeAndRate(2, 10);
         for (int i = -10; i < 10; i++)
         {
             Gaussian sample  = Gaussian.FromMeanAndPrecision(0, System.Math.Pow(10, -i));
             Gamma    precMsg = GaussianOp.PrecisionAverageConditional(sample, mean, precision);
             //precMsg = GaussianOp_Laplace.PrecisionAverageConditional_slow(sample, mean, precision);
             //Gamma precMsg2 = GaussianOp_Slow.PrecisionAverageConditional(sample, mean, precision);
             //Console.WriteLine("{0}: {1} should be {2}", sample, precMsg, precMsg2);
             Gamma post = precMsg * precision;
             Console.WriteLine("{0}: {1} post = {2}", sample, precMsg.Rate, post.Rate);
         }
     }
 }
Example #5
        // Regression test: checks PrecisionAverageConditional and its _slow/Laplace variants against
        // previously problematic inputs with extreme natural parameters.
        public void GaussianOpPrecision()
        {
            Gamma    precMsg, precMsg2;
            Gaussian X, Mean;
            Gamma    Precision;

            X         = Gaussian.FromNatural(-1.5098177152950143E-09, 1.061649960537027E-168);
            Mean      = Gaussian.FromNatural(-3.6177299471249587, 0.11664740799025652);
            Precision = Gamma.FromShapeAndRate(306.39423695125572, 1.8326832031565403E+170);
            precMsg   = Gamma.PointMass(0);
            precMsg2  = GaussianOp_Slow.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);
            precMsg2 = GaussianOp.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);

            X         = Gaussian.FromNatural(-0.55657497231637854, 6.6259783218464713E-141);
            Mean      = Gaussian.FromNatural(-2.9330116542965374, 0.07513822741674292);
            Precision = Gamma.FromShapeAndRate(308.8184220331475, 4.6489382805051884E+142);
            precMsg   = Gamma.FromShapeAndRate(1.5000000000000628, 3.5279086383286634E+279);
            precMsg2  = GaussianOp_Slow.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);
            precMsg2 = GaussianOp.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);

            X         = Gaussian.FromNatural(0, 1.0705890985886898E-153);
            Mean      = Gaussian.PointMass(0);
            Precision = Gamma.FromShapeAndRate(1.6461630749684018, 1.0021354807958952E+153);
            precMsg   = Gamma.FromShapeAndRate(1.3230815374839406, 5.7102212927459039E+151);
            precMsg2  = GaussianOp_Slow.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);
            precMsg2 = GaussianOp.PrecisionAverageConditional(X, Mean, Precision);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);

            GaussianOp.PrecisionAverageConditional(Gaussian.FromNatural(0, 0.00849303091340374), Gaussian.FromNatural(0.303940178036662, 0.0357912415805232),
                                                   Gamma.FromNatural(0.870172077263786 - 1, 0.241027170904459));
            GaussianOp.PrecisionAverageConditional(Gaussian.FromNatural(0, 0.932143343115292), Gaussian.FromNatural(0.803368837946732, 0.096549750816333),
                                                   Gamma.FromNatural(0.63591693650741 - 1, 0.728459389753854));
            GaussianOp.PrecisionAverageConditional(Gaussian.FromNatural(0, 0.799724777601531), Gaussian.FromNatural(0.351882387116497, 0.0795619408970522),
                                                   Gamma.FromNatural(0.0398852019756498 - 1, 0.260567798400562));
            GaussianOp.PrecisionAverageConditional(Gaussian.FromNatural(0, 0.826197353576402), Gaussian.FromNatural(0.655970732055591, 0.125333868956814),
                                                   Gamma.FromNatural(0.202543332801453 - 1, 0.147645744563847));

            precMsg = GaussianOp.PrecisionAverageConditional(new Gaussian(-6.235e+207, 1.947e+209), Gaussian.PointMass(11), Gamma.PointMass(7));
            Assert.True(!double.IsNaN(precMsg.Rate));

            Gaussian X0         = Gaussian.FromMeanAndVariance(3, 0.5);
            Gaussian Mean0      = Gaussian.FromMeanAndVariance(7, 1.0 / 3);
            Gamma    Precision0 = Gamma.FromShapeAndScale(3, 3);

            precMsg = GaussianOp_Slow.PrecisionAverageConditional(Gaussian.FromNatural(0.010158033515400506, 0.0041117304509528533),
                                                                  Gaussian.FromNatural(33.157651455559929, 13.955304749880149),
                                                                  Gamma.FromShapeAndRate(7.1611372018172794, 1.8190207317123008));

            precMsg = GaussianOp_Slow.PrecisionAverageConditional(Gaussian.FromNatural(-0.020177353724675218, 0.0080005002339157711),
                                                                  Gaussian.FromNatural(-12.303440746896294, 4.6439574387849714),
                                                                  Gamma.FromShapeAndRate(5.6778922774773992, 1.0667129560350435));

            precMsg = GaussianOp_Slow.PrecisionAverageConditional(Gaussian.PointMass(248), Gaussian.FromNatural(0.099086933095776319, 0.00032349393599347853),
                                                                  Gamma.FromShapeAndRate(0.001, 0.001));
            precMsg2 = GaussianOp.PrecisionAverageConditional_slow(Gaussian.PointMass(248), Gaussian.FromNatural(0.099086933095776319, 0.00032349393599347853),
                                                                   Gamma.FromShapeAndRate(0.001, 0.001));
            Assert.True(precMsg.MaxDiff(precMsg2) < 0.3);

            precMsg =
                GaussianOp_Slow.PrecisionAverageConditional(Gaussian.FromNatural(-0.21769764449791806, 0.0000024898838689952023),
                                                            Gaussian.FromNatural(0, 0.5), Gamma.FromShapeAndRate(5, 5));
            precMsg2 =
                GaussianOp.PrecisionAverageConditional_slow(Gaussian.FromNatural(-0.21769764449791806, 0.0000024898838689952023),
                                                            Gaussian.FromNatural(0, 0.5), Gamma.FromShapeAndRate(5, 5));
            //Console.WriteLine("{0} should be {1}", precMsg2, precMsg);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);
            Gamma precMsg3 =
                GaussianOp_Laplace.PrecisionAverageConditional_slow(Gaussian.FromNatural(-0.21769764449791806, 0.0000024898838689952023),
                                                                    Gaussian.FromNatural(0, 0.5), Gamma.FromShapeAndRate(5, 5));

            precMsg2 =
                GaussianOp.PrecisionAverageConditional(Gaussian.FromNatural(-0.21769764449791806, 0.0000024898838689952023),
                                                       Gaussian.FromNatural(0, 0.5), Gamma.FromShapeAndRate(5, 5));
            //Console.WriteLine("{0} should be {1}", precMsg2, precMsg);
            Assert.True(precMsg.MaxDiff(precMsg2) < 1e-4);

            Assert.True(GaussianOp.PrecisionAverageConditional_slow(Gaussian.FromNatural(-2.3874057896477092, 0.0070584383295080044),
                                                                    Gaussian.FromNatural(1.3999879871144227, 0.547354438587195), Gamma.FromShapeAndRate(3, 1))
                        .MaxDiff(Gamma.FromShapeAndRate(1.421, 55546)) < 10);

            // Unknown precision
            if (GaussianOp.ForceProper)
            {
                Assert.True(GaussianOp.PrecisionAverageConditional_slow(X0, Mean0, Precision0).MaxDiff(Gamma.FromShapeAndRate(1, 0.3632)) < 1e-4);
            }
            else
            {
                Assert.True(GaussianOp.PrecisionAverageConditional_slow(X0, Mean0, Precision0).MaxDiff(Gamma.FromShapeAndRate(-0.96304, -0.092572)) < 1e-4);
            }
            if (GaussianOp.ForceProper)
            {
                Assert.True(GaussianOp.PrecisionAverageConditional_slow(Gaussian.PointMass(3.0), Mean0, Precision0).MaxDiff(Gamma.FromShapeAndRate(1, 4.13824)) < 1e-4);
            }
            else
            {
                Assert.True(GaussianOp.PrecisionAverageConditional_slow(Gaussian.PointMass(3.0), Mean0, Precision0).MaxDiff(Gamma.FromShapeAndRate(-0.24693, 2.2797)) < 1e-4);
            }
            Assert.True(GaussianOp.PrecisionAverageConditional(3.0, 7.0).MaxDiff(Gamma.FromShapeAndRate(1.5, 8.0)) < 1e-4);
            Assert.True(GaussianOp.PrecisionAverageConditional_slow(new Gaussian(), Gaussian.PointMass(7.0), Precision0).MaxDiff(Gamma.FromShapeAndRate(1.0, 0.0)) < 1e-4);
        }
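Several of the regression cases above build distributions directly from natural parameters. For reference, Gaussian.FromNatural(τ, λ) stores mean-times-precision τ and precision λ, and Gamma.FromNatural(a − 1, b) stores shape a and rate b (as the "− 1" in the calls above suggests), so

    m = \tau/\lambda, \qquad v = 1/\lambda, \qquad \mathbb{E}[\text{precision}] = a/b.

Inputs with λ on the order of 10^{-150} or smaller therefore correspond to nearly flat Gaussian messages, which is exactly the extreme regime these regression cases exercise.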
Example #6
        public void SparseGaussianListFactor()
        {
            SparseGaussianList.DefaultTolerance = 1e-10;
            var calcSuffix     = ": calculation differs between sparse and dense";
            var sparsitySuffix = ": result is not sparse as expected";
            var calcErrMsg     = "";
            var sparsityErrMsg = "";
            var tolerance      = 1e-10;

            Rand.Restart(12347);
            int listSize = 50;

            // True distribution for the means
            var sparseMeanDist = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(1, 2), tolerance);

            sparseMeanDist[3] = Gaussian.FromMeanAndPrecision(4, 5);
            sparseMeanDist[6] = Gaussian.FromMeanAndPrecision(7, 8);
            var meanDist        = sparseMeanDist.ToArray();
            var sparseMeanPoint = SparseList <double> .Constant(listSize, 0.1);

            sparseMeanPoint[3] = 0.7;
            sparseMeanPoint[6] = 0.8;
            var meanPoint = sparseMeanPoint.ToArray();

            // True distribution for the precisions
            var sparsePrecDist = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(1.1, 1.2), tolerance);

            sparsePrecDist[3] = Gamma.FromShapeAndRate(2.3, 2.4);
            sparsePrecDist[6] = Gamma.FromShapeAndRate(3.4, 4.5);
            var precDist        = sparsePrecDist.ToArray();
            var sparsePrecPoint = SparseList <double> .Constant(listSize, 0.1);

            sparsePrecPoint[3] = 5.6;
            sparsePrecPoint[6] = 0.5;
            var precPoint = sparsePrecPoint.ToArray();

            var sparseSampleDist = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 1.5), tolerance);

            sparseSampleDist[3] = Gaussian.FromMeanAndPrecision(-0.5, 2.0);
            sparseSampleDist[9] = Gaussian.FromMeanAndPrecision(1.6, 0.4);
            var sampleDist        = sparseSampleDist.ToArray();
            var sparseSamplePoint = SparseList <double> .Constant(listSize, 0.5);

            sparseSamplePoint[3] = 0.1;
            sparseSamplePoint[9] = 2.3;
            var samplePoint = sparseSamplePoint.ToArray();

            var toSparseSampleDist = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(-0.2, 0.3), tolerance);

            toSparseSampleDist[3] = Gaussian.FromMeanAndPrecision(2.1, 3.2);
            toSparseSampleDist[4] = Gaussian.FromMeanAndPrecision(1.3, 0.7);
            var toSampleDist = toSparseSampleDist.ToArray();

            var toSparsePrecDist = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(2.3, 3.4), tolerance);

            toSparsePrecDist[3] = Gamma.FromShapeAndRate(3.4, 4.5);
            toSparsePrecDist[4] = Gamma.FromShapeAndRate(5.6, 6.7);
            var toPrecDist = toSparsePrecDist.ToArray();

            // ---------------------------
            // Check average log factor
            // ---------------------------
            calcErrMsg = "Average log factor" + calcSuffix;
            // Dist, dist, dist
            var sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSampleDist, sparseMeanDist, sparsePrecDist);
            var avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(sampleDist[i], meanDist[i], precDist[i])).Sum();

            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Dist, dist, point
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSampleDist, sparseMeanDist, sparsePrecPoint);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(sampleDist[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Dist, point, dist
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSampleDist, sparseMeanPoint, sparsePrecDist);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(sampleDist[i], meanPoint[i], precDist[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Dist, point, point
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSampleDist, sparseMeanPoint, sparsePrecPoint);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(sampleDist[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Point, dist, dist
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSamplePoint, sparseMeanDist, sparsePrecDist);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(samplePoint[i], meanDist[i], precDist[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Point, dist, point
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSamplePoint, sparseMeanDist, sparsePrecPoint);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(samplePoint[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Point, point, dist
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSamplePoint, sparseMeanPoint, sparsePrecDist);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(samplePoint[i], meanPoint[i], precDist[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);
            // Point, point, point
            sparseAvgLog = SparseGaussianListOp.AverageLogFactor(sparseSamplePoint, sparseMeanPoint, sparsePrecPoint);
            avgLog       = Util.ArrayInit(listSize, i => GaussianOp.AverageLogFactor(samplePoint[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(avgLog - sparseAvgLog) < tolerance, calcErrMsg);

            // ---------------------------
            // Check log average factor
            // ---------------------------
            calcErrMsg = "Log average factor" + calcSuffix;
            var sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSampleDist, sparseMeanDist, sparsePrecDist, toSparsePrecDist);
            var logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(sampleDist[i], meanDist[i], precDist[i], toPrecDist[i])).Sum();

            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Dist, dist, point
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSampleDist, sparseMeanDist, sparsePrecPoint);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(sampleDist[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Dist, point, dist
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSampleDist, sparseMeanPoint, sparsePrecDist, toSparsePrecDist);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(sampleDist[i], meanPoint[i], precDist[i], toPrecDist[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Dist, point, point
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSampleDist, sparseMeanPoint, sparsePrecPoint);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(sampleDist[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Point, dist, dist
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSamplePoint, sparseMeanDist, sparsePrecDist, toSparsePrecDist);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(samplePoint[i], meanDist[i], precDist[i], toPrecDist[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Point, dist, point
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSamplePoint, sparseMeanDist, sparsePrecPoint);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(samplePoint[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Point, point, dist
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSamplePoint, sparseMeanPoint, sparsePrecDist);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(samplePoint[i], meanPoint[i], precDist[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);
            // Point, point, point
            sparseLogAvg = SparseGaussianListOp.LogAverageFactor(sparseSamplePoint, sparseMeanPoint, sparsePrecPoint);
            logAvg       = Util.ArrayInit(listSize, i => GaussianOp.LogAverageFactor(samplePoint[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(logAvg - sparseLogAvg) < tolerance, calcErrMsg);

            // ---------------------------
            // Check log evidence ratio
            // ---------------------------
            calcErrMsg = "Log evidence ratio" + calcSuffix;
            var sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSampleDist, sparseMeanDist, sparsePrecDist, toSparseSampleDist, toSparsePrecDist);
            var evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(sampleDist[i], meanDist[i], precDist[i], toSampleDist[i], toPrecDist[i])).Sum();

            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Dist, dist, point
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSampleDist, sparseMeanDist, sparsePrecPoint);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(sampleDist[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Dist, point, dist
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSampleDist, sparseMeanPoint, sparsePrecDist, toSparseSampleDist, toSparsePrecDist);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(sampleDist[i], meanPoint[i], precDist[i], toSampleDist[i], toPrecDist[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Dist, point, point
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSampleDist, sparseMeanPoint, sparsePrecPoint);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(sampleDist[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Point, dist, dist
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSamplePoint, sparseMeanDist, sparsePrecDist, toSparsePrecDist);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(samplePoint[i], meanDist[i], precDist[i], toPrecDist[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Point, dist, point
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSamplePoint, sparseMeanDist, sparsePrecPoint);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(samplePoint[i], meanDist[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Point, point, dist
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSamplePoint, sparseMeanPoint, sparsePrecDist);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(samplePoint[i], meanPoint[i], precDist[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);
            // Point, point, point
            sparseEvidRat = SparseGaussianListOp.LogEvidenceRatio(sparseSamplePoint, sparseMeanPoint, sparsePrecPoint);
            evidRat       = Util.ArrayInit(listSize, i => GaussianOp.LogEvidenceRatio(samplePoint[i], meanPoint[i], precPoint[i])).Sum();
            TAssert.True(System.Math.Abs(evidRat - sparseEvidRat) < tolerance, calcErrMsg);

            // ---------------------------
            // Check SampleAverageConditional
            // ---------------------------
            calcErrMsg     = "SampleAverageConditional" + calcSuffix;
            sparsityErrMsg = "SampleAverageConditional" + sparsitySuffix;
            // Use different common value to ensure this gets properly set
            var sparseSampleAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);

            sparseSampleAvgConditional = SparseGaussianListOp.SampleAverageConditional(sparseSampleDist, sparseMeanDist, sparsePrecDist, toSparsePrecDist, sparseSampleAvgConditional);
            var sampleAvgConditional = Util.ArrayInit(listSize, i => GaussianOp.SampleAverageConditional(sampleDist[i], meanDist[i], precDist[i], toPrecDist[i]));

            TAssert.True(3 == sparseSampleAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseSampleAvgConditional.MaxDiff(sampleAvgConditional) < tolerance, calcErrMsg);

            sparseSampleAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseSampleAvgConditional = SparseGaussianListOp.SampleAverageConditional(sparseSampleDist, sparseMeanPoint, sparsePrecDist, toSparsePrecDist, sparseSampleAvgConditional);
            sampleAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.SampleAverageConditional(sampleDist[i], meanPoint[i], precDist[i], toPrecDist[i]));
            TAssert.True(3 == sparseSampleAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseSampleAvgConditional.MaxDiff(sampleAvgConditional) < tolerance, calcErrMsg);

            sparseSampleAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseSampleAvgConditional = SparseGaussianListOp.SampleAverageConditional(sparseMeanDist, sparsePrecPoint, sparseSampleAvgConditional);
            sampleAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.SampleAverageConditional(meanDist[i], precPoint[i]));
            TAssert.True(2 == sparseSampleAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseSampleAvgConditional.MaxDiff(sampleAvgConditional) < tolerance, calcErrMsg);

            sparseSampleAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseSampleAvgConditional = SparseGaussianListOp.SampleAverageConditional(sparseMeanPoint, sparsePrecPoint, sparseSampleAvgConditional);
            sampleAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.SampleAverageConditional(meanPoint[i], precPoint[i]));
            TAssert.True(2 == sparseSampleAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseSampleAvgConditional.MaxDiff(sampleAvgConditional) < tolerance, calcErrMsg);

            // ---------------------------
            // Check MeanAverageConditional
            // ---------------------------
            calcErrMsg     = "MeanAverageConditional" + calcSuffix;
            sparsityErrMsg = "MeanAverageConditional" + sparsitySuffix;
            // Use different common value to ensure this gets properly set
            var sparseMeanAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);

            sparseMeanAvgConditional = SparseGaussianListOp.MeanAverageConditional(sparseSampleDist, sparseMeanDist, sparsePrecDist, toSparsePrecDist, sparseMeanAvgConditional);
            var meanAvgConditional = Util.ArrayInit(listSize, i => GaussianOp.MeanAverageConditional(sampleDist[i], meanDist[i], precDist[i], toPrecDist[i]));

            TAssert.True(3 == sparseMeanAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseMeanAvgConditional.MaxDiff(meanAvgConditional) < tolerance, calcErrMsg);

            sparseMeanAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseMeanAvgConditional = SparseGaussianListOp.MeanAverageConditional(sparseSamplePoint, sparseMeanDist, sparsePrecDist, toSparsePrecDist, sparseMeanAvgConditional);
            meanAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.MeanAverageConditional(samplePoint[i], meanDist[i], precDist[i], toPrecDist[i]));
            TAssert.True(3 == sparseMeanAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseMeanAvgConditional.MaxDiff(meanAvgConditional) < tolerance, calcErrMsg);

            sparseMeanAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseMeanAvgConditional = SparseGaussianListOp.MeanAverageConditional(sparseSampleDist, sparsePrecPoint, sparseMeanAvgConditional);
            meanAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.MeanAverageConditional(sampleDist[i], precPoint[i]));
            TAssert.True(3 == sparseMeanAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseMeanAvgConditional.MaxDiff(meanAvgConditional) < tolerance, calcErrMsg);

            sparseMeanAvgConditional = SparseGaussianList.Constant(listSize, Gaussian.FromMeanAndPrecision(0.5, 0.6), tolerance);
            sparseMeanAvgConditional = SparseGaussianListOp.MeanAverageConditional(sparseSamplePoint, sparsePrecPoint, sparseMeanAvgConditional);
            meanAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.MeanAverageConditional(samplePoint[i], precPoint[i]));
            TAssert.True(3 == sparseMeanAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparseMeanAvgConditional.MaxDiff(meanAvgConditional) < tolerance, calcErrMsg);

            // ---------------------------
            // Check PrecisionAverageConditional
            // ---------------------------
            calcErrMsg     = "PrecisionAverageConditional" + calcSuffix;
            sparsityErrMsg = "PrecisionAverageConditional" + sparsitySuffix;
            // Use different common value to ensure this gets properly set
            var sparsePrecAvgConditional = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(2.1, 3.2), tolerance);

            sparsePrecAvgConditional = SparseGaussianListOp.PrecisionAverageConditional(sparseSampleDist, sparseMeanDist, sparsePrecDist, sparsePrecAvgConditional);
            var precAvgConditional = Util.ArrayInit(listSize, i => GaussianOp.PrecisionAverageConditional(sampleDist[i], meanDist[i], precDist[i]));

            TAssert.True(3 == sparsePrecAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparsePrecAvgConditional.MaxDiff(precAvgConditional) < tolerance, calcErrMsg);

            sparsePrecAvgConditional = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(2.1, 3.2), tolerance);
            sparsePrecAvgConditional = SparseGaussianListOp.PrecisionAverageConditional(sparseSamplePoint, sparseMeanDist, sparsePrecDist, sparsePrecAvgConditional);
            precAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.PrecisionAverageConditional(Gaussian.PointMass(samplePoint[i]), meanDist[i], precDist[i]));
            TAssert.True(3 == sparsePrecAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparsePrecAvgConditional.MaxDiff(precAvgConditional) < tolerance, calcErrMsg);

            sparsePrecAvgConditional = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(2.1, 3.2), tolerance);
            sparsePrecAvgConditional = SparseGaussianListOp.PrecisionAverageConditional(sparseSampleDist, sparseMeanPoint, sparsePrecDist, sparsePrecAvgConditional);
            precAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.PrecisionAverageConditional(sampleDist[i], Gaussian.PointMass(meanPoint[i]), precDist[i]));
            TAssert.True(3 == sparsePrecAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparsePrecAvgConditional.MaxDiff(precAvgConditional) < tolerance, calcErrMsg);

            sparsePrecAvgConditional = SparseGammaList.Constant(listSize, Gamma.FromShapeAndRate(2.1, 3.2), tolerance);
            sparsePrecAvgConditional = SparseGaussianListOp.PrecisionAverageConditional(sparseSamplePoint, sparseMeanPoint, sparsePrecAvgConditional);
            precAvgConditional       = Util.ArrayInit(listSize, i => GaussianOp.PrecisionAverageConditional(samplePoint[i], meanPoint[i]));
            TAssert.True(3 == sparsePrecAvgConditional.SparseCount, sparsityErrMsg);
            TAssert.True(sparsePrecAvgConditional.MaxDiff(precAvgConditional) < tolerance, calcErrMsg);
        }
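The SparseCount assertions above rely on the sparse lists storing one common value plus explicit deviations. A minimal sketch of that representation follows, assuming the same namespaces as the test; the expected count is my reading of the semantics, not something asserted by the original code.

using System;
using Microsoft.ML.Probabilistic.Distributions;

// Every element starts as Gaussian(mean 1, precision 2); only deviations are stored explicitly.
var list = SparseGaussianList.Constant(50, Gaussian.FromMeanAndPrecision(1, 2), 1e-10);
list[3] = Gaussian.FromMeanAndPrecision(4, 5);
list[6] = Gaussian.FromMeanAndPrecision(7, 8);
// Presumably 2 here: only the two overridden entries differ from the common value.
Console.WriteLine(list.SparseCount);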
Example #7
        // Gibbs sampler for the difficulty/ability (item response) model: alternately samples the
        // latent responses, abilities, difficulties, discriminations and their hyperparameters,
        // accumulating posterior estimates after the burn-in period.
        public void Sample(Options options, Matrix data)
        {
            if (options.numParams > 2)
            {
                throw new Exception("numParams > 2");
            }
            int numStudents  = data.Rows;
            int numQuestions = data.Cols;
            // initialize the sampler at the mean of the priors (not sampling from the priors)
            double abilityMean        = abilityMeanPrior.GetMean();
            double abilityPrec        = abilityPrecPrior.GetMean();
            double difficultyMean     = difficultyMeanPrior.GetMean();
            double difficultyPrec     = difficultyPrecPrior.GetMean();
            double discriminationMean = discriminationMeanPrior.GetMean();
            double discriminationPrec = discriminationPrecPrior.GetMean();

            double[]            ability             = new double[numStudents];
            double[]            difficulty          = new double[numQuestions];
            List <double>[]     difficultySamples   = new List <double> [numQuestions];
            GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                difficultyEstimator[question] = new GaussianEstimator();
                difficultySamples[question]   = new List <double>();
                if (difficultyObserved != null)
                {
                    difficulty[question] = difficultyObserved[question];
                    difficultyEstimator[question].Add(difficultyObserved[question]);
                    difficultySamples[question].Add(difficultyObserved[question]);
                }
            }
            List <double>[]     abilitySamples   = new List <double> [numStudents];
            GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
            for (int student = 0; student < abilityEstimator.Length; student++)
            {
                abilityEstimator[student] = new GaussianEstimator();
                abilitySamples[student]   = new List <double>();
                if (abilityObserved != null)
                {
                    ability[student] = abilityObserved[student];
                    abilityEstimator[student].Add(abilityObserved[student]);
                    abilitySamples[student].Add(abilityObserved[student]);
                }
            }
            double[]         discrimination          = new double[numQuestions];
            List <double>[]  discriminationSamples   = new List <double> [numQuestions];
            GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
            for (int question = 0; question < numQuestions; question++)
            {
                discriminationEstimator[question] = new GammaEstimator();
                discriminationSamples[question]   = new List <double>();
                discrimination[question]          = 1;
                if (discriminationObserved != null)
                {
                    discrimination[question] = discriminationObserved[question];
                    discriminationEstimator[question].Add(discriminationObserved[question]);
                    discriminationSamples[question].Add(discriminationObserved[question]);
                }
            }
            responseProbMean = new Matrix(numStudents, numQuestions);
            int    niters           = options.numberOfSamples;
            int    burnin           = options.burnIn;
            double logisticVariance = Math.PI * Math.PI / 3;
            double shape            = 4.5;
            Gamma  precPrior        = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);

            precPrior      = Gamma.PointMass(1);
            double[,] prec = new double[numStudents, numQuestions];
            double[,] x    = new double[numStudents, numQuestions];
            int numRejected = 0, numAttempts = 0;

            for (int iter = 0; iter < niters; iter++)
            {
                for (int student = 0; student < numStudents; student++)
                {
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // sample prec given ability, difficulty, discrimination, x
                        // As a function of prec, N(x; xMean, 1/prec) is proportional to Gamma(prec; 1.5, (x - xMean)^2/2),
                        // where xMean = discrimination * (ability - difficulty)
                        Gamma  precPost = precPrior;
                        double xMean    = (ability[student] - difficulty[question]) * discrimination[question];
                        double delta    = x[student, question] - xMean;
                        Gamma  like     = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                        precPost.SetToProduct(precPost, like);
                        prec[student, question] = precPost.Sample();
                        // sample x given ability, difficulty, prec, data
                        // using an independence chain MH
                        bool     y      = (data[student, question] > 0);
                        double   sign   = y ? 1.0 : -1.0;
                        Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                        // we want to sample from xPrior restricted to the side indicated by y (i.e. sign*x > 0)
                        // instead we sample from the EP approximation xPost and correct via the MH step below
                        Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                        double   oldx  = x[student, question];
                        double   newx  = xPost.Sample();
                        numAttempts++;
                        if (newx * sign < 0)
                        {
                            newx = oldx; // rejected
                            numRejected++;
                        }
                        else
                        {
                            // importance weights
                            double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                            double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                            // acceptance ratio
                            double paccept = Math.Exp(neww - oldw);
                            if (paccept < 1 && Rand.Double() > paccept)
                            {
                                newx = oldx; // rejected
                                numRejected++;
                            }
                        }
                        x[student, question] = newx;
                        if (iter >= burnin)
                        {
                            double responseProb = MMath.Logistic(xMean);
                            responseProbMean[student, question] += responseProb;
                        }
                    }
                }
                if (abilityObserved == null)
                {
                    // sample ability given difficulty, prec, x
                    for (int student = 0; student < numStudents; student++)
                    {
                        Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                        for (int question = 0; question < numQuestions; question++)
                        {
                            // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                            Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, abilityLike);
                        }
                        ability[student] = post.Sample();
                        if (iter >= burnin)
                        {
                            abilityEstimator[student].Add(post);
                            abilitySamples[student].Add(ability[student]);
                        }
                    }
                }
                // sample difficulty given ability, prec, x
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
                    for (int student = 0; student < numStudents; student++)
                    {
                        // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                        if (discrimination[question] > 0)
                        {
                            Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                            post.SetToProduct(post, like);
                        }
                    }
                    difficulty[question] = post.Sample();
                    if (iter >= burnin)
                    {
                        //if (difficulty[question] > 100)
                        //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                        difficultyEstimator[question].Add(post);
                        difficultySamples[question].Add(difficulty[question]);
                    }
                }
                if (options.numParams > 1 && discriminationObserved == null)
                {
                    // sample discrimination given ability, difficulty, prec, x
                    for (int question = 0; question < numQuestions; question++)
                    {
                        // moment-matching on the prior
                        Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                        Gaussian post        = approxPrior;
                        for (int student = 0; student < numStudents; student++)
                        {
                            // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                            double delta = ability[student] - difficulty[question];
                            if (delta > 0)
                            {
                                Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                                post.SetToProduct(post, like);
                            }
                        }
                        TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                        double            olddisc   = discrimination[question];
                        double            newdisc   = postTrunc.Sample();
                        // importance weights
                        Func <double, double> priorLogProb = delegate(double d)
                        {
                            double logd = Math.Log(d);
                            return(Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                        };
                        double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                        double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                        // acceptance ratio
                        double paccept = Math.Exp(neww - oldw);
                        if (paccept < 1 && Rand.Double() > paccept)
                        {
                            // rejected
                        }
                        else
                        {
                            discrimination[question] = newdisc;
                        }
                        if (iter >= burnin)
                        {
                            discriminationEstimator[question].Add(discrimination[question]);
                            discriminationSamples[question].Add(discrimination[question]);
                        }
                    }
                }
                // sample abilityMean given ability, abilityPrec
                Gaussian abilityMeanPost = abilityMeanPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
                    abilityMeanPost *= like;
                }
                abilityMean = abilityMeanPost.Sample();
                // sample abilityPrec given ability, abilityMean
                Gamma abilityPrecPost = abilityPrecPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
                    abilityPrecPost *= like;
                }
                abilityPrec = abilityPrecPost.Sample();
                // sample difficultyMean given difficulty, difficultyPrec
                Gaussian difficultyMeanPost = difficultyMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
                    difficultyMeanPost *= like;
                }
                difficultyMean = difficultyMeanPost.Sample();
                // sample difficultyPrec given difficulty, difficultyMean
                Gamma difficultyPrecPost = difficultyPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
                    difficultyPrecPost *= like;
                }
                difficultyPrec = difficultyPrecPost.Sample();
                // sample discriminationMean given discrimination, discriminationPrec
                Gaussian discriminationMeanPost = discriminationMeanPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
                    discriminationMeanPost *= like;
                }
                discriminationMean = discriminationMeanPost.Sample();
                // sample discriminationPrec given discrimination, discriminationMean
                Gamma discriminationPrecPost = discriminationPrecPrior;
                for (int question = 0; question < numQuestions; question++)
                {
                    Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
                    discriminationPrecPost *= like;
                }
                discriminationPrec = discriminationPrecPost.Sample();
                //if (iter % 1 == 0)
                //    Console.WriteLine("iter = {0}", iter);
            }
            //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
            //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
            int numSamplesUsed = niters - burnin;

            responseProbMean.Scale(1.0 / numSamplesUsed);
            //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
            difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            abilityPost    = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
            if (options.numParams > 1)
            {
                discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
            }
            abilityCred    = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
            difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
            bool saveSamples = false;

            if (saveSamples)
            {
                using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
                {
                    int q = 11;
                    writer.Write("difficulty", difficultySamples[q]);
                    writer.Write("discrimination", discriminationSamples[q]);
                }
            }
        }
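The conjugate precision updates in this sampler (the Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta) likelihood term, and the GaussianOp.PrecisionAverageConditional(sample, mean) terms used for abilityPrec, difficultyPrec and discriminationPrec) follow the standard Normal–Gamma identity: viewed as a function of the precision τ,

    \mathcal{N}(x \mid m, \tau^{-1}) \;\propto\; \tau^{1/2} \exp\!\big(-\tfrac{\tau}{2}(x-m)^2\big) \;\propto\; \mathrm{Ga}\big(\tau;\ \tfrac{3}{2},\ \tfrac{(x-m)^2}{2}\big).

The two Metropolis–Hastings corrections accept a proposal with probability min(1, exp(neww − oldw)), where each weight is the log ratio of the target density to the proposal density, as required for an independence chain.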
 /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SparseGaussianListOp"]/message_doc[@name="PrecisionAverageConditional(ISparseList{double}, ISparseList{double}, SparseGammaList)"]/*'/>
 public static SparseGammaList PrecisionAverageConditional(ISparseList <double> sample, ISparseList <double> mean, SparseGammaList result)
 {
     result.SetToFunction(sample, mean, (s, m) => GaussianOp.PrecisionAverageConditional(s, m));
     return(result);
 }
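A minimal usage sketch for this sparse overload, mirroring the point/point case in example #6; the list size, values and namespaces are illustrative assumptions.

using Microsoft.ML.Probabilistic.Collections;   // SparseList<T> (assumed namespace)
using Microsoft.ML.Probabilistic.Distributions; // SparseGammaList, Gamma
using Microsoft.ML.Probabilistic.Factors;       // SparseGaussianListOp

int listSize = 50;
var samplePoint = SparseList<double>.Constant(listSize, 0.5);
var meanPoint   = SparseList<double>.Constant(listSize, 0.1);
samplePoint[3]  = 0.1;   // a few non-common entries
meanPoint[3]    = 0.7;

// The result buffer is filled element-wise via SetToFunction and returned.
var result = SparseGammaList.Constant(listSize, Gamma.Uniform(), 1e-10);
result = SparseGaussianListOp.PrecisionAverageConditional(samplePoint, meanPoint, result);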