Example #1
        public static Gaussian FindxF4(double[] xs, double[] logTrue, Gaussian xF)
        {
            double[] logApprox         = new double[xs.Length];
            Func <Vector, double> func = delegate(Vector x2)
            {
                Gaussian xFt = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
                for (int i = 0; i < xs.Length; i++)
                {
                    logApprox[i] = xFt.GetLogProb(xs[i]);
                }
                Normalize(logApprox);
                double sum = 0;
                for (int i = 0; i < xs.Length; i++)
                {
                    sum += System.Math.Abs(System.Math.Exp(logApprox[i]) - System.Math.Exp(logTrue[i]));
                    //sum += Math.Pow(Math.Exp(logApprox[i]) - Math.Exp(logTrue[i]), 2);
                    //sum += Math.Pow(Math.Exp(logApprox[i]/2) - Math.Exp(logTrue[i]/2), 2);
                    //sum += Math.Exp(logApprox[i])*(logApprox[i] - logTrue[i]);
                    //sum += Math.Exp(logTrue[i])*(logTrue[i] - logApprox[i]);
                }
                return(sum);
            };

            double m = xF.GetMean();
            double p = xF.Precision;
            Vector x = Vector.FromArray(m, System.Math.Log(p));

            Minimize2(func, x);
            return(Gaussian.FromMeanAndPrecision(x[0], System.Math.Exp(x[1])));
        }
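FindxF4 searches over a two-element vector holding the candidate mean and the log of the candidate precision, so the precision stays positive without any constraint handling. Below is a minimal standalone sketch of that parameterization and of GetLogProb, the density call being matched against logTrue; the class name, Main method, and numeric values are illustrative only.

using System;
using Microsoft.ML.Probabilistic.Distributions;

public static class ParameterizationSketch
{
    public static void Main()
    {
        // Search variable t = log(precision): exp(t) is always a valid, positive precision.
        double mean = 1.5, t = System.Math.Log(4.0);
        Gaussian g = Gaussian.FromMeanAndPrecision(mean, System.Math.Exp(t));

        // Precision is the inverse variance, so the same distribution can be built either way.
        Gaussian same = Gaussian.FromMeanAndVariance(mean, 1.0 / System.Math.Exp(t));
        Console.WriteLine("{0} and {1} are the same distribution", g, same);

        // GetLogProb evaluates the log density, as used when matching logApprox to logTrue above.
        Console.WriteLine("log N(x=1.5; mean=1.5, prec=4) = {0}", g.GetLogProb(1.5));
    }
}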
Example #2
        public void JaggedSetValues2()
        {
            Array jaggedGaussian = JaggedArray.ConvertToNew(
                jagged, typeof(int), typeof(Gaussian),
                delegate(object elt) { return(new Gaussian(1.0, 1.0)); });

            Assert.Equal(
                JaggedArray.GetLength(jaggedGaussian, typeof(Gaussian)),
                JaggedArray.GetLength(jagged, typeof(int)));

            JaggedArray.ConvertElements2(
                jaggedGaussian, jagged, typeof(Gaussian),
                delegate(object elt1, object elt2)
            {
                Gaussian g = (Gaussian)elt1;
                double d   = (int)elt2;
                return(Gaussian.FromMeanAndPrecision(d + g.GetMean(), 2.0));
            });

            int i = 0;

            foreach (Gaussian g in JaggedArray.ElementIterator(jaggedGaussian, typeof(Gaussian)))
            {
                Assert.Equal(g.GetMean(), 1.0 + arrExpected[i++]);
            }
        }
Example #3
        public static Discrete RunMultipleCyclistInference(Dictionary <int, double[]> trainingData)
        {
            ModelData initPriors = new ModelData(
                Gaussian.FromMeanAndPrecision(29.5, 0.01),
                Gamma.FromShapeAndScale(1.0, 0.5));

            //Train the model
            MultipleCyclistsTraining cyclistsTraining = new MultipleCyclistsTraining();

            cyclistsTraining.CreateModel();
            cyclistsTraining.SetModelData(initPriors);

            ModelData[] posteriors1 = cyclistsTraining.InferModelData(trainingData[0], trainingData[1], trainingData[2], trainingData[3], trainingData[4], trainingData[5]);

            Console.WriteLine("Cyclist 1 average travel time: {0}", posteriors1[0].AverageTimeDist);
            Console.WriteLine("Cyclist 1 traffic noise: {0}", posteriors1[0].TrafficNoiseDist);

            //Make predictions based on the trained model
            MultipleCyclistsPrediction cyclistsPrediction = new MultipleCyclistsPrediction();

            cyclistsPrediction.CreateModel();
            cyclistsPrediction.SetModelData(posteriors1);

            Gaussian[] posteriors2 = cyclistsPrediction.InferTomorrowsTime();

            return(cyclistsPrediction.InferWinner());
        }
Example #4
        private void IsPositiveModel()
        {
            double a = Factor.Random(Gaussian.FromMeanAndPrecision(1.2, 2.3));
            bool   c = Factor.IsPositive(a);

            InferNet.Infer(c, nameof(c));
        }
Example #5
        public static void RunCyclingTime4()
        {
            double[]  trainingData = new double[] { 13, 17, 16, 12, 13, 12, 14, 18, 16, 16, 27, 32 };
            ModelData initPriors   = new ModelData(
                Gaussian.FromMeanAndPrecision(15.0, 0.01),
                Gamma.FromShapeAndScale(2.0, 0.5));
            CyclistWithEvidence cyclistWithEvidence = new CyclistWithEvidence();

            cyclistWithEvidence.CreateModel();
            cyclistWithEvidence.SetModelData(initPriors);

            double logEvidence = cyclistWithEvidence.InferEvidence(trainingData);

            ModelDataMixed initPriorsMixed;

            initPriorsMixed.AverageTimeDist  = new Gaussian[] { new Gaussian(15.0, 100), new Gaussian(30.0, 100) };
            initPriorsMixed.TrafficNoiseDist = new Gamma[] { new Gamma(2.0, 0.5), new Gamma(2.0, 0.5) };
            initPriorsMixed.MixingDist       = new Dirichlet(1, 1);

            CyclistMixedWithEvidence cyclistMixedWithEvidence = new CyclistMixedWithEvidence();

            cyclistMixedWithEvidence.CreateModel();
            cyclistMixedWithEvidence.SetModelData(initPriorsMixed);

            double logEvidenceMixed = cyclistMixedWithEvidence.InferEvidence(trainingData);

            Console.WriteLine("Log evidence for single Gaussian: {0:f2}", logEvidence);
            Console.WriteLine("Log evidence for mixture of two Gaussians: {0:f2}", logEvidenceMixed);
        }
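RunCyclingTime4 prints the log evidence of the single-Gaussian model and of the two-component mixture; their difference is the log Bayes factor for the mixture. A tiny illustrative calculation of that comparison follows; the numeric values are made up, not taken from a run.

using System;

public static class BayesFactorSketch
{
    public static void Main()
    {
        // Hypothetical log evidence values; a real run produces its own numbers.
        double logEvidenceSingle = -35.0;
        double logEvidenceMixed = -32.0;

        // The difference of log evidences is the log Bayes factor for the mixture model.
        double logBayesFactor = logEvidenceMixed - logEvidenceSingle;
        Console.WriteLine("Bayes factor (mixture vs. single Gaussian): {0:f2}", System.Math.Exp(logBayesFactor));
    }
}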
Example #6
        private void Max_APointMass(Gaussian max, Gaussian b)
        {
            double   point   = 3;
            Gaussian toPoint = MaxGaussianOp.AAverageConditional(max, Gaussian.PointMass(point), b);

            //Console.WriteLine($"{point} {toPoint} {toPoint.MeanTimesPrecision:g17} {toPoint.Precision:g17}");
            if (max.IsPointMass && b.IsPointMass)
            {
                Gaussian toUniform = MaxGaussianOp.AAverageConditional(max, Gaussian.Uniform(), b);
                if (max.Point > b.Point)
                {
                    Assert.Equal(toUniform, max);
                }
                else
                {
                    Assert.Equal(toUniform, Gaussian.Uniform());
                }
            }
            double oldDiff = double.PositiveInfinity;

            for (int i = 3; i < 100; i++)
            {
                Gaussian a    = Gaussian.FromMeanAndPrecision(point, System.Math.Pow(10, i));
                Gaussian to_a = MaxGaussianOp.AAverageConditional(max, a, b);
                double   diff = toPoint.MaxDiff(to_a);
                //Console.WriteLine($"{a} {to_a} {to_a.MeanTimesPrecision:g17} {to_a.Precision:g17} {diff:g17}");
                if (diff < 1e-14)
                {
                    diff = 0;
                }
                Assert.True(diff <= oldDiff);
                oldDiff = diff;
            }
        }
Example #7
 public void GaussianOpPrecision_IsMonotonicInSampleVariance()
 {
     using (TestUtils.TemporarilyAllowGaussianImproperMessages)
     {
         Gaussian mean = Gaussian.PointMass(0);
         for (int logRate = 0; logRate < 310; logRate++)
         {
             Gamma  precision    = Gamma.FromShapeAndRate(300, System.Math.Pow(10, logRate));
             double previousRate = double.PositiveInfinity;
             for (int i = 0; i < 310; i++)
             {
                 Gaussian sample  = Gaussian.FromMeanAndPrecision(0, System.Math.Pow(10, -i));
                 Gamma    precMsg = GaussianOp.PrecisionAverageConditional(sample, mean, precision);
                 //precMsg = GaussianOp_Laplace.PrecisionAverageConditional_slow(sample, mean, precision);
                 //Gamma precMsg2 = GaussianOp_Slow.PrecisionAverageConditional(sample, mean, precision);
                 //Console.WriteLine("{0}: {1} should be {2}", sample, precMsg, precMsg2);
                 Gamma post = precMsg * precision;
                 //Trace.WriteLine($"{sample}: {precMsg.Rate} post = {post.Rate}");
                 if (i >= logRate)
                 {
                     Assert.True(precMsg.Rate <= previousRate);
                 }
                 previousRate = precMsg.Rate;
             }
         }
     }
 }
Example #8
        public static Gaussian FindxB(Gaussian xB, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
        {
            Gaussian xB3 = IsPositiveOp.XAverageConditional(true, xF);
            Func <Vector, double> func = delegate(Vector x2)
            {
                Gaussian xB2 = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
                //Gaussian xF2 = GaussianOp.SampleAverageConditional_slow(xB2, meanPrior, precPrior);
                Gaussian xF2 = GaussianOp_Slow.SampleAverageConditional(xB2, meanPrior, precPrior);
                //Assert.True(xF2.MaxDiff(xF3) < 1e-10);
                //return Math.Pow((xF*xB2).GetMean() - (xF2*xB2).GetMean(), 2) + Math.Pow((xF*xB2).GetVariance() - (xF2*xB2).GetVariance(), 2);
                //return KlDiv(xF2*xB2, xF*xB2) + KlDiv(xF*xB3, xF*xB2);
                //return KlDiv(xF2*xB2, xF*xB2) + Math.Pow((xF*xB3).GetMean() - (xF*xB2).GetMean(),2);
                return(MeanError(xF2 * xB2, xF * xB2) + KlDiv(xF * xB3, xF * xB2));
                //return xF.MaxDiff(xF2);
                //Gaussian q = new Gaussian(0, 0.1);
                //return Math.Pow((xF*q).GetMean() - (xF2*q).GetMean(), 2) + Math.Pow((xF*q).GetVariance() - (xF2*q).GetVariance(), 2);
            };

            double m = xB.GetMean();
            double p = xB.Precision;
            Vector x = Vector.FromArray(m, System.Math.Log(p));

            Minimize2(func, x);
            return(Gaussian.FromMeanAndPrecision(x[0], System.Math.Exp(x[1])));
        }
Example #9
        public static Gaussian FindxF3(Gaussian xExpected, double evExpected, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
        {
            Func <Vector, double> func = delegate(Vector x2)
            {
                Gaussian xFt = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
                Gaussian xB  = IsPositiveOp.XAverageConditional(true, xFt);
                Gaussian xM  = xFt * xB;
                //return KlDiv(xExpected, xM);
                return(KlDiv(xM, xExpected));
                //Gaussian xF2 = GaussianOp.SampleAverageConditional_slow(xB, meanPrior, precPrior);
                //Gaussian xF2 = GaussianOp_Slow.SampleAverageConditional(xB, meanPrior, precPrior);
                //Gaussian xM2 = xF2*xB;
                //double ev1 = IsPositiveOp.LogAverageFactor(true, xFt);
                //double ev2 = GaussianOp.LogAverageFactor_slow(xB, meanPrior, precPrior) - xFt.GetLogAverageOf(xB);
                //double ev = ev1 + ev2;
                //return xExpected.MaxDiff(xM);
                //return Math.Pow(xExpected.GetMean() - xM.GetMean(), 2) + Math.Pow(ev - Math.Log(evExpected), 2);
                //return 100*Math.Pow(xM.GetMean() - xM2.GetMean(), 2) -ev;
                //return 100*Math.Pow(ev2, 2) + Math.Pow(ev - Math.Log(evExpected), 2);
                //return 100*Math.Pow(ev2, 2) + Math.Pow(xM2.GetMean() - xM.GetMean(), 2);
            };

            double m = xF.GetMean();
            double p = xF.Precision;
            Vector x = Vector.FromArray(m, System.Math.Log(p));

            Minimize2(func, x);
            return(Gaussian.FromMeanAndPrecision(x[0], System.Math.Exp(x[1])));
        }
Example #10
        public static WrappedGaussian AngleAverageLogarithm([SkipIfUniform] VectorGaussian rotate, double x, double y)
        {
            if (rotate.Dimension != 2)
            {
                throw new ArgumentException("rotate.Dimension (" + rotate.Dimension + ") != 2");
            }
            double rPrec = rotate.Precision[0, 0];

            if (rotate.Precision[0, 1] != 0)
            {
                throw new ArgumentException("rotate.Precision is not diagonal");
            }
            if (rotate.Precision[1, 1] != rPrec)
            {
                throw new ArgumentException("rotate.Precision is not spherical");
            }
#if false
            Vector rotateMean = rotate.GetMean();
            double a          = x * rotateMean[0] + y * rotateMean[1];
            double b          = x * rotateMean[1] - y * rotateMean[0];
#else
            double rotateMean0 = rotate.MeanTimesPrecision[0] / rotate.Precision[0, 0];
            double rotateMean1 = rotate.MeanTimesPrecision[1] / rotate.Precision[1, 1];
            double a           = x * rotateMean0 + y * rotateMean1;
            double b           = x * rotateMean1 - y * rotateMean0;
#endif
            double c      = Math.Sqrt(a * a + b * b) * rPrec;
            double angle0 = Math.Atan2(b, a);
            // the exact conditional is exp(c*cos(angle - angle0)) which is a von Mises distribution.
            // we will approximate this with a Gaussian lower bound that makes contact at the mode.
            WrappedGaussian result = WrappedGaussian.Uniform();
            result.Gaussian = Gaussian.FromMeanAndPrecision(angle0, c);
            return(result);
        }
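The comment in AngleAverageLogarithm notes that the exact conditional is proportional to exp(c*cos(angle - angle0)) and is replaced by a Gaussian lower bound touching it at the mode. Because cos(x) >= 1 - x*x/2, the quadratic c - 0.5*c*(angle - angle0)^2 is such a bound, which is why the result has mean angle0 and precision c. A short standalone check of that bound (class name and values are illustrative):

using System;

public static class VonMisesBoundSketch
{
    public static void Main()
    {
        // Unnormalized log-density of the exact conditional: c * cos(angle - angle0).
        // Since cos(x) >= 1 - x*x/2, the quadratic c - 0.5*c*(angle - angle0)^2 is a lower
        // bound that touches it at angle0, matching Gaussian.FromMeanAndPrecision(angle0, c).
        double c = 3.0, angle0 = 0.7; // illustrative values
        foreach (double angle in new[] { angle0, angle0 + 0.1, angle0 + 1.0 })
        {
            double exact = c * System.Math.Cos(angle - angle0);
            double bound = c - 0.5 * c * (angle - angle0) * (angle - angle0);
            Console.WriteLine("angle = {0:f2}: exact = {1:f4}, lower bound = {2:f4}", angle, exact, bound);
        }
    }
}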
Example #11
        private void IsBetweenModel()
        {
            double a = Factor.Random(Gaussian.FromMeanAndPrecision(0.1, 0.2));
            double b = Factor.Random(Gaussian.FromMeanAndPrecision(0.3, 0.4));
            double x = Factor.Random(Gaussian.FromMeanAndPrecision(0.2, 0.5));
            bool   c = Factor.IsBetween(x, a, b);

            InferNet.Infer(c, nameof(c));
        }
Example #12
        public static void RunCyclingTime2()
        {
            double[]  trainingData = new double[] { 13, 17, 16, 12, 13, 12, 14, 18, 16, 16 };
            ModelData initPriors   = new ModelData(
                Gaussian.FromMeanAndPrecision(1.0, 0.01),
                Gamma.FromShapeAndScale(2.0, 0.5));

            // Train the model
            CyclistTraining cyclistTraining = new CyclistTraining();

            cyclistTraining.CreateModel();
            cyclistTraining.SetModelData(initPriors);

            ModelData posteriors1 = cyclistTraining.InferModelData(trainingData);

            Console.WriteLine("Average travel time = " + posteriors1.AverageTimeDist);
            Console.WriteLine("Traffic noise = " + posteriors1.TrafficNoiseDist);

            // Make predictions based on the trained model
            CyclistPrediction cyclistPrediction = new CyclistPrediction();

            cyclistPrediction.CreateModel();
            cyclistPrediction.SetModelData(posteriors1);

            Gaussian tomorrowsTimeDist = cyclistPrediction.InferTomorrowsTime();

            double tomorrowsMean   = tomorrowsTimeDist.GetMean();
            double tomorrowsStdDev = Math.Sqrt(tomorrowsTimeDist.GetVariance());

            Console.WriteLine("Tomorrows average time: {0:f2}", tomorrowsMean);
            Console.WriteLine("Tomorrows standard deviation: {0:f2}", tomorrowsStdDev);
            Console.WriteLine("Probability that tomorrow's time is < 18 min: {0}",
                              cyclistPrediction.InferProbabilityTimeLessThan(18.0));

            // Second round of training
            double[] trainingData2 = new double[] { 17, 19, 18, 21, 15 };

            cyclistTraining.SetModelData(posteriors1);
            ModelData posteriors2 = cyclistTraining.InferModelData(trainingData2);

            Console.WriteLine("\n2nd training pass");
            Console.WriteLine("Average travel time = " + posteriors2.AverageTimeDist);
            Console.WriteLine("Traffic noise = " + posteriors2.TrafficNoiseDist);

            // Predictions based on two rounds of training
            cyclistPrediction.SetModelData(posteriors2);

            tomorrowsTimeDist = cyclistPrediction.InferTomorrowsTime();
            tomorrowsMean     = tomorrowsTimeDist.GetMean();
            tomorrowsStdDev   = Math.Sqrt(tomorrowsTimeDist.GetVariance());

            Console.WriteLine("Tomorrows average time: {0:f2}", tomorrowsMean);
            Console.WriteLine("Tomorrows standard deviation: {0:f2}", tomorrowsStdDev);
            Console.WriteLine("Probability that tomorrow's time is < 18 min: {0}",
                              cyclistPrediction.InferProbabilityTimeLessThan(18));
        }
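The prediction step above reports a predictive mean, standard deviation, and the probability of a time under 18 minutes. For any Gaussian predictive distribution these summaries follow directly from GetMean, GetVariance, and the standard normal CDF; a minimal sketch is below, where the distribution is a hypothetical stand-in rather than the output of InferTomorrowsTime.

using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;

public static class PredictiveSummarySketch
{
    public static void Main()
    {
        // Hypothetical predictive distribution; a real run would use InferTomorrowsTime().
        Gaussian tomorrowsTimeDist = Gaussian.FromMeanAndVariance(15.3, 4.2);

        double mean = tomorrowsTimeDist.GetMean();
        double stdDev = System.Math.Sqrt(tomorrowsTimeDist.GetVariance());

        // For a Gaussian, P(time < 18) = NormalCdf((18 - mean) / stdDev).
        double probLessThan18 = MMath.NormalCdf((18.0 - mean) / stdDev);

        Console.WriteLine("mean = {0:f2}, stddev = {1:f2}, P(time < 18) = {2:f3}",
                          mean, stdDev, probLessThan18);
    }
}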
Example #13
        internal void StudentIsPositiveTest4()
        {
            double shape     = 1;
            Gamma  precPrior = Gamma.FromShapeAndRate(shape, shape);
            // mean=-1 causes improper messages
            double   mean      = -1;
            Gaussian meanPrior = Gaussian.PointMass(mean);
            double   evExpected;
            Gaussian xExpected = StudentIsPositiveExact(mean, precPrior, out evExpected);

            GaussianOp.ForceProper       = false;
            GaussianOp_Laplace.modified  = true;
            GaussianOp_Laplace.modified2 = true;
            Gaussian xF = Gaussian.Uniform();
            Gaussian xB = Gaussian.Uniform();
            Gamma    q  = GaussianOp_Laplace.QInit();
            double   r0 = 0.38;

            r0 = 0.1;
            for (int iter = 0; iter < 20; iter++)
            {
                q = GaussianOp_Laplace.Q(xB, meanPrior, precPrior, q);
                //xF = GaussianOp_Laplace.SampleAverageConditional(xB, meanPrior, precPrior, q);
                xF = Gaussian.FromMeanAndPrecision(mean, r0);
                xB = IsPositiveOp.XAverageConditional(true, xF);
                Console.WriteLine("xF = {0} xB = {1}", xF, xB);
            }
            Console.WriteLine("x = {0} should be {1}", xF * xB, xExpected);

            double[] precs     = EpTests.linspace(1e-3, 5, 100);
            double[] evTrue    = new double[precs.Length];
            double[] evApprox  = new double[precs.Length];
            double[] evApprox2 = new double[precs.Length];
            //r0 = q.GetMean();
            double sum = 0, sum2 = 0;

            for (int i = 0; i < precs.Length; i++)
            {
                double   r   = precs[i];
                Gaussian xFt = Gaussian.FromMeanAndPrecision(mean, r);
                evTrue[i]    = IsPositiveOp.LogAverageFactor(true, xFt) + precPrior.GetLogProb(r);
                evApprox[i]  = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r) + xB.GetLogAverageOf(xFt) - xB.GetLogAverageOf(xF);
                evApprox2[i] = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r0) + q.GetLogProb(r) - q.GetLogProb(r0);
                sum         += System.Math.Exp(evApprox[i]);
                sum2        += System.Math.Exp(evApprox2[i]);
            }
            Console.WriteLine("r0 = {0}: {1} {2} {3}", r0, sum, sum2, q.GetVariance() + System.Math.Pow(r0 - q.GetMean(), 2));
            // TODO: change the path below for cross-platform use
            using (var writer = new MatlabWriter(@"..\..\..\Tests\student.mat"))
            {
                writer.Write("z", evTrue);
                writer.Write("z2", evApprox);
                writer.Write("z3", evApprox2);
                writer.Write("precs", precs);
            }
        }
Example #14
        /// <summary>
        /// Find the Laplace approximation for Beta(Logistic(x)) * Gaussian(x)
        /// </summary>
        /// <param name="beta">Beta distribution</param>
        /// <param name="gauss">Gaussian distribution</param>
        /// <returns>A proposal distribution</returns>
        public static Gaussian LogisticProposalDistribution(Beta beta, Gaussian gauss)
        {
            if (beta.IsUniform())
            {
                return(new Gaussian(gauss));
            }

            // if gauss is uniform, m,p = 0 below, and the following code will just ignore the Gaussian
            // and do a Laplace approximation for Beta(Logistic(x))

            double c = beta.TrueCount - 1;
            double d = beta.FalseCount - 1;
            double m = gauss.GetMean();
            double p = gauss.Precision;
            // We want to find the mode of
            // ln(g(x)) = c.ln(f(x)) + d.ln(1 - f(x)) - 0.5p((x - m)^2) + constant
            // First deriv:
            // h(x) = (ln(g(x)))' = c.(1 - f(x)) - d.f(x) - p(x-m)
            // Second deriv:
            // h'(x) = (ln(g(x)))'' = -(c+d).f'(x) - p
            // Use Newton-Raphson to find unique root of h(x).
            // g(x) is log-concave so Newton-Raphson should converge quickly.
            // Set the initial point by projecting beta
            // to a Gaussian and taking the mean of the product:
            double bMean, bVar;

            beta.GetMeanAndVariance(out bMean, out bVar);
            Gaussian prod            = new Gaussian();
            double   invLogisticMean = Math.Log(bMean) - Math.Log(1.0 - bMean);

            prod.SetToProduct(Gaussian.FromMeanAndVariance(invLogisticMean, bVar), gauss);
            double xnew = prod.GetMean();
            double x = 0, fx, dfx, hx, dhx = 0;
            int    maxIters = 100; // Should only need a handful of iters
            int    cnt      = 0;

            do
            {
                x   = xnew;
                fx  = MMath.Logistic(x);
                dfx = fx * (1.0 - fx);
                // Find the root of h(x)
                hx   = c * (1.0 - fx) - d * fx - p * (x - m);
                dhx  = -(c + d) * dfx - p;
                xnew = x - (hx / dhx); // The Newton step
                if (Math.Abs(x - xnew) < 0.00001)
                {
                    break;
                }
            } while (++cnt < maxIters);
            if (cnt >= maxIters)
            {
                throw new InferRuntimeException("Unable to find proposal distribution mode");
            }
            return(Gaussian.FromMeanAndPrecision(x, -dhx));
        }
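The comments in LogisticProposalDistribution describe the general Laplace recipe: locate the mode of the log-density with Newton-Raphson, then use the negated second derivative at the mode as the precision. A generic sketch of that recipe for a one-dimensional log-concave target follows; the Laplace helper, its arguments, and the sanity check are illustrative, not library code.

using System;
using Microsoft.ML.Probabilistic.Distributions;

public static class LaplaceSketch
{
    // Laplace approximation of a log-concave density: Newton-Raphson on the derivative of the
    // log-density to find the mode, then use the negated second derivative there as the precision.
    public static Gaussian Laplace(Func<double, double> dLogProb, Func<double, double> d2LogProb, double x0)
    {
        double x = x0;
        for (int iter = 0; iter < 100; iter++)
        {
            double step = dLogProb(x) / d2LogProb(x);
            x -= step;
            if (System.Math.Abs(step) < 1e-10)
            {
                break;
            }
        }
        return Gaussian.FromMeanAndPrecision(x, -d2LogProb(x));
    }

    public static void Main()
    {
        // Sanity check on a known case: the log-density of a Gaussian with mean 2 and precision 4,
        // whose Laplace approximation is that Gaussian itself.
        Gaussian result = Laplace(x => -4.0 * (x - 2.0), x => -4.0, 0.0);
        Console.WriteLine(result); // mean 2, precision 4
    }
}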
Example #15
        public void MP_Sum()
        {
            InferenceEngine engine = new InferenceEngine();

            engine.Compiler.DeclarationProvider = Microsoft.ML.Probabilistic.Compiler.RoslynDeclarationProvider.Instance;
            Gaussian[] aPrior = new Gaussian[2];
            aPrior[0] = Gaussian.FromMeanAndPrecision(0.1, 0.2);
            aPrior[1] = Gaussian.FromMeanAndPrecision(0.3, 0.4);
            var ca = engine.Compiler.Compile(SumModel, aPrior);

            ca.Execute(1);
            Gaussian cMarg = ca.Marginal <Gaussian>("c");
        }
Example #16
        public void Run()
        {
            BayesianPCAModel bpca = new BayesianPCAModel();

            if (!(bpca.engine.Algorithm is Algorithms.VariationalMessagePassing))
            {
                Console.WriteLine("This example only runs with Variational Message Passing");
                return;
            }

            // Set a stable random number seed for repeatable runs
            Rand.Restart(12347);
            double[,] data = generateData(1000);

            // Set the data
            bpca.vData.ObservedValue = data;

            // Set the dimensions
            bpca.vN.ObservedValue = data.GetLength(0);
            bpca.vD.ObservedValue = data.GetLength(1);
            bpca.vM.ObservedValue = 6;

            // Set the priors
            bpca.priorMu.ObservedValue    = Gaussian.FromMeanAndPrecision(0.0, 0.01);
            bpca.priorPi.ObservedValue    = Gamma.FromShapeAndRate(2.0, 2.0);
            bpca.priorAlpha.ObservedValue = Gamma.FromShapeAndRate(2.0, 2.0);

            // Initialize the W marginal to break symmetry
            bpca.vW.InitialiseTo(randomGaussianArray(bpca.vM.ObservedValue, bpca.vD.ObservedValue));

            // Infer the marginals
            bpca.engine.NumberOfIterations = 200;
            Gaussian[,] inferredW          = bpca.engine.Infer <Gaussian[, ]>(bpca.vW);
            Gaussian[] inferredMu = bpca.engine.Infer <Gaussian[]>(bpca.vMu);
            Gamma[]    inferredPi = bpca.engine.Infer <Gamma[]>(bpca.vPi);

            // Print out the results
            Console.WriteLine("Inferred W:");
            printMatrixToConsole(inferredW);
            Console.Write("Mean absolute means of rows in W: ");
            printVectorToConsole(meanAbsoluteRowMeans(inferredW));
            Console.Write("    True bias: ");
            printVectorToConsole(trueMu);
            Console.Write("Inferred bias: ");
            printVectorToConsole(inferredMu);
            Console.Write("    True noise:");
            printVectorToConsole(truePi);
            Console.Write("Inferred noise:");
            printVectorToConsole(inferredPi);
            Console.WriteLine();
        }
Example #17
        public void Run()
        {
            BayesianPCAModel bpca = new BayesianPCAModel();

            if (!(bpca.engine.Algorithm is Algorithms.VariationalMessagePassing))
            {
                Console.WriteLine("This example only runs with Variational Message Passing");
                return;
            }

            // Set a stable random number seed for repeatable runs
            Rand.Restart(12347);
            double[,] data = generateData(1000);

            // Set the data
            bpca.data.ObservedValue = data;

            // Set the dimensions
            bpca.observationCount.ObservedValue = data.GetLength(0);
            bpca.featureCount.ObservedValue     = data.GetLength(1);
            bpca.componentCount.ObservedValue   = 6;

            // Set the priors
            bpca.priorMu.ObservedValue    = Gaussian.FromMeanAndPrecision(0.0, 0.01);
            bpca.priorPi.ObservedValue    = Gamma.FromShapeAndRate(2.0, 2.0);
            bpca.priorAlpha.ObservedValue = Gamma.FromShapeAndRate(2.0, 2.0);

            // Set the initialization
            bpca.initW.ObservedValue = randomGaussianArray(bpca.componentCount.ObservedValue, bpca.featureCount.ObservedValue);

            // Infer the marginals
            bpca.engine.NumberOfIterations = 200;
            var inferredW  = bpca.engine.Infer <IArray2D <Gaussian> >(bpca.W);
            var inferredMu = bpca.engine.Infer <IReadOnlyList <Gaussian> >(bpca.mu);
            var inferredPi = bpca.engine.Infer <IReadOnlyList <Gamma> >(bpca.pi);

            // Print out the results
            Console.WriteLine("Inferred W:");
            printMatrixToConsole(inferredW);
            Console.Write("Mean absolute means of rows in W: ");
            printVectorToConsole(meanAbsoluteRowMeans(inferredW));
            Console.Write("    True bias: ");
            printVectorToConsole(trueMu);
            Console.Write("Inferred bias: ");
            printVectorToConsole(inferredMu.Select(d => d.GetMean()));
            Console.Write("    True noise:");
            printVectorToConsole(truePi);
            Console.Write("Inferred noise:");
            printVectorToConsole(inferredPi.Select(d => d.GetMean()));
            Console.WriteLine();
        }
Example #18
 private void MatrixMultiplyModel()
 {
     double[,] a = new double[3, 2];
     double[,] b = new double[2, 3];
     double[,] c = new double[3, 3];
     for (int i = 0; i < 3; i++)
     {
         for (int j = 0; j < 2; j++)
         {
             a[i, j] = Factor.Random(Gaussian.FromMeanAndPrecision(1.2, 3.4));
             b[j, i] = Factor.Random(Gaussian.FromMeanAndPrecision(4.3, 2.1));
         }
     }
     c = Factor.MatrixMultiply(a, b);
     InferNet.Infer(c, nameof(c));
 }
Example #19
        private static Gaussian GetConstrainedMessage1(Gaussian sample, Gaussian mean, Gamma precision, Gaussian to_sample)
        {
            Gaussian sampleMarginal = sample * to_sample;
            double   m1, v1;

            to_sample.GetMeanAndVariance(out m1, out v1);
            double m, v;

            sampleMarginal.GetMeanAndVariance(out m, out v);
            double moment2 = m * m + v;
            // vq < moment2 implies 1/vq > 1/moment2
            // implies 1/v2 > 1/moment2 - to_sample.Precision
            double v2max = 1 / (1 / moment2 - to_sample.Precision);
            double v2min = 1e-2;

            double[] v2s   = EpTests.linspace(v2min, v2max, 100);
            double   p2min = 1 / moment2 - to_sample.Precision;

            if (p2min < 0.0)
            {
                return(to_sample);
            }
            double p2max = sample.Precision * 10;

            double[] p2s        = EpTests.linspace(p2min, p2max, 100);
            Gaussian bestResult = to_sample;
            double   bestScore  = double.PositiveInfinity;

            for (int i = 0; i < p2s.Length; i++)
            {
                double p2 = p2s[i];
                double vq = 1 / (to_sample.Precision + p2);
                double m2 = (System.Math.Sqrt(moment2 - vq) / vq - to_sample.MeanTimesPrecision) / p2;
                // check
                double mq = vq * (to_sample.MeanTimesPrecision + m2 * p2);
                Assert.True(MMath.AbsDiff(mq * mq + vq, moment2) < 1e-10);
                Gaussian sample2 = Gaussian.FromMeanAndPrecision(m2, p2);
                Gaussian result  = GaussianOp.SampleAverageConditional_slow(sample2, mean, precision);
                double   score   = System.Math.Abs(result.MeanTimesPrecision);
                if (score < bestScore)
                {
                    bestScore  = score;
                    bestResult = result;
                }
            }
            return(bestResult);
        }
Example #20
        public static Gaussian FindxF2(Gaussian meanPrior, Gamma precPrior, Gaussian xF)
        {
            Func <Vector, double> func = delegate(Vector x2)
            {
                Gaussian xFt = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
                Gaussian xB  = IsPositiveOp.XAverageConditional(true, xFt);
                Gaussian xF2 = GaussianOp_Slow.SampleAverageConditional(xB, meanPrior, precPrior);
                return(xFt.MaxDiff(xF2));
            };

            double m = xF.GetMean();
            double p = xF.Precision;
            Vector x = Vector.FromArray(m, System.Math.Log(p));

            Minimize2(func, x);
            return(Gaussian.FromMeanAndPrecision(x[0], System.Math.Exp(x[1])));
        }
Example #21
        public void GenData(int n, int seed, out double[] data,
                            out double trueR2, out double truePrec)
        {
            Rand.Restart(seed);
            Gamma r2Dist = Gamma.FromShapeAndRate(Shape1, Rate1);

            trueR2 = r2Dist.Sample();
            Gamma precDist = Gamma.FromShapeAndRate(Shape2, trueR2);

            truePrec = precDist.Sample();
            data     = new double[n];
            for (int i = 0; i < n; i++)
            {
                Gaussian xDist = Gaussian.FromMeanAndPrecision(GaussMean, truePrec);
                data[i] = xDist.Sample();
            }
        }
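GenData draws each observation from Gaussian.FromMeanAndPrecision(GaussMean, truePrec), so the empirical variance of the data should sit near 1/truePrec. A small self-contained check of that relationship (class name, seed, and parameter values are arbitrary):

using System;
using System.Linq;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;

public static class SamplingSketch
{
    public static void Main()
    {
        Rand.Restart(0);
        double precision = 4.0;
        Gaussian xDist = Gaussian.FromMeanAndPrecision(10.0, precision);

        double[] samples = Enumerable.Range(0, 100000).Select(i => xDist.Sample()).ToArray();
        double mean = samples.Average();
        double variance = samples.Select(x => (x - mean) * (x - mean)).Average();

        // Precision is the inverse variance, so the empirical variance should be near 0.25.
        Console.WriteLine("empirical variance = {0:f3}, 1/precision = {1:f3}", variance, 1.0 / precision);
    }
}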
Example #22
 /// <summary>
 /// Priors from the community posteriors, using the means of the weight mean and weight precision posteriors.
 /// </summary>
 /// <param name="posteriors">The posteriors.</param>
 /// <param name="featureSet">The feature set.</param>
 /// <param name="thresholdAndNoiseVariance">The threshold and noise variance.</param>
 /// <returns>The <see cref="Priors" /></returns>
 internal static Priors FromCommunityPosteriors(CommunityPosteriors posteriors, FeatureSet featureSet, double thresholdAndNoiseVariance)
 {
     return(new Priors
     {
         Weights =
             featureSet.FeatureBuckets.ToDictionary(
                 ia => ia,
                 ia =>
                 posteriors.WeightMeans.ContainsKey(ia) && posteriors.WeightPrecisions.ContainsKey(ia)
                                ? Gaussian.FromMeanAndPrecision(
                     posteriors.WeightMeans[ia].GetMean(),
                     posteriors.WeightPrecisions[ia].GetMean())
                                : Gaussian.FromMeanAndVariance(0.0, thresholdAndNoiseVariance)),
         Threshold = Gaussian.FromMeanAndVariance(0.0, thresholdAndNoiseVariance),
         NoiseVariance = thresholdAndNoiseVariance
     });
 }
Example #23
 internal void GaussianOpPrecision3()
 {
     using (TestUtils.TemporarilyAllowGaussianImproperMessages)
     {
         Gaussian mean      = Gaussian.PointMass(0);
         Gamma    precision = Gamma.FromShapeAndRate(2, 10);
         for (int i = -10; i < 10; i++)
         {
             Gaussian sample  = Gaussian.FromMeanAndPrecision(0, System.Math.Pow(10, -i));
             Gamma    precMsg = GaussianOp.PrecisionAverageConditional(sample, mean, precision);
             //precMsg = GaussianOp_Laplace.PrecisionAverageConditional_slow(sample, mean, precision);
             //Gamma precMsg2 = GaussianOp_Slow.PrecisionAverageConditional(sample, mean, precision);
             //Console.WriteLine("{0}: {1} should be {2}", sample, precMsg, precMsg2);
             Gamma post = precMsg * precision;
             Console.WriteLine("{0}: {1} post = {2}", sample, precMsg.Rate, post.Rate);
         }
     }
 }
Example #24
        public static Gaussian XAverageLogarithm([SkipIfUniform] VectorGaussian rotate, [Proper] WrappedGaussian angle)
        {
            // for x ~ N(m,v):
            // E[cos(x)] = cos(m)*exp(-v/2)
            // E[sin(x)] = sin(m)*exp(-v/2)
            if (angle.Period != 2 * Math.PI)
            {
                throw new ArgumentException("angle.Period (" + angle.Period + ") != 2*PI (" + 2 * Math.PI + ")");
            }
            double angleMean, angleVar;

            angle.Gaussian.GetMeanAndVariance(out angleMean, out angleVar);
            double expVar = Math.Exp(-0.5 * angleVar);
            double mCos   = Math.Cos(angleMean) * expVar;
            double mSin   = Math.Sin(angleMean) * expVar;

            if (rotate.Dimension != 2)
            {
                throw new ArgumentException("rotate.Dimension (" + rotate.Dimension + ") != 2");
            }
            double prec = rotate.Precision[0, 0];

            if (rotate.Precision[0, 1] != 0)
            {
                throw new ArgumentException("rotate.Precision is not diagonal");
            }
            if (rotate.Precision[1, 1] != prec)
            {
                throw new ArgumentException("rotate.Precision is not spherical");
            }
#if false
            Vector rotateMean = rotate.GetMean();
            double mean       = mCos * rotateMean[0] + mSin * rotateMean[1];
#else
            double rotateMean0 = rotate.MeanTimesPrecision[0] / rotate.Precision[0, 0];
            double rotateMean1 = rotate.MeanTimesPrecision[1] / rotate.Precision[1, 1];
            double mean        = mCos * rotateMean0 + mSin * rotateMean1;
#endif
            if (double.IsNaN(mean))
            {
                throw new ApplicationException("result is nan");
            }
            return(Gaussian.FromMeanAndPrecision(mean, prec));
        }
Example #25
        public void JaggedSetValues()
        {
            Array jaggedGaussian = JaggedArray.ConvertToNew(
                jagged, typeof(int), typeof(Gaussian),
                delegate(object elt) { return(new Gaussian(0.0, 1.0)); });

            Assert.Equal(
                JaggedArray.GetLength(jaggedGaussian, typeof(Gaussian)),
                JaggedArray.GetLength(jagged, typeof(int)));

            JaggedArray.ConvertElements(
                jaggedGaussian, typeof(Gaussian),
                delegate(object elt) { return(Gaussian.FromMeanAndPrecision(1.0, 2.0)); });

            foreach (Gaussian g in JaggedArray.ElementIterator(jaggedGaussian, typeof(Gaussian)))
            {
                Assert.Equal(1.0, g.GetMean());
            }
        }
Example #26
        public static Gaussian FindxF(Gaussian xB, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
        {
            Gaussian xF3 = GaussianOp_Slow.SampleAverageConditional(xB, meanPrior, precPrior);
            Func <Vector, double> func = delegate(Vector x2)
            {
                Gaussian xF2 = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
                Gaussian xB2 = IsPositiveOp.XAverageConditional(true, xF2);
                //return (xF2*xB2).MaxDiff(xF2*xB) + (xF3*xB).MaxDiff(xF2*xB);
                //return KlDiv(xF2*xB2, xF2*xB) + KlDiv(xF3*xB, xF2*xB);
                //return KlDiv(xF3*xB, xF2*xB) + Math.Pow((xF2*xB2).GetMean() - (xF2*xB).GetMean(),2);
                return(KlDiv(xF2 * xB2, xF2 * xB) + MeanError(xF3 * xB, xF2 * xB));
            };

            double m = xF.GetMean();
            double p = xF.Precision;
            Vector x = Vector.FromArray(m, System.Math.Log(p));

            Minimize2(func, x);
            //MinimizePowell(func, x);
            return(Gaussian.FromMeanAndPrecision(x[0], System.Math.Exp(x[1])));
        }
Example #27
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="SumAverageConditional(double, Gaussian)"]/*'/>
        public static Gaussian SumAverageConditional(double a, [SkipIfUniform] Gaussian b)
        {
            if (b.IsPointMass)
            {
                return(SumAverageConditional(a, b.Point));
            }
            if (b.IsUniform())
            {
                return(b);
            }
            double meanTimesPrecision = b.MeanTimesPrecision + a * b.Precision;

            if (Math.Abs(meanTimesPrecision) > double.MaxValue)
            {
                return(Gaussian.FromMeanAndPrecision(b.GetMean() + a, b.Precision));
            }
            else
            {
                return(Gaussian.FromNatural(meanTimesPrecision, b.Precision));
            }
        }
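SumAverageConditional shifts the Gaussian b by the constant a: in natural parameters the mean-times-precision gains a * b.Precision while the precision is unchanged, and the overflow branch performs the same shift in mean/precision form. A short check that the two forms agree (class name and values are arbitrary):

using System;
using Microsoft.ML.Probabilistic.Distributions;

public static class SumShiftSketch
{
    public static void Main()
    {
        double a = 2.5;
        Gaussian b = Gaussian.FromMeanAndPrecision(1.0, 4.0);

        // Natural-parameter form used on the normal path above.
        Gaussian viaNatural = Gaussian.FromNatural(b.MeanTimesPrecision + a * b.Precision, b.Precision);

        // Mean/precision form used on the overflow path above.
        Gaussian viaMean = Gaussian.FromMeanAndPrecision(b.GetMean() + a, b.Precision);

        Console.WriteLine("{0} vs {1}", viaNatural, viaMean); // both have mean 3.5 and precision 4
    }
}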
Example #28
        private void Max_MaxPointMass(Gaussian a, Gaussian b)
        {
            double   point   = 3;
            Gaussian toPoint = MaxGaussianOp.MaxAverageConditional(Gaussian.PointMass(point), a, b);
            //Console.WriteLine($"{point} {toPoint} {toPoint.MeanTimesPrecision} {toPoint.Precision}");
            double oldDiff = double.PositiveInfinity;

            for (int i = 5; i < 100; i++)
            {
                Gaussian max    = Gaussian.FromMeanAndPrecision(point, System.Math.Pow(10, i));
                Gaussian to_max = MaxGaussianOp.MaxAverageConditional(max, a, b);
                double   diff   = toPoint.MaxDiff(to_max);
                //Console.WriteLine($"{max} {to_max} {to_max.MeanTimesPrecision} {to_max.Precision} {diff}");
                if (diff < 1e-14)
                {
                    diff = 0;
                }
                Assert.True(diff <= oldDiff);
                oldDiff = diff;
            }
        }
Example #29
        private Gaussian StudentIsPositiveExact(double mean, Gamma precPrior, out double evidence)
        {
            // importance sampling for true answer
            GaussianEstimator est = new GaussianEstimator();
            int nSamples          = 1000000;

            evidence = 0;
            for (int iter = 0; iter < nSamples; iter++)
            {
                double   precSample = precPrior.Sample();
                Gaussian xPrior     = Gaussian.FromMeanAndPrecision(mean, precSample);
                double   logWeight  = IsPositiveOp.LogAverageFactor(true, xPrior);
                evidence += System.Math.Exp(logWeight);
                double xSample = xPrior.Sample();
                if (xSample > 0)
                {
                    est.Add(xSample);
                }
            }
            evidence /= nSamples;
            return(est.GetDistribution(new Gaussian()));
        }
Example #30
 public EstimatorTest()
 {
     // Create the jagged 2D distribution array, and create
     // the parallel jagged 2D array of Gaussian distributions
     dim1            = 2;
     dim2            = 3;
     ga2aDistArray   = new GaussianArray2DArray(dim1, dim2);
     ga2aArrayOfDist = new Gaussian[dim1, dim2][];
     for (int i = 0; i < dim1; i++)
     {
         for (int j = 0; j < dim2; j++)
         {
             ga2aDistArray[i, j]   = new GaussianArray(i + j + 1);
             ga2aArrayOfDist[i, j] = new Gaussian[i + j + 1];
             for (int k = 0; k < ga2aDistArray[i, j].Count; k++)
             {
                 ga2aDistArray[i, j][k]   = Gaussian.FromMeanAndPrecision((double)k, (double)((k + 1) * (k + 1)));
                 ga2aArrayOfDist[i, j][k] = new Gaussian(ga2aDistArray[i, j][k]);
             }
         }
     }
 }