/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="AAverageConditional(double, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian AAverageConditional(double sum, [SkipIfUniform] TruncatedGaussian b)
{
    // Message to 'a' in sum = a + b.  The Gaussian part is handled by the
    // Gaussian overload; b's truncation interval [L, U] maps to
    // [sum - U, sum - L] for a = sum - b.
    Gaussian bGaussian = b.Gaussian;
    Gaussian aGaussian = AAverageConditional(sum, bGaussian);
    double lower = sum - b.UpperBound;
    double upper = sum - b.LowerBound;
    return new TruncatedGaussian(aGaussian, lower, upper);
}
/// <summary>
/// Updates the Rprop search buffer for a TruncatedGaussian variable: computes the
/// current search point and the derivative of the truncated-Gaussian log-density
/// there, then advances the buffer one Rprop step.
/// </summary>
/// <param name="use">Incoming message from uses (may be uninitialized).</param>
/// <param name="def">Incoming message from the definition.</param>
/// <param name="to_marginal">Previous outgoing marginal; its point (if a point mass) seeds the search.</param>
/// <param name="bufferTG">Buffer holding Rprop state; mutated and returned.</param>
/// <returns>The updated <paramref name="bufferTG"/>.</returns>
public static RpropBufferData BufferTG([NoInit] TruncatedGaussian use, TruncatedGaussian def, TruncatedGaussian to_marginal, RpropBufferData bufferTG)
{
    var currDist = use * def;
    if (currDist.IsPointMass)
    {
        // A point-mass product pins the next point directly; an infinite point
        // would make the search meaningless, so reject it.
        if (double.IsInfinity(currDist.Point))
        {
            throw new ArgumentOutOfRangeException(nameof(use), "infinite point mass");
        }
        bufferTG.nextPoint = currDist.Point;
        return(bufferTG);
    }
    // cannot use buffer.nextPoint as currPoint since Marginal could be initialized by user
    double currPoint;
    if (to_marginal.IsPointMass)
    {
        currPoint = to_marginal.Point;
    }
    else
    {
        // Fall back to the mean of the current (truncated) product distribution.
        currPoint = currDist.GetMean();
    }
    // deriv of -0.5*prec*x^2+pm*x
    // is -prec*x + pm
    double currDeriv = currDist.Gaussian.MeanTimesPrecision - currDist.Gaussian.Precision * currPoint;
    // Propagate the truncation bounds so the Rprop step stays inside [lower, upper].
    bufferTG.lowerBound = currDist.LowerBound;
    bufferTG.upperBound = currDist.UpperBound;
    bufferTG.SetNextPoint(currPoint, currDeriv);
    return(bufferTG);
}
/// <summary>
/// VMP evidence contribution: replaces the Gaussian entropy of 'value' with the
/// entropy of the truncated posterior implied by 'copy'.
/// </summary>
public static double AverageLogFactor(TruncatedGaussian copy, Gaussian value, Gaussian to_value)
{
    Gaussian cavity = value / to_value;
    TruncatedGaussian truncatedPosterior = copy * new TruncatedGaussian(cavity);
    double gaussianEntropyTerm = value.GetAverageLog(value);
    double truncatedEntropyTerm = truncatedPosterior.GetAverageLog(truncatedPosterior);
    return gaussianEntropyTerm - truncatedEntropyTerm;
}
/// <summary>
/// Checks that the TruncatedGaussian density integrates to 1 over its support.
/// </summary>
public void TruncatedGaussianNormaliser()
{
    double a = 0, b = 2;
    var g = new TruncatedGaussian(3, 1, a, b);
    double Z = Quadrature.AdaptiveTrapeziumRule(x => System.Math.Exp(g.GetLogProb(x)), 32, a, b, 1e-10, 10000);
    // BUG FIX: the original one-sided check (1.0 - Z) < 1e-4 passed trivially
    // whenever Z exceeded 1, so an over-normalised density went undetected.
    // Use the absolute error instead.
    Assert.True(System.Math.Abs(1.0 - Z) < 1e-4);
}
/// <summary>
/// EP message for the copy: only defined when 'value' is a point mass, in which
/// case the point mass is returned as a Gaussian.
/// </summary>
/// <exception cref="ArgumentException">Thrown when 'value' is not a point mass.</exception>
public static Gaussian CopyAverageConditional(TruncatedGaussian value)
{
    if (value.IsPointMass)
    {
        return value.ToGaussian();
    }
    throw new ArgumentException("value is not a point mass");
}
/// <summary>
/// VMP message to 'value': forms the truncated posterior from the cavity
/// distribution, moment-matches it to a Gaussian, and divides the cavity back out.
/// </summary>
public static Gaussian ValueAverageLogarithm(TruncatedGaussian copy, [Proper] Gaussian value, Gaussian to_value)
{
    Gaussian cavity = value / to_value;
    TruncatedGaussian posterior = copy * new TruncatedGaussian(cavity);
    // is this ok?  (original author's open question about the moment-match step)
    Gaussian message = posterior.ToGaussian() / cavity;
    return message;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="AAverageConditional(TruncatedGaussian, double)"]/*'/>
public static TruncatedGaussian AAverageConditional([SkipIfUniform] TruncatedGaussian sum, double b)
{
    // a = sum - b: shift both the Gaussian part and the truncation bounds by -b.
    TruncatedGaussian result = sum;
    result.Gaussian = AAverageConditional(sum.Gaussian, b);
    result.LowerBound -= b;
    result.UpperBound -= b;
    return result;
}
// ----------------------------------------------------------------------------------------------------------------------
// TruncatedGaussian
// ----------------------------------------------------------------------------------------------------------------------
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="SumAverageConditional(double, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian SumAverageConditional(double a, [SkipIfUniform] TruncatedGaussian b)
{
    // sum = a + b: shift both the Gaussian part and the truncation bounds by +a.
    TruncatedGaussian result = b;
    result.Gaussian = SumAverageConditional(a, b.Gaussian);
    result.LowerBound += a;
    result.UpperBound += a;
    return result;
}
/// <summary>
/// EP message to 'x' given a Bernoulli message on isPositive.  Only uniform or
/// point-mass Bernoullis are representable as a TruncatedGaussian.
/// </summary>
/// <exception cref="NotSupportedException">Thrown when isPositive is genuinely random.</exception>
public static TruncatedGaussian XAverageConditional([SkipIfUniform] Bernoulli isPositive)
{
    if (isPositive.IsUniform())
    {
        return TruncatedGaussian.Uniform();
    }
    if (!isPositive.IsPointMass)
    {
        throw new NotSupportedException("Cannot return a TruncatedGaussian when isPositive is random");
    }
    return XAverageConditional(isPositive.Point);
}
/// <summary>
/// VMP evidence helper: swaps the Gaussian entropy of X for the entropy of the
/// cavity distribution truncated to x &gt; 0.  log(1) = 0, so the factor itself
/// adds nothing.
/// </summary>
/// <exception cref="ImproperMessageException">Thrown when the cavity distribution is improper.</exception>
public static double AverageLogFactor_helper([SkipIfUniform] Gaussian X, Gaussian to_X)
{
    //if (!isPositive) throw new ArgumentException("VariationalMessagePassing requires isPositive=true", "isPositive");
    Gaussian cavity = X / to_X;
    if (!cavity.IsProper())
    {
        throw new ImproperMessageException(cavity);
    }
    TruncatedGaussian positivePart = new TruncatedGaussian(cavity) { LowerBound = 0 };
    return X.GetAverageLog(X) - positivePart.GetAverageLog(positivePart);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="VariablePointOp_RpropTruncatedGaussian"]/message_doc[@name="MarginalAverageConditional(TruncatedGaussian, TruncatedGaussian, RpropBufferData, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian MarginalAverageConditional([IgnoreDependency] TruncatedGaussian use, [IgnoreDependency] TruncatedGaussian def, [RequiredArgument] RpropBufferData bufferTG, TruncatedGaussian result)
{
    // The marginal is a point mass at the buffer's current Rprop search point;
    // the incoming messages are deliberately ignored.
    result.Point = bufferTG.nextPoint;
    return result;
}
/// <summary>
/// EP message to 'sample': the untruncated Gaussian prior wrapped as a
/// TruncatedGaussian with infinite bounds.  'result' is ignored (fresh value returned).
/// </summary>
public static TruncatedGaussian SampleAverageConditional(double mean, double precision, TruncatedGaussian result)
{
    Gaussian parent = Gaussian.FromMeanAndPrecision(mean, precision);
    return TruncatedGaussian.FromGaussian(parent);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp"]/message_doc[@name="MeanAverageConditional(double, double, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian MeanAverageConditional(double sample, double variance, TruncatedGaussian result)
{
    // The factor is symmetric in sample and mean, so delegate.
    return SampleAverageConditional(sample, variance, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="GaussianFromMeanAndVarianceOp"]/message_doc[@name="SampleAverageConditional(double, double, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian SampleAverageConditional(double mean, double variance, TruncatedGaussian result)
{
    // Wrap the untruncated Gaussian prior as a TruncatedGaussian; 'result' is unused.
    Gaussian parent = Gaussian.FromMeanAndVariance(mean, variance);
    return TruncatedGaussian.FromGaussian(parent);
}
/// <summary>
/// VMP evidence contribution for the IsBetween constraint: swaps the Gaussian
/// entropy of X for the entropy of the cavity truncated to [lowerBound, upperBound].
/// </summary>
/// <exception cref="ArgumentException">Thrown when isBetween is false.</exception>
public static double AverageLogFactor(bool isBetween, [Stochastic] Gaussian X, double lowerBound, double upperBound, Gaussian to_X)
{
    if (!isBetween)
    {
        throw new ArgumentException("TruncatedGaussian requires isBetween=true", "isBetween");
    }
    Gaussian cavity = X / to_X;
    TruncatedGaussian truncated = new TruncatedGaussian(cavity) { LowerBound = lowerBound, UpperBound = upperBound };
    return X.GetAverageLog(X) - truncated.GetAverageLog(truncated);
}
/// <summary>
/// Populates every distribution field with fixed non-default values so that a
/// round-trip serialization test can verify each field was actually restored.
/// </summary>
public void Initialize()
{
    // DO NOT make this a constructor, because it makes the test not notice complete lack of serialization as an empty object is set up exactly as the thing
    // you are trying to deserialize.
    // Scalar and matrix-valued distributions, each with arbitrary but fixed parameters.
    this.pareto = new Pareto(1.2, 3.5);
    this.poisson = new Poisson(2.3);
    this.wishart = new Wishart(20, new PositiveDefiniteMatrix(new double[, ] { { 22, 21 }, { 21, 23 } }));
    this.vectorGaussian = new VectorGaussian(Vector.FromArray(13, 14), new PositiveDefiniteMatrix(new double[, ] { { 16, 15 }, { 15, 17 } }));
    this.unnormalizedDiscrete = UnnormalizedDiscrete.FromLogProbs(DenseVector.FromArray(5.1, 5.2, 5.3));
    this.pointMass = PointMass <double> .Create(1.1);
    this.gaussian = new Gaussian(11.0, 12.0);
    this.nonconjugateGaussian = new NonconjugateGaussian(1.2, 2.3, 3.4, 4.5);
    this.gamma = new Gamma(9.0, 10.0);
    this.gammaPower = new GammaPower(5.6, 2.8, 3.4);
    this.discrete = new Discrete(6.0, 7.0, 8.0);
    this.conjugateDirichlet = new ConjugateDirichlet(1.2, 2.3, 3.4, 4.5);
    this.dirichlet = new Dirichlet(3.0, 4.0, 5.0);
    this.beta = new Beta(2.0, 1.0);
    this.binomial = new Binomial(5, 0.8);
    this.bernoulli = new Bernoulli(0.6);
    // Sparse lists: a constant background value plus a few overridden entries,
    // so sparse-index serialization is exercised too.
    this.sparseBernoulliList = SparseBernoulliList.Constant(4, new Bernoulli(0.1));
    this.sparseBernoulliList[1] = new Bernoulli(0.9);
    this.sparseBernoulliList[3] = new Bernoulli(0.7);
    this.sparseBetaList = SparseBetaList.Constant(5, new Beta(2.0, 2.0));
    this.sparseBetaList[0] = new Beta(3.0, 4.0);
    this.sparseBetaList[1] = new Beta(5.0, 6.0);
    this.sparseGaussianList = SparseGaussianList.Constant(6, Gaussian.FromMeanAndPrecision(0.1, 0.2));
    this.sparseGaussianList[4] = Gaussian.FromMeanAndPrecision(0.3, 0.4);
    this.sparseGaussianList[5] = Gaussian.FromMeanAndPrecision(0.5, 0.6);
    this.sparseGammaList = SparseGammaList.Constant(1, Gamma.FromShapeAndRate(1.0, 2.0));
    this.truncatedGamma = new TruncatedGamma(1.2, 2.3, 3.4, 4.5);
    this.truncatedGaussian = new TruncatedGaussian(1.2, 3.4, 5.6, 7.8);
    this.wrappedGaussian = new WrappedGaussian(1.2, 2.3, 3.4);
    // Distribution arrays: 1-D, 2-D, and jagged variants over the scalar/vector Gaussians above.
    ga = Distribution <double> .Array(new[] { this.gaussian, this.gaussian });
    vga = Distribution <Vector> .Array(new[] { this.vectorGaussian, this.vectorGaussian });
    ga2D = Distribution <double> .Array(new[, ] { { this.gaussian, this.gaussian }, { this.gaussian, this.gaussian } });
    vga2D = Distribution <Vector> .Array(new[, ] { { this.vectorGaussian, this.vectorGaussian }, { this.vectorGaussian, this.vectorGaussian } });
    gaJ = Distribution <double> .Array(new[] { new[] { this.gaussian, this.gaussian }, new[] { this.gaussian, this.gaussian } });
    vgaJ = Distribution <Vector> .Array(new[] { new[] { this.vectorGaussian, this.vectorGaussian }, new[] { this.vectorGaussian, this.vectorGaussian } });
    // Sparse Gaussian process over a tiny 2-point basis.
    var gp = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));
    var basis = Util.ArrayInit(2, i => Vector.FromArray(1.0 * i));
    this.sparseGp = new SparseGP(new SparseGPFixed(gp, basis));
    this.quantileEstimator = new QuantileEstimator(0.01);
    this.quantileEstimator.Add(5);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="SumAverageConditional(TruncatedGaussian, double)"]/*'/>
public static TruncatedGaussian SumAverageConditional([SkipIfUniform] TruncatedGaussian a, double b)
{
    // Addition commutes, so reuse the (double, TruncatedGaussian) overload.
    return SumAverageConditional(b, a);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="LogAverageFactor(double, double, TruncatedGaussian)"]/*'/>
public static double LogAverageFactor(double sum, double a, [SkipIfUniform] TruncatedGaussian b)
{
    // Evidence = density of the observed sum under the message from (a + b).
    TruncatedGaussian messageToSum = SumAverageConditional(a, b);
    return messageToSum.GetLogProb(sum);
}
/// <summary>
/// VMP evidence contribution: zero, because the constraint is already absorbed
/// into the truncated-Gaussian message on x.
/// </summary>
public static double AverageLogFactor(bool isPositive, [SkipIfUniform] TruncatedGaussian x)
{
    return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusTruncatedGaussianOp"]/message_doc[@name="SumAverageConditional(Gaussian, TruncatedGaussian)"]/*'/>
public static Gaussian SumAverageConditional([SkipIfUniform] Gaussian a, [SkipIfUniform] TruncatedGaussian b)
{
    // Project b onto its moment-matched Gaussian, then delegate to the plain
    // Gaussian plus-operator.
    Gaussian bProjected = b.ToGaussian();
    return DoublePlusOp.SumAverageConditional(a, bProjected);
}
/// <summary>
/// Runs a Metropolis-within-Gibbs sampler for an item-response model: per
/// (student, question) latent responses x, per-student abilities, per-question
/// difficulties and (when numParams > 1) discriminations, plus Gaussian/Gamma
/// hyperparameters.  Posterior summaries are stored in the *Post/*Cred fields
/// and responseProbMean.
/// </summary>
/// <param name="options">Sampler settings: numParams (1 or 2), numberOfSamples, burnIn, credibleIntervalProbability.</param>
/// <param name="data">Student-by-question response matrix; entries > 0 count as correct.</param>
public void Sample(Options options, Matrix data)
{
    if (options.numParams > 2)
    {
        throw new Exception("numParams > 2");
    }
    int numStudents = data.Rows;
    int numQuestions = data.Cols;
    // initialize the sampler at the mean of the priors (not sampling from the priors)
    double abilityMean = abilityMeanPrior.GetMean();
    double abilityPrec = abilityPrecPrior.GetMean();
    double difficultyMean = difficultyMeanPrior.GetMean();
    double difficultyPrec = difficultyPrecPrior.GetMean();
    double discriminationMean = discriminationMeanPrior.GetMean();
    double discriminationPrec = discriminationPrecPrior.GetMean();
    double[] ability = new double[numStudents];
    double[] difficulty = new double[numQuestions];
    // Per-question difficulty accumulators; observed values (if any) clamp the chain.
    List <double>[] difficultySamples = new List <double> [numQuestions];
    GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        difficultyEstimator[question] = new GaussianEstimator();
        difficultySamples[question] = new List <double>();
        if (difficultyObserved != null)
        {
            difficulty[question] = difficultyObserved[question];
            difficultyEstimator[question].Add(difficultyObserved[question]);
            difficultySamples[question].Add(difficultyObserved[question]);
        }
    }
    // Per-student ability accumulators, clamped the same way when observed.
    List <double>[] abilitySamples = new List <double> [numStudents];
    GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
    for (int student = 0; student < abilityEstimator.Length; student++)
    {
        abilityEstimator[student] = new GaussianEstimator();
        abilitySamples[student] = new List <double>();
        if (abilityObserved != null)
        {
            ability[student] = abilityObserved[student];
            abilityEstimator[student].Add(abilityObserved[student]);
            abilitySamples[student].Add(abilityObserved[student]);
        }
    }
    // Discriminations default to 1 (i.e. a 1PL model) unless observed.
    double[] discrimination = new double[numQuestions];
    List <double>[] discriminationSamples = new List <double> [numQuestions];
    GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        discriminationEstimator[question] = new GammaEstimator();
        discriminationSamples[question] = new List <double>();
        discrimination[question] = 1;
        if (discriminationObserved != null)
        {
            discrimination[question] = discriminationObserved[question];
            discriminationEstimator[question].Add(discriminationObserved[question]);
            discriminationSamples[question].Add(discriminationObserved[question]);
        }
    }
    responseProbMean = new Matrix(numStudents, numQuestions);
    int niters = options.numberOfSamples;
    int burnin = options.burnIn;
    // Variance of the standard logistic distribution (pi^2/3); used to match a
    // scale-mixture-of-Gaussians representation of the logistic likelihood.
    double logisticVariance = Math.PI * Math.PI / 3;
    double shape = 4.5;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);
    // NOTE(review): the Gamma prior above is immediately overwritten with a point
    // mass at 1, disabling the per-cell precision resampling in effect — presumably
    // deliberate (probit rather than logistic noise); confirm before changing.
    precPrior = Gamma.PointMass(1);
    double[,] prec = new double[numStudents, numQuestions];
    double[,] x = new double[numStudents, numQuestions];
    int numRejected = 0, numAttempts = 0;
    for (int iter = 0; iter < niters; iter++)
    {
        for (int student = 0; student < numStudents; student++)
        {
            for (int question = 0; question < numQuestions; question++)
            {
                // sample prec given ability, difficulty, x
                // N(x; ability-difficulty, 1/prec) = Gamma(prec; 1.5, (x-ability+difficulty)^2/2)
                Gamma precPost = precPrior;
                double xMean = (ability[student] - difficulty[question]) * discrimination[question];
                double delta = x[student, question] - xMean;
                Gamma like = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                precPost.SetToProduct(precPost, like);
                prec[student, question] = precPost.Sample();
                // sample x given ability, difficulty, prec, data
                // using an independence chain MH
                bool y = (data[student, question] > 0);
                double sign = y ? 1.0 : -1.0;
                Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                // we want to sample from xPrior*I(x>0)
                // instead we sample from xPost
                Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                double oldx = x[student, question];
                double newx = xPost.Sample();
                numAttempts++;
                if (newx * sign < 0)
                {
                    // Proposal has the wrong sign for the observed response: reject outright.
                    newx = oldx; // rejected
                    numRejected++;
                }
                else
                {
                    // importance weights
                    double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                    double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                    // acceptance ratio
                    double paccept = Math.Exp(neww - oldw);
                    if (paccept < 1 && Rand.Double() > paccept)
                    {
                        newx = oldx; // rejected
                        numRejected++;
                    }
                }
                x[student, question] = newx;
                if (iter >= burnin)
                {
                    double responseProb = MMath.Logistic(xMean);
                    responseProbMean[student, question] += responseProb;
                }
            }
        }
        if (abilityObserved == null)
        {
            // sample ability given difficulty, prec, x
            for (int student = 0; student < numStudents; student++)
            {
                Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                for (int question = 0; question < numQuestions; question++)
                {
                    // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                    Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, abilityLike);
                }
                ability[student] = post.Sample();
                if (iter >= burnin)
                {
                    abilityEstimator[student].Add(post);
                    abilitySamples[student].Add(ability[student]);
                }
            }
        }
        // sample difficulty given ability, prec, x
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
            for (int student = 0; student < numStudents; student++)
            {
                // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                if (discrimination[question] > 0)
                {
                    Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, like);
                }
            }
            difficulty[question] = post.Sample();
            if (iter >= burnin)
            {
                //if (difficulty[question] > 100)
                // Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                difficultyEstimator[question].Add(post);
                difficultySamples[question].Add(difficulty[question]);
            }
        }
        if (options.numParams > 1 && discriminationObserved == null)
        {
            // sample discrimination given ability, difficulty, prec, x
            for (int question = 0; question < numQuestions; question++)
            {
                // moment-matching on the prior
                Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                Gaussian post = approxPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                    double delta = ability[student] - difficulty[question];
                    if (delta > 0)
                    {
                        Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                        post.SetToProduct(post, like);
                    }
                }
                // Propose from the Gaussian approximation truncated to positive
                // discriminations, then MH-correct against the true log-normal prior.
                TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                double olddisc = discrimination[question];
                double newdisc = postTrunc.Sample();
                // importance weights
                Func <double, double> priorLogProb = delegate(double d)
                {
                    double logd = Math.Log(d);
                    return(Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                };
                double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                // acceptance ratio
                double paccept = Math.Exp(neww - oldw);
                if (paccept < 1 && Rand.Double() > paccept)
                {
                    // rejected
                }
                else
                {
                    discrimination[question] = newdisc;
                }
                if (iter >= burnin)
                {
                    discriminationEstimator[question].Add(discrimination[question]);
                    discriminationSamples[question].Add(discrimination[question]);
                }
            }
        }
        // sample abilityMean given ability, abilityPrec
        Gaussian abilityMeanPost = abilityMeanPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
            abilityMeanPost *= like;
        }
        abilityMean = abilityMeanPost.Sample();
        // sample abilityPrec given ability, abilityMean
        Gamma abilityPrecPost = abilityPrecPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
            abilityPrecPost *= like;
        }
        abilityPrec = abilityPrecPost.Sample();
        // sample difficultyMean given difficulty, difficultyPrec
        Gaussian difficultyMeanPost = difficultyMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
            difficultyMeanPost *= like;
        }
        difficultyMean = difficultyMeanPost.Sample();
        // sample difficultyPrec given difficulty, difficultyMean
        Gamma difficultyPrecPost = difficultyPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
            difficultyPrecPost *= like;
        }
        difficultyPrec = difficultyPrecPost.Sample();
        // sample discriminationMean given discrimination, discriminationPrec
        // (hyperparameters of the log-normal discrimination prior, updated on log scale)
        Gaussian discriminationMeanPost = discriminationMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
            discriminationMeanPost *= like;
        }
        discriminationMean = discriminationMeanPost.Sample();
        // sample discriminationPrec given discrimination, discriminationMean
        Gamma discriminationPrecPost = discriminationPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
            discriminationPrecPost *= like;
        }
        discriminationPrec = discriminationPrecPost.Sample();
        //if (iter % 1 == 0)
        // Console.WriteLine("iter = {0}", iter);
    }
    //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
    //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
    // Post-burn-in averages and posterior summaries.
    int numSamplesUsed = niters - burnin;
    responseProbMean.Scale(1.0 / numSamplesUsed);
    //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
    difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    abilityPost = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    if (options.numParams > 1)
    {
        discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
    }
    abilityCred = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
    difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
    // Debug hook: flip saveSamples to dump one question's chains to MATLAB format.
    bool saveSamples = false;
    if (saveSamples)
    {
        using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
        {
            int q = 11;
            writer.Write("difficulty", difficultySamples[q]);
            writer.Write("discrimination", discriminationSamples[q]);
        }
    }
}
/// <summary>
/// VMP evidence helper: removes the Gaussian entropy of X and adds the entropy of
/// the cavity truncated to x &gt; 0.  The factor itself contributes log(1) = 0.
/// </summary>
/// <exception cref="ImproperMessageException">Thrown when the cavity distribution is improper.</exception>
public static double AverageLogFactor_helper([SkipIfUniform] Gaussian X, Gaussian to_X)
{
    //if (!isPositive) throw new ArgumentException("VariationalMessagePassing requires isPositive=true", "isPositive");
    Gaussian cavityDist = X / to_X;
    if (!cavityDist.IsProper())
    {
        throw new ImproperMessageException(cavityDist);
    }
    TruncatedGaussian truncatedCavity = new TruncatedGaussian(cavityDist);
    truncatedCavity.LowerBound = 0;
    double entropyDiff = X.GetAverageLog(X) - truncatedCavity.GetAverageLog(truncatedCavity);
    return entropyDiff;
}
/// <summary>
/// EP message to 'mean': the factor is symmetric in sample and mean, so delegate
/// to the sample overload.
/// </summary>
public static TruncatedGaussian MeanAverageConditional(double sample, double precision, TruncatedGaussian result)
{
    return SampleAverageConditional(sample, precision, result);
}
/// <summary>
/// VMP evidence contribution: zero (the factor adds no evidence).
/// </summary>
public static double AverageLogFactor(TruncatedGaussian X)
{
    return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="TruncatedGaussianIsBetweenOp"]/message_doc[@name="LogEvidenceRatio(bool, TruncatedGaussian, double, double)"]/*'/>
public static double LogEvidenceRatio(bool isBetween, [SkipIfUniform] TruncatedGaussian x, double lowerBound, double upperBound)
{
    // Evidence = log of the average of x's message with the message to x.
    TruncatedGaussian toX = XAverageConditional(isBetween, lowerBound, upperBound);
    return x.GetLogAverageOf(toX);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="PlusTruncatedGaussianOp"]/message_doc[@name="AAverageConditional(Gaussian, Gaussian)"]/*'/>
public static Gaussian AAverageConditional([SkipIfUniform] Gaussian sum, [SkipIfUniform] TruncatedGaussian b)
{
    // Project b onto its moment-matched Gaussian and delegate to DoublePlusOp.
    Gaussian bProjected = b.ToGaussian();
    return DoublePlusOp.AAverageConditional(sum, bProjected);
}
/// <summary>
/// VMP evidence contribution: zero (the factor adds no evidence).
/// </summary>
public static double AverageLogFactor(TruncatedGaussian X)
{
    return 0.0;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="BAverageConditional(double, TruncatedGaussian)"]/*'/>
public static TruncatedGaussian BAverageConditional(double sum, [SkipIfUniform] TruncatedGaussian a)
{
    // Addition is symmetric in a and b, so the message to b equals the message to a.
    return AAverageConditional(sum, a);
}
/// <summary>
/// Populates every distribution field with fixed non-default values so that a
/// round-trip serialization test can verify each field was actually restored.
/// </summary>
/// <param name="skipStringDistributions">When true, string distributions are left unset (they cannot be serialized by some formatters).</param>
public void Initialize(bool skipStringDistributions = false)
{
    // DO NOT make this a constructor, because it makes the test not notice complete lack of serialization as an empty object is set up exactly as the thing
    // you are trying to deserialize.
    // Scalar and matrix-valued distributions, each with arbitrary but fixed parameters.
    this.pareto = new Pareto(1.2, 3.5);
    this.poisson = new Poisson(2.3);
    this.wishart = new Wishart(20, new PositiveDefiniteMatrix(new double[, ] { { 22, 21 }, { 21, 23 } }));
    this.vectorGaussian = new VectorGaussian(Vector.FromArray(13, 14), new PositiveDefiniteMatrix(new double[, ] { { 16, 15 }, { 15, 17 } }));
    this.unnormalizedDiscrete = UnnormalizedDiscrete.FromLogProbs(DenseVector.FromArray(5.1, 5.2, 5.3));
    this.pointMass = PointMass <double> .Create(1.1);
    this.gaussian = new Gaussian(11.0, 12.0);
    this.nonconjugateGaussian = new NonconjugateGaussian(1.2, 2.3, 3.4, 4.5);
    this.gamma = new Gamma(9.0, 10.0);
    this.gammaPower = new GammaPower(5.6, 2.8, 3.4);
    this.discrete = new Discrete(6.0, 7.0, 8.0);
    this.conjugateDirichlet = new ConjugateDirichlet(1.2, 2.3, 3.4, 4.5);
    this.dirichlet = new Dirichlet(3.0, 4.0, 5.0);
    this.beta = new Beta(2.0, 1.0);
    this.binomial = new Binomial(5, 0.8);
    this.bernoulli = new Bernoulli(0.6);
    // Sparse lists: a constant background value plus a few overridden entries,
    // so sparse-index serialization is exercised too.
    this.sparseBernoulliList = SparseBernoulliList.Constant(4, new Bernoulli(0.1));
    this.sparseBernoulliList[1] = new Bernoulli(0.9);
    this.sparseBernoulliList[3] = new Bernoulli(0.7);
    this.sparseBetaList = SparseBetaList.Constant(5, new Beta(2.0, 2.0));
    this.sparseBetaList[0] = new Beta(3.0, 4.0);
    this.sparseBetaList[1] = new Beta(5.0, 6.0);
    this.sparseGaussianList = SparseGaussianList.Constant(6, Gaussian.FromMeanAndPrecision(0.1, 0.2));
    this.sparseGaussianList[4] = Gaussian.FromMeanAndPrecision(0.3, 0.4);
    this.sparseGaussianList[5] = Gaussian.FromMeanAndPrecision(0.5, 0.6);
    this.sparseGammaList = SparseGammaList.Constant(1, Gamma.FromShapeAndRate(1.0, 2.0));
    this.truncatedGamma = new TruncatedGamma(1.2, 2.3, 3.4, 4.5);
    this.truncatedGaussian = new TruncatedGaussian(1.2, 3.4, 5.6, 7.8);
    this.wrappedGaussian = new WrappedGaussian(1.2, 2.3, 3.4);
    // Distribution arrays: 1-D, 2-D, and jagged variants over the scalar/vector Gaussians above.
    ga = Distribution <double> .Array(new[] { this.gaussian, this.gaussian });
    vga = Distribution <Vector> .Array(new[] { this.vectorGaussian, this.vectorGaussian });
    ga2D = Distribution <double> .Array(new[, ] { { this.gaussian, this.gaussian }, { this.gaussian, this.gaussian } });
    vga2D = Distribution <Vector> .Array(new[, ] { { this.vectorGaussian, this.vectorGaussian }, { this.vectorGaussian, this.vectorGaussian } });
    gaJ = Distribution <double> .Array(new[] { new[] { this.gaussian, this.gaussian }, new[] { this.gaussian, this.gaussian } });
    vgaJ = Distribution <Vector> .Array(new[] { new[] { this.vectorGaussian, this.vectorGaussian }, new[] { this.vectorGaussian, this.vectorGaussian } });
    // Sparse Gaussian process over a tiny 2-point basis.
    var gp = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));
    var basis = Util.ArrayInit(2, i => Vector.FromArray(1.0 * i));
    this.sparseGp = new SparseGP(new SparseGPFixed(gp, basis));
    // Quantile estimators, chained so the derived ones depend on the base estimator.
    this.quantileEstimator = new QuantileEstimator(0.01);
    this.quantileEstimator.Add(5);
    this.outerQuantiles = OuterQuantiles.FromDistribution(3, this.quantileEstimator);
    this.innerQuantiles = InnerQuantiles.FromDistribution(3, this.outerQuantiles);
    if (!skipStringDistributions)
    {
        // String distributions can not be serialized by some formatters (namely BinaryFormatter)
        // That is fine because this combination is never used in practice
        this.stringDistribution1 = StringDistribution.String("aa")
            .Append(StringDistribution.OneOf("b", "ccc")).Append("dddd");
        this.stringDistribution2 = new StringDistribution();
        this.stringDistribution2.SetToProduct(StringDistribution.OneOf("a", "b"), StringDistribution.OneOf("b", "c"));
    }
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DoublePlusOp"]/message_doc[@name="LogAverageFactor(TruncatedGaussian, double, double)"]/*'/>
public static double LogAverageFactor([SkipIfUniform] TruncatedGaussian sum, double a, double b)
{
    // Evidence = density of the deterministic sum a + b under the 'sum' message.
    double total = Factor.Plus(a, b);
    return sum.GetLogProb(total);
}
/// <summary>
/// EP message to 'sample': the untruncated Gaussian prior wrapped as a
/// TruncatedGaussian with infinite bounds.  'result' is ignored (fresh value returned).
/// </summary>
public static TruncatedGaussian SampleAverageConditional(double mean, double variance, TruncatedGaussian result)
{
    Gaussian parent = Gaussian.FromMeanAndVariance(mean, variance);
    return TruncatedGaussian.FromGaussian(parent);
}
/// <summary>
/// VMP message for the copy: the moment-matched Gaussian projection of 'value'.
/// </summary>
public static Gaussian CopyAverageLogarithm(TruncatedGaussian value)
{
    return value.ToGaussian();
}
/// <summary>
/// EP message to 'mean': the factor is symmetric in sample and mean, so delegate
/// to the sample overload.
/// </summary>
public static TruncatedGaussian MeanAverageConditional(double sample, double variance, TruncatedGaussian result)
{
    return SampleAverageConditional(sample, variance, result);
}
/// <summary>
/// VMP message to X for the IsBetween constraint: truncate the cavity to
/// [lowerBound, upperBound], moment-match to a Gaussian, and divide the cavity out.
/// </summary>
/// <exception cref="ArgumentException">Thrown when isBetween is false.</exception>
public static Gaussian XAverageLogarithm(bool isBetween, [Stochastic] Gaussian X, double lowerBound, double upperBound, Gaussian to_X)
{
    if (!isBetween)
    {
        throw new ArgumentException("TruncatedGaussian requires isBetween=true", "isBetween");
    }
    Gaussian cavity = X / to_X;
    TruncatedGaussian truncated = new TruncatedGaussian(cavity) { LowerBound = lowerBound, UpperBound = upperBound };
    return truncated.ToGaussian() / cavity;
}
/// <summary>
/// VMP evidence contribution: zero regardless of X (hence [IgnoreDependency]).
/// </summary>
public static double AverageLogFactor([IgnoreDependency] TruncatedGaussian X)
{
    return 0.0;
}