/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BetaOp"]/message_doc[@name="MeanAverageLogarithm(Beta, Beta, Gamma, Beta)"]/*'/> public static Beta MeanAverageLogarithm([Proper] Beta prob, Beta mean, [Proper] Gamma totalCount, Beta to_mean) { // Calculate gradient using method for DirichletOp double ELogP, ELogOneMinusP; prob.GetMeanLogs(out ELogP, out ELogOneMinusP); Vector gradS = DirichletOp.CalculateGradientForMean( Vector.FromArray(new double[] { mean.TrueCount, mean.FalseCount }), totalCount, Vector.FromArray(new double[] { ELogP, ELogOneMinusP })); // Project onto a Beta distribution Matrix A = new Matrix(2, 2); double c = MMath.Trigamma(mean.TotalCount); A[0, 0] = MMath.Trigamma(mean.TrueCount) - c; A[1, 0] = A[0, 1] = -c; A[1, 1] = MMath.Trigamma(mean.FalseCount) - c; Vector theta = GammaFromShapeAndRateOp.twoByTwoInverse(A) * gradS; Beta approximateFactor = new Beta(theta[0] + 1, theta[1] + 1); if (damping == 0.0) { return(approximateFactor); } else { return((approximateFactor ^ (1 - damping)) * (to_mean ^ damping)); } }
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="LogOp_VMP"]/message_doc[@name="DAverageLogarithm(Gaussian, Gamma, Gamma)"]/*'/> public static Gamma DAverageLogarithm([SkipIfUniform] Gaussian log, [SkipIfUniform] Gamma d, Gamma to_D) { if (log.IsPointMass) { return(DAverageLogarithm(log.Point, d, to_D)); } Vector grad = Vector.Zero(2); double meanLog = d.GetMeanLog(); double m, v; log.GetMeanAndVariance(out m, out v); grad[0] = -MMath.Tetragamma(d.Shape) / (2 * v) - MMath.Trigamma(d.Shape) / v * (meanLog - m); grad[1] = (meanLog - m) / (v * d.Rate); Gamma approximateFactor = GammaFromShapeAndRateOp.NonconjugateProjection(d, grad); if (damping == 0.0) { return(approximateFactor); } else { return((approximateFactor ^ (1 - damping)) * (to_D ^ damping)); } }