/// <summary>
/// Computes the Gaussian-process predictive distribution at a test input.
/// </summary>
/// <param name="x">Test input at which to predict.</param>
/// <param name="xData">Training inputs.</param>
/// <param name="y">Gaussian beliefs over the training targets.</param>
/// <param name="kf">Kernel function used to evaluate covariances.</param>
/// <param name="spec">Precomputed matrix applied via quadratic forms (presumably the inverse training-kernel matrix — verify against caller).</param>
/// <returns>The predictive Gaussian at <paramref name="x"/>.</returns>
public static Gaussian GPPrediction(Vector x, Vector[] xData, Gaussian[] y, KernelFunction kf, PositiveDefiniteMatrix spec)
{
    // Kernel evaluations between the test point and every training input: k(x, x_i).
    double[] crossKernel = new double[xData.Length];
    for (int i = 0; i < xData.Length; i++)
    {
        crossKernel[i] = kf.EvaluateX1X2(x, xData[i]);
    }
    Vector kxd = Vector.FromArray(crossKernel);

    // Means of the (possibly uncertain) training targets.
    double[] targetMeans = new double[y.Length];
    for (int i = 0; i < y.Length; i++)
    {
        targetMeans[i] = y[i].GetMean();
    }

    double predictiveMean = spec.QuadraticForm(kxd, Vector.FromArray(targetMeans));
    // Prior variance at x minus the variance explained by the training data.
    double predictiveVariance = kf.EvaluateX1X2(x, x) - spec.QuadraticForm(kxd);
    return Gaussian.FromMeanAndVariance(predictiveMean, predictiveVariance);
}
/// <summary>
/// VMP message to 'X'.
/// </summary>
/// <param name="A">Incoming message from 'A'. Must be a proper distribution. If all elements are uniform, the result will be uniform.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If all elements are uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'X' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'X' as the random arguments are varied.
/// The formula is <c>proj[sum_(A,B) p(A,B) factor(X,A,B)]</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="A"/> is not a proper distribution</exception>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian XAverageLogarithm([SkipIfAllUniform] GaussianArray A, [SkipIfAllUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    int K = MeanOfB.Count;
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    var ma = Vector.Zero(K);
    var va = Vector.Zero(K);
    for (int k = 0; k < K; k++)
    {
        double m, v;
        A[k].GetMeanAndVariance(out m, out v);
        ma[k] = m;
        va[k] = v;
    }
    // Uses John Winn's rule for deterministic factors.
    // Strict variational inference would set the variance to 0.
    var mbj2 = Vector.Zero(K);
    mbj2.SetToFunction(MeanOfB, x => x * x); // slooow
    Gaussian result = new Gaussian();
    result.SetMeanAndVariance(ma.Inner(MeanOfB), va.Inner(mbj2) + CovarianceOfB.QuadraticForm(ma) + va.Inner(CovarianceOfB.Diagonal()));
    if (result.Precision < 0)
    {
        // Report improper outgoing messages via the library's runtime exception, consistent
        // with XAverageLogarithm(double[], ...); ApplicationException is reserved (CA2201).
        throw new InferRuntimeException("improper message");
    }
    return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductOpBase"]/message_doc[@name="AAverageConditional(Gaussian, Vector, DenseVector, PositiveDefiniteMatrix, VectorGaussian)"]/*'/>
public static VectorGaussian AAverageConditional([SkipIfUniform] Gaussian innerProduct, Vector A, DenseVector BMean, PositiveDefiniteMatrix BVariance, VectorGaussian result)
{
    // EP message to 'A' built from a second-order (Laplace-style) expansion of logZ around
    // the current point A, then converted to a Gaussian via VectorGaussian.FromDerivatives.
    if (innerProduct.IsUniform())
    {
        // Nothing to propagate: a uniform innerProduct message yields a uniform outgoing message.
        return(VectorGaussian.Uniform(A.Count));
    }
    // logZ = log N(mProduct; A'*BMean, vProduct + A'*BVariance*A)
    //      = -0.5 (mProduct - A'*BMean)^2 / (vProduct + A'*BVariance*A) -0.5 log(vProduct + A'*BVariance*A)
    // v*innerProduct.Precision
    // NOTE(review): 'v' is the denominator scaled by innerProduct.Precision, so the formulas
    // below are written in natural (precision/meanTimesPrecision) parameters throughout.
    double v = 1 + BVariance.QuadraticForm(A, A) * innerProduct.Precision;
    // diff = (mProduct - A'*BMean) * innerProduct.Precision, again in natural parameters.
    double diff = innerProduct.MeanTimesPrecision - A.Inner(BMean) * innerProduct.Precision;
    // dlogZ/dA = BMean * (mProduct - A'*BMean)/v + (BVariance*A) (diff)^2 / v^2 - (BVariance*A)/v
    double diff2 = diff * diff;
    double v2 = v * v;
    var avb = BVariance * A;
    var avbPrec = avb * innerProduct.Precision;
    var dlogZ = (BMean * diff - avbPrec) / v + avb * diff2 / v2;
    // -ddlogZ/dA^2 = (BMean.Outer(BMean) + BVariance) / v + avb.Outer(avb) * (4 * diff2 / (v2 * v))
    //                -(avb.Outer(avb - BMean * (2 * diff)) * 2 + BVariance * diff2) / v2;
    // The negative Hessian is accumulated term by term with rank-one outer-product updates.
    PositiveDefiniteMatrix negativeHessian = BVariance * (innerProduct.Precision / v - diff2 / v2);
    negativeHessian.SetToSumWithOuter(negativeHessian, innerProduct.Precision / v, BMean, BMean);
    negativeHessian.SetToSumWithOuter(negativeHessian, 4 * diff2 / (v2 * v) - 2 * innerProduct.Precision / v2, avb, avbPrec);
    negativeHessian.SetToSumWithOuter(negativeHessian, 2 * diff / v2, avbPrec, BMean);
    negativeHessian.SetToSumWithOuter(negativeHessian, 2 * diff / v2, BMean, avbPrec);
    // The asymmetric rank-one updates above only fill one triangle's worth of cross terms;
    // symmetrizing restores an exactly symmetric matrix before it is used as a precision.
    negativeHessian.Symmetrize();
    return(VectorGaussian.FromDerivatives(A, dlogZ, negativeHessian, GaussianProductOp.ForceProper));
}
/// <summary>
/// VMP message to 'innerProduct'.
/// </summary>
/// <param name="A">Constant value for 'a'.</param>
/// <param name="BMean">Buffer 'BMean'.</param>
/// <param name="BVariance">Buffer 'BVariance'.</param>
/// <returns>The outgoing VMP message to the 'innerProduct' argument.</returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'innerProduct' conditioned on the given values.
/// </para></remarks>
public static Gaussian InnerProductAverageLogarithm(Vector A, Vector BMean, PositiveDefiniteMatrix BVariance)
{
    // p(x) = N(a' E[b], a' var(b) a)
    // Uses John Winn's rule for deterministic factors; strict variational
    // inference would set the variance to 0.
    double mean = A.Inner(BMean);
    double variance = BVariance.QuadraticForm(A);
    Gaussian message = new Gaussian();
    message.SetMeanAndVariance(mean, variance);
    return message;
}
/// <summary>
/// VMP message to 'innerProduct'.
/// </summary>
/// <param name="AMean">Buffer 'AMean'.</param>
/// <param name="AVariance">Buffer 'AVariance'.</param>
/// <param name="BMean">Buffer 'BMean'.</param>
/// <param name="BVariance">Buffer 'BVariance'.</param>
/// <returns>The outgoing VMP message to the 'innerProduct' argument.</returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'innerProduct' conditioned on the given values.
/// </para></remarks>
public static Gaussian InnerProductAverageLogarithm(Vector AMean, PositiveDefiniteMatrix AVariance, Vector BMean, PositiveDefiniteMatrix BVariance)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    // Uses John Winn's rule for deterministic factors; strict variational
    // inference would set the variance to 0.
    double mean = AMean.Inner(BMean);
    double variance = AVariance.QuadraticForm(BMean)
                      + BVariance.QuadraticForm(AMean)
                      + AVariance.Inner(BVariance);
    Gaussian message = new Gaussian();
    message.SetMeanAndVariance(mean, variance);
    return message;
}
/// <summary>VMP message to <c>innerProduct</c>.</summary>
/// <param name="A">Constant value for <c>a</c>.</param>
/// <param name="BMean">Buffer <c>BMean</c>.</param>
/// <param name="BVariance">Buffer <c>BVariance</c>.</param>
/// <returns>The outgoing VMP message to the <c>innerProduct</c> argument.</returns>
/// <remarks>
/// <para>The outgoing message is the factor viewed as a function of <c>innerProduct</c> conditioned on the given values.</para>
/// </remarks>
public static Gaussian InnerProductAverageLogarithm(Vector A, Vector BMean, PositiveDefiniteMatrix BVariance)
{
    // Deterministic factor handled with John Winn's rule (strict VMP would
    // instead send a zero-variance message): p(x) = N(a' E[b], a' var(b) a).
    Gaussian outgoing = new Gaussian();
    double projectedMean = A.Inner(BMean);
    double projectedVariance = BVariance.QuadraticForm(A);
    outgoing.SetMeanAndVariance(projectedMean, projectedVariance);
    return outgoing;
}
/// <summary>VMP message to <c>innerProduct</c>.</summary>
/// <param name="AMean">Buffer <c>AMean</c>.</param>
/// <param name="AVariance">Buffer <c>AVariance</c>.</param>
/// <param name="BMean">Buffer <c>BMean</c>.</param>
/// <param name="BVariance">Buffer <c>BVariance</c>.</param>
/// <returns>The outgoing VMP message to the <c>innerProduct</c> argument.</returns>
/// <remarks>
/// <para>The outgoing message is the factor viewed as a function of <c>innerProduct</c> conditioned on the given values.</para>
/// </remarks>
public static Gaussian InnerProductAverageLogarithm(Vector AMean, PositiveDefiniteMatrix AVariance, Vector BMean, PositiveDefiniteMatrix BVariance)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    // Deterministic factor handled with John Winn's rule; strict variational
    // inference would set the variance to 0.
    double outgoingMean = AMean.Inner(BMean);
    double outgoingVariance =
        AVariance.QuadraticForm(BMean) + BVariance.QuadraticForm(AMean) + AVariance.Inner(BVariance);
    Gaussian outgoing = new Gaussian();
    outgoing.SetMeanAndVariance(outgoingMean, outgoingVariance);
    return outgoing;
}
/// <summary>
/// VMP message to 'Sum'.
/// </summary>
/// <param name="A">Constant value for 'A'.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'Sum' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'Sum' as the random arguments are varied.
/// The formula is <c>proj[sum_(B) p(B) factor(Sum,A,B)]</c>.
/// </para><para>
/// Uses John Winn's rule for deterministic factors.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian SumAverageLogarithm(bool[] A, [SkipIfUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    // Encode the boolean selection as a 0/1 indicator vector so the sum becomes an inner product.
    double[] indicator = new double[A.Length];
    for (int i = 0; i < A.Length; i++)
    {
        indicator[i] = A[i] ? 1.0 : 0.0;
    }
    Vector ma = Vector.FromArray(indicator);
    // John Winn's rule for deterministic factors; strict variational inference
    // would set the variance to 0.
    Gaussian message = new Gaussian();
    message.SetMeanAndVariance(ma.Inner(MeanOfB), CovarianceOfB.QuadraticForm(ma));
    return message;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductPartialCovarianceOp"]/message_doc[@name="XAverageLogarithm(double[], VectorGaussian, Vector, PositiveDefiniteMatrix)"]/*'/>
public static Gaussian XAverageLogarithm([SkipIfAllUniform] double[] A, [SkipIfAllUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    // 'A' is constant here, so only the var(b) term contributes to the variance.
    Vector weights = Vector.FromArray(A);
    // John Winn's rule for deterministic factors; strict variational inference
    // would set the variance to 0.
    Gaussian message = new Gaussian();
    message.SetMeanAndVariance(weights.Inner(MeanOfB), CovarianceOfB.QuadraticForm(weights));
    if (message.Precision < 0)
    {
        throw new InferRuntimeException("improper message");
    }
    return message;
}
/// <summary>
/// Computes the log-density of <paramref name="what"/> under a mixture of vector Gaussians.
/// </summary>
/// <param name="mixture">The mixture whose components and weights are evaluated.</param>
/// <param name="what">The point at which to evaluate the density.</param>
/// <returns>The log of the weight-normalized mixture density at <paramref name="what"/>.</returns>
public static double LogProb(this Mixture <VectorGaussian> mixture, MicrosoftResearch.Infer.Maths.Vector what)
{
    // Dimension-dependent Gaussian normalizer (2*pi)^(d/2), constant across components.
    double normalizer = Math.Pow(2 * Math.PI, what.Count * 0.5);
    double weightedSum = 0;
    for (int i = 0; i < mixture.Components.Count; ++i)
    {
        MicrosoftResearch.Infer.Maths.Vector componentMean = mixture.Components[i].GetMean();
        MicrosoftResearch.Infer.Maths.Vector residual = what - componentMean;
        PositiveDefiniteMatrix precision = mixture.Components[i].Precision;
        // N(x; m, P^-1) = sqrt(|P|) / (2*pi)^(d/2) * exp(-0.5 (x-m)' P (x-m))
        double density = Math.Exp(-0.5 * precision.QuadraticForm(residual, residual))
                         * Math.Sqrt(precision.Determinant()) / normalizer;
        weightedSum += mixture.Weights[i] * density;
    }
    // Normalize by the total weight, then take a log that tolerates zero (LogInf).
    return MathHelper.LogInf(weightedSum / mixture.WeightSum());
}
/// <summary>
/// VMP message to 'Sum'.
/// </summary>
/// <param name="A">Incoming message from 'A'.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'Sum' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'Sum' as the random arguments are varied.
/// The formula is <c>proj[sum_(A,B) p(A,B) factor(Sum,A,B)]</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian SumAverageLogarithm(DistributionStructArray <Bernoulli, bool> A, [SkipIfUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    int n = A.Count;
    Vector ma = Vector.Zero(n);
    Vector va = Vector.Zero(n);
    for (int i = 0; i < n; i++)
    {
        ma[i] = A[i].GetMean();
        va[i] = A[i].GetVariance();
    }
    // John Winn's rule for deterministic factors; strict variational inference
    // would set the variance to 0.
    Vector meanOfBSquared = Vector.Zero(MeanOfB.Count);
    meanOfBSquared.SetToFunction(MeanOfB, m => m * m);
    double variance = va.Inner(meanOfBSquared)
                      + CovarianceOfB.QuadraticForm(ma)
                      + va.Inner(CovarianceOfB.Diagonal());
    Gaussian message = new Gaussian();
    message.SetMeanAndVariance(ma.Inner(MeanOfB), variance);
    return message;
}
/// <summary>
/// VMP message to 'X'.
/// </summary>
/// <param name="A">Incoming message from 'A'. Must be a proper distribution. If all elements are uniform, the result will be uniform.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If all elements are uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'X' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'X' as the random arguments are varied.
/// The formula is <c>proj[sum_(A,B) p(A,B) factor(X,A,B)]</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="A"/> is not a proper distribution</exception>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian XAverageLogarithm([SkipIfAllUniform] GaussianArray A, [SkipIfAllUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    int K = MeanOfB.Count;
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    var ma = Vector.Zero(K);
    var va = Vector.Zero(K);
    for (int k = 0; k < K; k++)
    {
        double m, v;
        A[k].GetMeanAndVariance(out m, out v);
        ma[k] = m;
        va[k] = v;
    }
    // Uses John Winn's rule for deterministic factors.
    // Strict variational inference would set the variance to 0.
    var mbj2 = Vector.Zero(K);
    mbj2.SetToFunction(MeanOfB, x => x * x); // slooow
    Gaussian result = new Gaussian();
    result.SetMeanAndVariance(ma.Inner(MeanOfB), va.Inner(mbj2) + CovarianceOfB.QuadraticForm(ma) + va.Inner(CovarianceOfB.Diagonal()));
    if (result.Precision < 0)
    {
        // Report improper outgoing messages via the library's runtime exception, consistent
        // with XAverageLogarithm(double[], ...); ApplicationException is reserved (CA2201).
        throw new InferRuntimeException("improper message");
    }
    return result;
}
/// <summary>
/// Evidence contribution: the log of the factor averaged over the incoming distributions.
/// </summary>
/// <param name="label">Incoming message from the label.</param>
/// <param name="point">The observed point.</param>
/// <param name="shapeX">Incoming message for the shape's X coordinate.</param>
/// <param name="shapeY">Incoming message for the shape's Y coordinate.</param>
/// <param name="shapeOrientation">The shape's orientation/precision matrix.</param>
/// <returns>The log average factor value.</returns>
public static double LogAverageFactor(
    Bernoulli label, Vector point, Gaussian shapeX, Gaussian shapeY, PositiveDefiniteMatrix shapeOrientation)
{
    VectorGaussian locationTimesFactor = ShapeLocationTimesFactor(point, shapeX, shapeY, shapeOrientation);
    double probFalse = label.GetProbFalse();
    // Ratio of normalizers of (location prior * factor) to the location prior itself.
    double logNormalizerRatio =
        locationTimesFactor.GetLogNormalizer()
        - 0.5 * shapeOrientation.QuadraticForm(point)
        - shapeX.GetLogNormalizer()
        - shapeY.GetLogNormalizer();
    double normalizerProduct = Math.Exp(logNormalizerRatio);
    // Mixture of the "false" mass and the (true - false) weighted factor mass.
    double averageFactor = probFalse + (1 - 2 * probFalse) * normalizerProduct;
    Debug.Assert(averageFactor > 0);
    return Math.Log(averageFactor);
}
// EP message to one coordinate (X or Y) of the shape location, given an observed point,
// a Bernoulli label message, and the shape orientation matrix. Two regimes are handled:
// both coordinates point-mass (Laplace approximation via derivatives) or both non-point-mass
// (moment projection of a two-component mixture). Mixed point/non-point inputs are unsupported.
private static Gaussian ShapeAverageConditional(
    Vector point, Bernoulli label, Gaussian shapeX, Gaussian shapeY, PositiveDefiniteMatrix shapeOrientation, bool resultForXCoord)
{
    if (shapeX.IsPointMass && shapeY.IsPointMass)
    {
        // Point-mass branch: expand log f(location) to second order around the point
        // and convert the derivatives to a Gaussian message.
        double labelProbTrue = label.GetProbTrue();
        double labelProbFalse = 1.0 - labelProbTrue;
        double probDiff = labelProbTrue - labelProbFalse;
        Vector shapeLocation = Vector.FromArray(shapeX.Point, shapeY.Point);
        Vector diff = point - shapeLocation;
        Vector orientationTimesDiff = shapeOrientation * diff;
        Matrix orientationTimesDiffOuter = orientationTimesDiff.Outer(orientationTimesDiff);
        // factorValue = exp(-0.5 (p - loc)' W (p - loc)), the bell-shaped shape kernel.
        double factorValue = Math.Exp(-0.5 * shapeOrientation.QuadraticForm(diff));
        // funcValue = E_label[factor] = probTrue*factorValue + probFalse*(1 - factorValue).
        double funcValue = factorValue * probDiff + labelProbFalse;
        // First derivative of funcValue w.r.t. the location.
        Vector dFunc = probDiff * factorValue * orientationTimesDiff;
        // Gradient of log funcValue.
        Vector dLogFunc = 1.0 / funcValue * dFunc;
        // Second log-derivative matrix; sign convention must match Gaussian.FromDerivatives
        // (NOTE(review): presumably ddLogFunc holds -d^2 log f — confirm against FromDerivatives).
        Matrix ddLogFunc =
            ((orientationTimesDiffOuter + shapeOrientation) * factorValue * funcValue - orientationTimesDiffOuter * probDiff * factorValue * factorValue) *
            (probDiff / (funcValue * funcValue));
        // Select the requested coordinate's point, gradient and curvature.
        double x = resultForXCoord ? shapeX.Point : shapeY.Point;
        double d = resultForXCoord ? dLogFunc[0] : dLogFunc[1];
        double dd = resultForXCoord ? ddLogFunc[0, 0] : ddLogFunc[1, 1];
        return Gaussian.FromDerivatives(x, d, dd, forceProper: true);
    }
    else if (!shapeX.IsPointMass && !shapeY.IsPointMass)
    {
        // Non-point-mass branch: the posterior is a two-component mixture
        // (prior-only term weighted by probFalse, prior*factor term with a sign-flipped weight);
        // project it to a single Gaussian and divide out the incoming message.
        VectorGaussian shapeLocationTimesFactor = ShapeLocationTimesFactor(point, shapeX, shapeY, shapeOrientation);
        double labelProbFalse = label.GetProbFalse();
        double shapeLocationWeight = labelProbFalse;
        // Weight of the prior*factor component: normalizer ratio times (probTrue - probFalse).
        double shapeLocationTimesFactorWeight =
            Math.Exp(shapeLocationTimesFactor.GetLogNormalizer() - shapeX.GetLogNormalizer() - shapeY.GetLogNormalizer() - 0.5 * shapeOrientation.QuadraticForm(point)) *
            (1 - 2 * labelProbFalse);
        var projectionOfSum = new Gaussian();
        // Moment-match the weighted sum of the two marginals for the requested coordinate.
        projectionOfSum.SetToSum(
            shapeLocationWeight,
            resultForXCoord ? shapeX : shapeY,
            shapeLocationTimesFactorWeight,
            shapeLocationTimesFactor.GetMarginal(resultForXCoord ? 0 : 1));
        Gaussian result = new Gaussian();
        // EP division: remove the incoming coordinate message from the projected posterior.
        result.SetToRatio(projectionOfSum, resultForXCoord ? shapeX : shapeY);
        return result;
    }
    else
    {
        // Exactly one coordinate being a point mass is not handled by either derivation.
        throw new NotSupportedException();
    }
}
/// <summary>
/// VMP message to 'Sum'.
/// </summary>
/// <param name="A">Incoming message from 'A'.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'Sum' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'Sum' as the random arguments are varied.
/// The formula is <c>proj[sum_(A,B) p(A,B) factor(Sum,A,B)]</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian SumAverageLogarithm(DistributionStructArray<Bernoulli, bool> A, [SkipIfUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    int count = A.Count;
    Vector meanOfA = Vector.Zero(count);
    Vector varOfA = Vector.Zero(count);
    for (int i = 0; i < count; i++)
    {
        meanOfA[i] = A[i].GetMean();
        varOfA[i] = A[i].GetVariance();
    }
    // John Winn's rule for deterministic factors; strict variational inference
    // would set the variance to 0.
    Vector squaredMeanOfB = Vector.Zero(MeanOfB.Count);
    squaredMeanOfB.SetToFunction(MeanOfB, b => b * b);
    double outgoingVariance =
        varOfA.Inner(squaredMeanOfB)
        + CovarianceOfB.QuadraticForm(meanOfA)
        + varOfA.Inner(CovarianceOfB.Diagonal());
    Gaussian outgoing = new Gaussian();
    outgoing.SetMeanAndVariance(meanOfA.Inner(MeanOfB), outgoingVariance);
    return outgoing;
}
/// <summary>
/// VMP message to 'Sum'.
/// </summary>
/// <param name="A">Constant value for 'A'.</param>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="MeanOfB">Buffer 'MeanOfB'.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <returns>The outgoing VMP message to the 'Sum' argument.</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'Sum' as the random arguments are varied.
/// The formula is <c>proj[sum_(B) p(B) factor(Sum,A,B)]</c>.
/// </para><para>
/// Uses John Winn's rule for deterministic factors.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Gaussian SumAverageLogarithm(bool[] A, [SkipIfUniform] VectorGaussian B, Vector MeanOfB, PositiveDefiniteMatrix CovarianceOfB)
{
    // p(x|a,b) = N(E[a]'*E[b], E[b]'*var(a)*E[b] + E[a]'*var(b)*E[a] + trace(var(a)*var(b)))
    // Turn the boolean mask into a 0/1 selector so the sum is an inner product with MeanOfB.
    double[] selector = new double[A.Length];
    for (int i = 0; i < A.Length; i++)
    {
        selector[i] = A[i] ? 1.0 : 0.0;
    }
    Vector meanOfA = Vector.FromArray(selector);
    // John Winn's rule for deterministic factors; strict variational inference
    // would set the variance to 0.
    Gaussian outgoing = new Gaussian();
    outgoing.SetMeanAndVariance(meanOfA.Inner(MeanOfB), CovarianceOfB.QuadraticForm(meanOfA));
    return outgoing;
}
/// <summary>
/// EP message to the label: the probability that the point lies "inside" the shape,
/// given a fixed shape location and orientation.
/// </summary>
/// <param name="point">The observed point.</param>
/// <param name="shapeX">The shape location's X coordinate.</param>
/// <param name="shapeY">The shape location's Y coordinate.</param>
/// <param name="shapeOrientation">The shape's orientation/precision matrix.</param>
/// <returns>A Bernoulli with probability exp(-0.5 (p - loc)' W (p - loc)).</returns>
public static Bernoulli LabelAverageConditional(
    Vector point, double shapeX, double shapeY, PositiveDefiniteMatrix shapeOrientation)
{
    Vector offset = point - Vector.FromArray(shapeX, shapeY);
    double probTrue = Math.Exp(-0.5 * shapeOrientation.QuadraticForm(offset));
    return new Bernoulli(probTrue);
}