Example #1
 public static double AverageLogFactor(
     Vector SampleMean,
     double scaling,
     GPBuffer Buffer)
 {
     return VectorGaussianScaledPrecisionOp.AverageLogFactor(SampleMean, scaling, Buffer.ESamplePrecisionSample, Buffer.PrecisionMeanLogDet);
 }
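A minimal sketch of what this evidence term plausibly computes, assuming the factor is a zero-mean VectorGaussian with precision s*P, where P = Buffer.Precision, E_q[x^T P x] = Buffer.ESamplePrecisionSample and log|P| = Buffer.PrecisionMeanLogDet (the exact formula lives in VectorGaussianScaledPrecisionOp.AverageLogFactor, which is not shown here):

     E_q[\log \mathcal{N}(x;\, 0,\, (sP)^{-1})] = \tfrac{1}{2}\bigl(d \log s + \log|P| - s\,E_q[x^\top P x] - d \log 2\pi\bigr)

where d is the dimension of the sample and q is the current variational posterior on it.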
Example #2
 public static VectorGaussian SampleAverageLogarithm(double scaling, GPBuffer Buffer, VectorGaussian result)
 {
     result.Precision.SetTo(Buffer.Precision);
     if (scaling != 1.0)
     {
         result.Precision.Scale(scaling);
     }
     return result;
 }
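Under the same zero-mean scaled-precision assumption, the VMP message to 'sample' for a fixed scaling s is the factor itself, so the outgoing VectorGaussian carries precision

     \Lambda_{\to x} = s\,P, \qquad P = \text{Buffer.Precision},

which is exactly the copy-then-scale above; the Scale call is skipped when s = 1 because it would be a no-op.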
Example #3
 /// <summary>
 /// Uses the KernelOptimiser class to optimise the hypers given the current variational posterior
 /// on the function values (which has mean SampleMean and covariance SampleVariance)
 /// </summary>
 public static GPBuffer BufferHelper(int[] hypersToOptimise, GPBuffer Buffer, Vector[] x, Vector SampleMean, PositiveDefiniteMatrix SampleVariance, Gamma scaling)
 {
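     // When the posterior mean is all zeros (presumably the uninitialised/uniform case),
     // skip hyperparameter optimisation and fall back to the prior precision K^{-1}.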
     if (SampleMean.All(o => o == 0.0))
     {
         Buffer.Precision = Utils.GramMatrix(Buffer.kernel, x).Inverse();
     }
     else
     {
         //Console.WriteLine(Utils.KernelToArray(Buffer.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
         var helper = new KernelOptimiser(settings);
         helper.kernel           = Buffer.kernel;
         helper.xData            = x;
         helper.hypersToOptimise = hypersToOptimise;
         helper.Optimise((prec, gradK, gradientVector) =>
                         helperFunction(prec, gradK, gradientVector, scaling, SampleMean,
                                        SampleVariance), ref Buffer.Precision);
         Buffer.ESamplePrecisionSample = VectorGaussianScaledPrecisionOp.ESamplePrecisionSample(SampleMean, SampleVariance, Buffer.Precision);
         Buffer.PrecisionMeanLogDet    = VectorGaussianScaledPrecisionOp.PrecisionMeanLogDet(Buffer.Precision);
         //Console.WriteLine(Utils.KernelToArray(Buffer.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
         rememberKernel = Buffer.kernel;
     }
     return Buffer;
 }
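Assuming Buffer.Precision is the inverse Gram matrix K_\theta^{-1} (as in the zero-mean branch) and writing m = SampleMean, \Sigma = SampleVariance and \bar{s} for the mean of scaling, the hyperparameter search plausibly maximises the expected log-prior term of the variational bound:

     \theta^\star = \arg\max_\theta \; -\tfrac{1}{2}\log|K_\theta| - \tfrac{1}{2}\,\bar{s}\,\bigl(m^\top K_\theta^{-1} m + \operatorname{tr}(K_\theta^{-1}\Sigma)\bigr)

The objective and its gradient are supplied by helperFunction (not shown); after optimisation the buffer caches E_q[f^\top K^{-1} f] and \log|K^{-1}| so that AverageLogFactor and ScalingAverageLogarithm can reuse them.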
Example #4
 public static GPBuffer Buffer(int[] hypersToOptimise, GPBuffer Buffer, Vector[] x, [SkipIfUniform] Vector SampleMean, [SkipIfUniform] PositiveDefiniteMatrix SampleVariance, double scaling)
 {
     return BufferHelper(hypersToOptimise, Buffer, x, SampleMean, SampleVariance, Gamma.PointMass(scaling));
 }
Example #5
 public static Gamma ScalingAverageLogarithm([Fresh] GPBuffer Buffer)
 {
     return Gamma.FromNatural(.5 * Buffer.Precision.Rows, .5 * Buffer.ESamplePrecisionSample);
 }
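In Infer.NET, Gamma.FromNatural takes (shape - 1, rate), so with d = Buffer.Precision.Rows and E_q[x^\top P x] = Buffer.ESamplePrecisionSample this matches the standard VMP message to the scaling under the zero-mean factor assumption:

     E_q[\log \mathcal{N}(x;\, 0,\, (sP)^{-1})] = \tfrac{d}{2}\log s - \tfrac{s}{2}\,E_q[x^\top P x] + \text{const}
     \;\Rightarrow\; m_{\to s}(s) \propto s^{d/2} e^{-\tfrac{s}{2} E_q[x^\top P x]} = \text{Gamma}\bigl(s;\, \tfrac{d}{2} + 1,\, \text{rate} = \tfrac{1}{2} E_q[x^\top P x]\bigr)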
Example #6
 /// <summary>
 /// VMP message to 'sample'
 /// </summary>
 /// <param name="scaling">Incoming message from 'scaling'.</param>
 /// <param name="Buffer">Buffer holding the current precision matrix.</param>
 /// <param name="result">Modified to contain the outgoing message</param>
 /// <returns><paramref name="result"/></returns>
 /// <remarks><para>
 /// The outgoing message is the factor viewed as a function of 'sample', averaged over the incoming 'scaling' distribution.
 /// </para></remarks>
 public static VectorGaussian SampleAverageLogarithm([SkipIfUniform] Gamma scaling, GPBuffer Buffer, VectorGaussian result)
 {
     result.Precision.SetTo(Buffer.Precision);
     result.Precision.Scale(scaling.GetMean());
     return result;
 }
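When the scaling is uncertain, VMP sends the exponentiated expected log-factor; under the same zero-mean assumption this simply replaces the scaling by its posterior mean \bar{s} = E_{q(s)}[s]:

     m_{\to x}(x) \propto \exp\bigl(E_{q(s)}[\log \mathcal{N}(x;\, 0,\, (sP)^{-1})]\bigr) \propto \mathcal{N}\bigl(x;\, 0,\, (\bar{s}\,P)^{-1}\bigr)

hence the precision \bar{s} \cdot P via scaling.GetMean(); this reduces to Example #2 when the scaling message is a point mass.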