/// <summary>VMP message to 'first'.</summary>
/// <param name="concat">Incoming message from 'concat'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="second">Incoming message from 'second'.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'first'.
/// Because the factor is deterministic, 'concat' is integrated out before taking the logarithm.
/// The formula is <c>exp(sum_(second) p(second) log(sum_concat p(concat) factor(concat,first,second)))</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="concat"/> is not a proper distribution</exception>
public static VectorGaussian FirstAverageLogarithm([SkipIfUniform] VectorGaussian concat, VectorGaussian second, VectorGaussian result)
{
    // The VMP update coincides with the EP conditional update evaluated at the mean of 'second':
    // prec = concat.Precision[dim1,dim1]
    // meanTimesPrec = concat.MeanTimesPrecision[dim1] - concat.Precision[dim1,dim2]*second.Mean
    return FirstAverageConditional(concat, second.GetMean(), result);
}
/// <summary>VMP message to 'second'.</summary>
/// <param name="concat">Incoming message from 'concat'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="first">Incoming message from 'first'.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'second'.
/// Because the factor is deterministic, 'concat' is integrated out before taking the logarithm.
/// The formula is <c>exp(sum_(first) p(first) log(sum_concat p(concat) factor(concat,first,second)))</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="concat"/> is not a proper distribution</exception>
public static VectorGaussian SecondAverageLogarithm([SkipIfUniform] VectorGaussian concat, VectorGaussian first, VectorGaussian result)
{
    // Deterministic factor: the VMP update equals the EP update at the mean of 'first'.
    return SecondAverageConditional(concat, first.GetMean(), result);
}
/// <summary>
/// Update the buffer 'MeanOfB'.
/// </summary>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="CovarianceOfB">Buffer 'CovarianceOfB'.</param>
/// <param name="result">Storage for result.</param>
/// <returns>New value of buffer 'MeanOfB'.</returns>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Vector MeanOfB([Proper] VectorGaussian B, PositiveDefiniteMatrix CovarianceOfB, Vector result)
{
    // Fill 'result' with the mean of B, reusing the precomputed covariance buffer.
    return B.GetMean(result, CovarianceOfB);
}
/// <summary>
/// Update the buffer 'BMean'.
/// </summary>
/// <param name="B">Incoming message from 'B'. Must be a proper distribution.</param>
/// <param name="BVariance">Buffer 'BVariance'.</param>
/// <param name="result">Storage for result.</param>
/// <returns>New value of buffer 'BMean'.</returns>
/// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
public static Vector BMean([Proper] VectorGaussian B, PositiveDefiniteMatrix BVariance, Vector result)
{
    // Fill 'result' with the mean of B, reusing the precomputed variance buffer.
    return B.GetMean(result, BVariance);
}
/// <summary>Update the buffer <c>SourceMean</c>.</summary>
/// <param name="Source">Incoming message from <c>source</c>. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="SourceVariance">Buffer <c>SourceVariance</c>.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result" /></returns>
/// <exception cref="ImproperMessageException"><paramref name="Source" /> is not a proper distribution.</exception>
public static Vector SourceMean([Proper] VectorGaussian Source, [Fresh] PositiveDefiniteMatrix SourceVariance, Vector result)
{
    // Fill 'result' with the mean of Source, reusing the freshly computed variance buffer.
    return Source.GetMean(result, SourceVariance);
}
/// <summary>Update the buffer <c>ArrayMean</c>.</summary>
/// <param name="array">Incoming message from <c>array</c>. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="ArrayVariance">Buffer <c>ArrayVariance</c>.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result" /></returns>
/// <exception cref="ImproperMessageException"><paramref name="array" /> is not a proper distribution.</exception>
public static Vector ArrayMean([Proper] VectorGaussian array, [Fresh] PositiveDefiniteMatrix ArrayVariance, Vector result)
{
    // Fill 'result' with the mean of 'array', reusing the freshly computed variance buffer.
    return array.GetMean(result, ArrayVariance);
}
/// <summary>
/// Update the buffer 'AMean'.
/// </summary>
/// <param name="A">Incoming message from 'a'. Must be a proper distribution. If any element is uniform, the result will be uniform.</param>
/// <param name="AVariance">Buffer 'AVariance'.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result"/></returns>
/// <exception cref="ImproperMessageException"><paramref name="A"/> is not a proper distribution</exception>
public static Vector AMean([Proper] VectorGaussian A, [Fresh] PositiveDefiniteMatrix AVariance, Vector result)
{
    // Fill 'result' with the mean of A, reusing the freshly computed variance buffer.
    return A.GetMean(result, AVariance);
}
/// <summary>
/// Wrapper function for multivariate volatility experiments. This can calculate predictive covariances either
/// just for the training data (times 1-training), or it can do one step ahead predictions for "steps" steps.
/// </summary>
/// <param name="s">Algorithm settings</param>
/// <param name="resultsDir">Where to save results</param>
/// <param name="dataFileName">Text file containing training data</param>
/// <param name="scaling">Rescaling of the data</param>
/// <param name="normalisation">Whether to normalise the data</param>
/// <param name="training">Consider [1,training] as training data</param>
/// <param name="steps">How many look ahead predictive steps to do</param>
/// <param name="historicalOnly">Should we only calculate covariances on the training data?</param>
static void MultivariateVolatilityPredictions(Settings s, string resultsDir, string dataFileName, double scaling = 1.0, bool normalisation = true, int training = 200, int steps = 200, bool historicalOnly = false)
{
    // Load data: inputs X are just the time indices 0..training-1 wrapped as 1-D vectors.
    var X = Enumerable.Range(0, training).Select(i => Vector.Constant(1, i)).ToArray();
    // One row per time point; columns split on space/comma/tab, each value rescaled.
    // NOTE(review): double.Parse uses the current culture here — data files with '.' decimals
    // could mis-parse under a comma-decimal locale; consider InvariantCulture. TODO confirm.
    var dataJagged = File.ReadAllLines(dataFileName).Take(training)
        .Select<string, double[]>(i => i.Split(new char[] { ' ', ',', '\t' }, StringSplitOptions.RemoveEmptyEntries)
        .Select(j => double.Parse(j) * scaling).ToArray()).ToArray();
    // Transpose so series run along rows, then flatten to a rectangular array.
    double[,] y = Utils.JaggedToFlat(Utils.transpose(dataJagged));
    if (normalisation)
    {
        // Normalisation statistics are logged to normalisation.txt.
        // NOTE(review): useFirst is hard-coded to 200 rather than 'training' — looks like it
        // should track the 'training' parameter (the per-step call below uses training + step);
        // verify this is intentional.
        using (var sw = new StreamWriter(resultsDir + @"/normalisation.txt"))
            y = Utils.NormaliseRows(y, sw: sw, useFirst: 200);
    }
    // No missing observations in this experiment.
    var missing = new bool[y.GetLength(0), y.GetLength(1)];
    // Train model
    var model = (new Wrappers()).NetworkModelNodeNoiseCA(X, y, missing, s, swfn: resultsDir + @"/results_init.txt");
    // calculate predictive covariances on training data; one line per time point.
    var histVars = Utilities.HistoricalPredictiveCovariances(model);
    using (var sw = new StreamWriter(resultsDir + @"/historicalPredVars.txt"))
    {
        for (int i = 0; i < X.Length; i++)
        {
            sw.WriteLine(histVars[i].SourceArray.Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
        }
    }
    // calculate noise covariances on training data; same space-separated row format.
    var histNoiseVars = Utilities.HistoricalNoiseCovariances(model);
    using (var sw = new StreamWriter(resultsDir + @"/historicalNoiseVars.txt"))
    {
        for (int i = 0; i < X.Length; i++)
        {
            sw.WriteLine(histNoiseVars[i].SourceArray.Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
        }
    }
    if (historicalOnly)
    {
        return;
    }
    // Get the variational posterior so we can warm start the training at each look ahead step
    var nodeNoisePrecs = model.Marginal<Gamma[]>("nodeNoisePrecisions").Select(i => i.GetMean()).ToArray();
    var nodeSignalPrecs = model.Marginal<Gamma[]>("nodeSignalPrecisions").Select(i => i.GetMean()).ToArray();
    var obsNoisePrec = model.Marginal<Gamma>("noisePrecision").GetMean();
    var finit = model.Marginal<VectorGaussian[]>("nodeFunctions");
    var winit = model.Marginal<VectorGaussian[,]>("weightFunctions");
    // Do "steps" look ahead steps: each step extends the data window by one point,
    // re-trains (warm-started), and writes a one-step-ahead predictive mean/variance.
    for (int step = 0; step < steps; step++)
    {
        X = Enumerable.Range(0, training + step + 1).Select(i => Vector.Constant(1, i)).ToArray();
        dataJagged = File.ReadAllLines(dataFileName).Take(training + step + 1)
            .Select<string, double[]>(i => i.Split(new char[] { ' ', ',', '\t' }, StringSplitOptions.RemoveEmptyEntries)
            .Select(j => double.Parse(j) * scaling).ToArray()).ToArray();
        y = Utils.JaggedToFlat(Utils.transpose(dataJagged));
        if (normalisation)
        {
            // No stats writer here; only the first training points + steps so far are used.
            y = Utils.NormaliseRows(y, useFirst: training + step);
        }
        // Rerun training each step but warm start with finit and winit
        // (passed by ref so the posterior from this step seeds the next one).
        VectorGaussian prediction = (new gpnetworkModel()).GPRN_MultivariateVolatility(X, y, nodeSignalPrecs, nodeNoisePrecs, obsNoisePrec, ref finit, ref winit, model.nodeKernelOptimiser.kernel, model.weightKernelOptimiser.kernel);
        using (var sw = new StreamWriter(resultsDir + @"/predictionMean" + step + ".txt"))
            sw.WriteLine(prediction.GetMean().ToArray().Select(i => i.ToString()).Aggregate((p, q) => p + " " + q));
        using (var sw = new StreamWriter(resultsDir + @"/predictionVar" + step + ".txt"))
            sw.WriteLine(prediction.GetVariance().SourceArray.Select(i => i.ToString()).Aggregate((p, q) => p + " " + q));
    }
    ; // NOTE(review): stray empty statement — harmless, safe to delete.
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MatrixVectorProductOp"]/message_doc[@name="AAverageConditional(VectorGaussian, DistributionArray2D{Gaussian, double}, Vector, PositiveDefiniteMatrix, DistributionStructArray2D{Gaussian, double})"]/*'/>
public static DistributionStructArray2D<Gaussian, double> AAverageConditional([SkipIfUniform] VectorGaussian product, DistributionArray2D<Gaussian, double> A, Vector BMean, PositiveDefiniteMatrix BVariance, DistributionStructArray2D<Gaussian, double> result)
{
    // Message to the matrix A of a matrix-vector product, for a point-mass A.
    // Each element's message is built from the derivative of logZ w.r.t. that element.
    if (product.IsUniform())
    {
        result.SetToUniform();
        return (result);
    }
    if (!A.IsPointMass)
    {
        throw new ArgumentException("A is not a point mass");
    }
    // Derivation of dlogZ/dA (pProduct = product.Precision, mProduct = product mean):
    // logZ = log N(mProduct; A*BMean, vProduct + A*BVariance*A')
    //      = -0.5 (mProduct - A*BMean)' inv(vProduct + A*BVariance*A') (mProduct - A*BMean) - 0.5 logdet(vProduct + A*BVariance*A')
    //      = -0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
    //        - 0.5 logdet(pProduct + pProduct*A*BVariance*A'*pProduct) + logdet(pProduct)
    // dlogZ = 0.5 (dA*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
    //       +0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (dA*BMean)
    //       +0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) (pProduct*dA*BVariance*A'*pProduct + pProduct*A*BVariance*dA'*pProduct) inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
    //       - 0.5 tr(inv(pProduct + pProduct*A*BVariance*A'*pProduct) (pProduct*dA*BVariance*A'*pProduct + pProduct*A*BVariance*dA'*pProduct))
    // dlogZ/dA = pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean) BMean'
    //          + pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean) (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct*A*BVariance
    //          - pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct A*BVariance
    var Amatrix = new Matrix(A.Point);
    var pProductA = product.Precision * Amatrix;
    var pProductABV = pProductA * BVariance;
    // prec = pProduct + pProduct*A*BVariance*A'*pProduct
    PositiveDefiniteMatrix prec = new PositiveDefiniteMatrix(product.Dimension, product.Dimension);
    prec.SetToSum(product.Precision, pProductABV * pProductA.Transpose());
    // pProductA is now free (its storage is reused for dlogZ below)
    // Guard against zero diagonal entries so the inverse below is well-defined.
    for (int i = 0; i < prec.Rows; i++)
    {
        if (prec[i, i] == 0)
        {
            prec[i, i] = 1;
        }
    }
    var v = prec.Inverse();
    var ABM = Amatrix * BMean;
    var pProductABM = product.Precision * ABM;
    // 'diff' aliases pProductABM: computed in place as pProduct*mProduct - pProduct*A*BMean.
    var diff = pProductABM;
    diff.SetToDifference(product.MeanTimesPrecision, pProductABM);
    // ABM is now free (its storage is reused for pProductVdiff below)
    var pProductV = product.Precision * v;
    var pProductVdiff = ABM;
    pProductVdiff.SetToProduct(pProductV, diff);
    var Vdiff = v * diff;
    // pProductV becomes -pProduct*v + pProductVdiff * Vdiff' (outer product).
    pProductV.Scale(-1);
    pProductV.SetToSumWithOuter(pProductV, 1, pProductVdiff, Vdiff);
    // Assemble dlogZ/dA in the buffer previously holding pProductA.
    Matrix dlogZ = pProductA;
    dlogZ.SetToProduct(pProductV, pProductABV);
    dlogZ.SetToSumWithOuter(dlogZ, 1, pProductVdiff, BMean);
    int rows = A.GetLength(0);
    int cols = A.GetLength(1);
    for (int i = 0; i < rows; i++)
    {
        for (int j = 0; j < cols; j++)
        {
            double dlogp = dlogZ[i, j];
            // for now, we don't compute the second derivative.
            double ddlogp = -1;
            result[i, j] = Gaussian.FromDerivatives(A[i, j].Point, dlogp, ddlogp, false);
            // Debug-only finite-difference check of the analytic derivative (disabled).
            bool check = false;
            if (check)
            {
                double logZ(Matrix m)
                {
                    var vgm = ProductAverageConditional(m, BMean, BVariance, new VectorGaussianMoments(m.Rows));
                    return (VectorGaussian.GetLogProb(product.GetMean(), vgm.Mean, product.GetVariance() + vgm.Variance));
                }
                var Amatrix2 = (Matrix)Amatrix.Clone();
                double delta = 1e-4;
                Amatrix2[i, j] += delta;
                double dlogp2 = (logZ(Amatrix2) - logZ(Amatrix)) / delta;
                if (MMath.AbsDiff(dlogp, dlogp2, 1e-10) > 1e-5)
                {
                    throw new Exception();
                }
            }
        }
    }
    return (result);
}
/// <summary>
/// Update the buffer 'AMean'.
/// </summary>
/// <param name="A">Incoming message from 'A'. Must be a proper distribution.</param>
/// <param name="AVariance">Buffer 'AVariance'.</param>
/// <param name="result">Storage for result.</param>
/// <returns>New value of buffer 'AMean'.</returns>
/// <exception cref="ImproperMessageException"><paramref name="A"/> is not a proper distribution</exception>
public static DenseVector AMean([Proper] VectorGaussian A, PositiveDefiniteMatrix AVariance, DenseVector result)
{
    // GetMean fills 'result' in place and returns it as the base Vector type.
    Vector mean = A.GetMean(result, AVariance);
    return (DenseVector)mean;
}
/// <summary>
/// Update the buffer 'AMean'.
/// </summary>
/// <param name="a">Incoming message from 'a'. Must be a proper distribution.</param>
/// <param name="aVariance">Buffer 'aVariance'.</param>
/// <param name="result">Storage for result.</param>
/// <returns>New value of buffer 'AMean'.</returns>
/// <exception cref="ImproperMessageException"><paramref name="a"/> is not a proper distribution</exception>
public static Vector AMean([Proper] VectorGaussian a, PositiveDefiniteMatrix aVariance, Vector result)
{
    // Fill 'result' with the mean of 'a', reusing the precomputed variance buffer.
    return a.GetMean(result, aVariance);
}
/// <summary>
/// Update the buffer 'SumMean'.
/// </summary>
/// <param name="sum">Incoming message from 'sum'. Must be a proper distribution.</param>
/// <param name="sumVariance">Buffer 'sumVariance'.</param>
/// <param name="result">Storage for result.</param>
/// <returns>New value of buffer 'SumMean'.</returns>
/// <exception cref="ImproperMessageException"><paramref name="sum"/> is not a proper distribution</exception>
public static Vector SumMean([Proper] VectorGaussian sum, PositiveDefiniteMatrix sumVariance, Vector result)
{
    // Fill 'result' with the mean of 'sum', reusing the precomputed variance buffer.
    return sum.GetMean(result, sumVariance);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ConcatOp"]/message_doc[@name="SecondAverageLogarithm(VectorGaussian, VectorGaussian, VectorGaussian)"]/*'/>
public static VectorGaussian SecondAverageLogarithm([SkipIfUniform] VectorGaussian concat, VectorGaussian first, VectorGaussian result)
{
    // Deterministic factor: the VMP update equals the EP update at the mean of 'first'.
    return SecondAverageConditional(concat, first.GetMean(), result);
}