Example #1
 /// <summary>
 /// Predictive covariance at a given list of points
 /// </summary>
 /// <param name="XList">List of inputs</param>
 /// <returns>Predictive covariance</returns>
 public PositiveDefiniteMatrix Covariance(IList<Vector> XList)
 {
     if (IsUniform())
     {
         VectorGaussian temp = new VectorGaussian(XList.Count);
         temp.SetToUniform();
         return(temp.GetVariance());
     }
     else
     {
         PositiveDefiniteMatrix kXX = FixedParameters.Prior.Covariance(XList);
         Matrix kXB = FixedParameters.KernelOf_X_B(XList);
         kXX.SetToDifference(kXX, kXB * Beta * kXB.Transpose());
         return(kXX);
     }
 }
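A minimal usage sketch, assuming a hypothetical instance "posterior" of the class that defines Covariance (Vector.FromArray and the collection types are standard Infer.NET / BCL APIs):

 // Predictive covariance at two scalar inputs; returns a 2x2 matrix
 IList<Vector> xList = new List<Vector> { Vector.FromArray(0.0), Vector.FromArray(1.0) };
 PositiveDefiniteMatrix cov = posterior.Covariance(xList);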
Example #2
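        /// <summary>
        /// Extends a VectorGaussian by one dimension, appending an independent Gaussian
        /// marginal as the last coordinate (zero cross-covariance with the existing dimensions).
        /// </summary>
        /// <param name="x">Distribution to extend</param>
        /// <param name="marg">Marginal for the appended dimension</param>
        /// <returns>The extended distribution</returns>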
        public static VectorGaussian extendByOneDimension(VectorGaussian x, Gaussian marg)
        {
            var mean     = x.GetMean();
            var variance = x.GetVariance();
            var newMean  = Vector.Zero(x.Dimension + 1);
            var newVar   = new PositiveDefiniteMatrix(x.Dimension + 1, x.Dimension + 1);

            for (int i = 0; i < x.Dimension; i++)
            {
                newMean[i] = mean[i];
                for (int j = 0; j < x.Dimension; j++)
                {
                    newVar[i, j] = variance[i, j];
                }
            }
            newMean[x.Dimension]             = marg.GetMean();
            newVar[x.Dimension, x.Dimension] = marg.GetVariance();
            return(VectorGaussian.FromMeanAndVariance(newMean, newVar));
        }
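A usage sketch, assuming the standard Infer.NET factory methods are in scope:

        var x    = VectorGaussian.FromMeanAndVariance(
                       Vector.FromArray(1.0, 2.0),
                       new PositiveDefiniteMatrix(new double[,] { { 1.0, 0.1 }, { 0.1, 1.0 } }));
        var marg = Gaussian.FromMeanAndVariance(3.0, 0.5);
        // ext is 3-dimensional; its covariance is block-diagonal, so the new
        // coordinate is independent of the original two
        var ext  = extendByOneDimension(x, marg);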
Example #3
 public static PositiveDefiniteMatrix BVariance([Proper] VectorGaussian b, PositiveDefiniteMatrix result)
 {
     return(b.GetVariance(result));
 }
Example #4
 public static PositiveDefiniteMatrix SumVariance([Proper] VectorGaussian sum, PositiveDefiniteMatrix result)
 {
     return(sum.GetVariance(result));
 }
 /// <summary>
 /// Update the buffer 'SourceVariance'
 /// </summary>
 /// <param name="Source">Incoming message from 'source'. Must be a proper distribution.  If any element is uniform, the result will be uniform.</param>
 /// <param name="result">Modified to contain the outgoing message</param>
 /// <returns><paramref name="result"/></returns>
 /// <remarks><para>
 ///
 /// </para></remarks>
 /// <exception cref="ImproperMessageException"><paramref name="Source"/> is not a proper distribution</exception>
 public static PositiveDefiniteMatrix SourceVariance([Proper] VectorGaussian Source, PositiveDefiniteMatrix result)
 {
     return(Source.GetVariance(result));
 }
 /// <summary>
 /// Update the buffer 'ArrayVariance'
 /// </summary>
 /// <param name="array">Incoming message from 'array'. Must be a proper distribution.  If any element is uniform, the result will be uniform.</param>
 /// <param name="result">Modified to contain the outgoing message</param>
 /// <returns><paramref name="result"/></returns>
 /// <remarks><para>
 ///
 /// </para></remarks>
 /// <exception cref="ImproperMessageException"><paramref name="array"/> is not a proper distribution</exception>
 public static PositiveDefiniteMatrix ArrayVariance([Proper] VectorGaussian array, PositiveDefiniteMatrix result)
 {
     return(array.GetVariance(result));
 }
Example #7
 /// <summary>
 /// Update the buffer 'CovarianceOfB'
 /// </summary>
 /// <param name="B">Incoming message from 'B'. Must be a proper distribution.  If any element is uniform, the result will be uniform.</param>
 /// <param name="result">Storage for result.</param>
 /// <returns>New value of buffer 'CovarianceOfB'</returns>
 /// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
 public static PositiveDefiniteMatrix CovarianceOfB([Proper] VectorGaussian B, PositiveDefiniteMatrix result)
 {
     return(B.GetVariance(result));
 }
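All of the buffer updates above delegate to VectorGaussian.GetVariance(result), which fills the preallocated result matrix in place and returns it, avoiding a fresh allocation on every message-passing iteration. A sketch of the calling pattern (the distribution "b" is assumed given):

 var buffer = new PositiveDefiniteMatrix(b.Dimension, b.Dimension);
 var v      = BVariance(b, buffer); // buffer is filled in place; v references the same matrix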
Example #8
        /// <summary>
        /// Wrapper function for multivariate volatility experiments. It can either calculate predictive covariances
        /// just for the training data (time steps 1 through "training"), or additionally make one-step-ahead predictions for "steps" steps.
        /// </summary>
        /// <param name="s">Algorithm settings</param>
        /// <param name="resultsDir">Where to save results</param>
        /// <param name="dataFileName">Text file containing training data</param>
        /// <param name="scaling">Rescaling of the data</param>
        /// <param name="normalisation">Whether to normalise the data</param>
        /// <param name="training">Consider [1,training] as training data</param>
        /// <param name="steps">How many look ahead predictive steps to do</param>
        /// <param name="historicalOnly">Should we only calculate covariances on the training data?</param>
        static void MultivariateVolatilityPredictions(Settings s,
                                                      string resultsDir,
                                                      string dataFileName,
                                                      double scaling      = 1.0,
                                                      bool normalisation  = true,
                                                      int training        = 200,
                                                      int steps           = 200,
                                                      bool historicalOnly = false)
        {
            // Load data
            var X          = Enumerable.Range(0, training).Select(i => Vector.Constant(1, i)).ToArray();
            var dataJagged = File.ReadAllLines(dataFileName).Take(training)
                             .Select<string, double[]>(i => i.Split(new char[] { ' ', ',', '\t' }, StringSplitOptions.RemoveEmptyEntries)
                                                        .Select(j => double.Parse(j) * scaling).ToArray()).ToArray();

            double[,] y = Utils.JaggedToFlat(Utils.transpose(dataJagged));
            if (normalisation)
            {
                using (var sw = new StreamWriter(resultsDir + @"/normalisation.txt"))
                    y = Utils.NormaliseRows(y, sw: sw, useFirst: training); // normalise using only the training portion
            }
            var missing = new bool[y.GetLength(0), y.GetLength(1)];

            // Train model
            var model = (new Wrappers()).NetworkModelNodeNoiseCA(X, y, missing, s, swfn: resultsDir + @"/results_init.txt");

            // calculate predictive covariances on training data
            var histVars = Utilities.HistoricalPredictiveCovariances(model);

            using (var sw = new StreamWriter(resultsDir + @"/historicalPredVars.txt"))
            {
                for (int i = 0; i < X.Length; i++)
                {
                    sw.WriteLine(histVars[i].SourceArray.Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                }
            }

            // calculate noise covariances on training data
            var histNoiseVars = Utilities.HistoricalNoiseCovariances(model);

            using (var sw = new StreamWriter(resultsDir + @"/historicalNoiseVars.txt"))
            {
                for (int i = 0; i < X.Length; i++)
                {
                    sw.WriteLine(histNoiseVars[i].SourceArray.Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                }
            }

            if (historicalOnly)
            {
                return;
            }

            // Get the variational posterior so we can warm start the training at each look ahead step
            var nodeNoisePrecs  = model.Marginal<Gamma[]>("nodeNoisePrecisions").Select(i => i.GetMean()).ToArray();
            var nodeSignalPrecs = model.Marginal<Gamma[]>("nodeSignalPrecisions").Select(i => i.GetMean()).ToArray();
            var obsNoisePrec    = model.Marginal<Gamma>("noisePrecision").GetMean();
            var finit           = model.Marginal<VectorGaussian[]>("nodeFunctions");
            var winit           = model.Marginal<VectorGaussian[,]>("weightFunctions");

            // Do "steps" look ahead steps
            for (int step = 0; step < steps; step++)
            {
                X          = Enumerable.Range(0, training + step + 1).Select(i => Vector.Constant(1, i)).ToArray();
                dataJagged = File.ReadAllLines(dataFileName).Take(training + step + 1)
                             .Select<string, double[]>(i => i.Split(new char[] { ' ', ',', '\t' }, StringSplitOptions.RemoveEmptyEntries)
                                                        .Select(j => double.Parse(j) * scaling).ToArray()).ToArray();
                y = Utils.JaggedToFlat(Utils.transpose(dataJagged));
                if (normalisation)
                {
                    y = Utils.NormaliseRows(y, useFirst: training + step);
                }

                // Rerun training each step but warm start with finit and winit
                VectorGaussian prediction = (new gpnetworkModel()).GPRN_MultivariateVolatility(X, y,
                                                                                               nodeSignalPrecs, nodeNoisePrecs, obsNoisePrec, ref finit, ref winit, model.nodeKernelOptimiser.kernel,
                                                                                               model.weightKernelOptimiser.kernel);

                using (var sw = new StreamWriter(resultsDir + @"/predictionMean" + step + ".txt"))
                    sw.WriteLine(prediction.GetMean().ToArray().Select(i => i.ToString()).Aggregate((p, q) => p + " " + q));
                using (var sw = new StreamWriter(resultsDir + @"/predictionVar" + step + ".txt"))
                    sw.WriteLine(prediction.GetVariance().SourceArray.Select(i => i.ToString()).Aggregate((p, q) => p + " " + q));
            }
        }
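A hypothetical invocation; the Settings constructor and its fields depend on the surrounding project, so this is only a sketch:

            var s = new Settings();
            MultivariateVolatilityPredictions(s, "results", "data.txt",
                                              scaling: 1.0, normalisation: true,
                                              training: 200, steps: 50);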
Example #9
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="MatrixVectorProductOp"]/message_doc[@name="AAverageConditional(VectorGaussian, DistributionArray2D{Gaussian, double}, Vector, PositiveDefiniteMatrix, DistributionStructArray2D{Gaussian, double})"]/*'/>
        public static DistributionStructArray2D<Gaussian, double> AAverageConditional([SkipIfUniform] VectorGaussian product, DistributionArray2D<Gaussian, double> A, Vector BMean, PositiveDefiniteMatrix BVariance, DistributionStructArray2D<Gaussian, double> result)
        {
            if (product.IsUniform())
            {
                result.SetToUniform();
                return(result);
            }
            if (!A.IsPointMass)
            {
                throw new ArgumentException("A is not a point mass");
            }
            // logZ = log N(mProduct; A*BMean, vProduct + A*BVariance*A')
            //      = -0.5 (mProduct - A*BMean)' inv(vProduct + A*BVariance*A') (mProduct - A*BMean) - 0.5 logdet(vProduct + A*BVariance*A')
            //      = -0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
            //        - 0.5 logdet(pProduct + pProduct*A*BVariance*A'*pProduct) + logdet(pProduct)
            // dlogZ   = 0.5 (dA*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
            //         +0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (dA*BMean)
            //         +0.5 (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) (pProduct*dA*BVariance*A'*pProduct + pProduct*A*BVariance*dA'*pProduct) inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean)
            //         - 0.5 tr(inv(pProduct + pProduct*A*BVariance*A'*pProduct) (pProduct*dA*BVariance*A'*pProduct + pProduct*A*BVariance*dA'*pProduct))
            // dlogZ/dA = pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean) BMean'
            //          + pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct (mProduct - A*BMean) (mProduct - A*BMean)' pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct*A*BVariance
            //          - pProduct inv(pProduct + pProduct*A*BVariance*A'*pProduct) pProduct A*BVariance
            var Amatrix                 = new Matrix(A.Point);
            var pProductA               = product.Precision * Amatrix;
            var pProductABV             = pProductA * BVariance;
            PositiveDefiniteMatrix prec = new PositiveDefiniteMatrix(product.Dimension, product.Dimension);

            prec.SetToSum(product.Precision, pProductABV * pProductA.Transpose());
            // pProductA is now free
            for (int i = 0; i < prec.Rows; i++)
            {
                if (prec[i, i] == 0)
                {
                    prec[i, i] = 1;
                }
            }
            var v           = prec.Inverse();
            var ABM         = Amatrix * BMean;
            var pProductABM = product.Precision * ABM;
            var diff        = pProductABM;

            diff.SetToDifference(product.MeanTimesPrecision, pProductABM);
            // ABM is now free
            var pProductV     = product.Precision * v;
            var pProductVdiff = ABM;

            pProductVdiff.SetToProduct(pProductV, diff);
            var Vdiff = v * diff;

            pProductV.Scale(-1);
            pProductV.SetToSumWithOuter(pProductV, 1, pProductVdiff, Vdiff);
            Matrix dlogZ = pProductA;

            dlogZ.SetToProduct(pProductV, pProductABV);
            dlogZ.SetToSumWithOuter(dlogZ, 1, pProductVdiff, BMean);
            int rows = A.GetLength(0);
            int cols = A.GetLength(1);

            for (int i = 0; i < rows; i++)
            {
                for (int j = 0; j < cols; j++)
                {
                    double dlogp = dlogZ[i, j];
                    // for now, we don't compute the second derivative.
                    double ddlogp = -1;
                    result[i, j] = Gaussian.FromDerivatives(A[i, j].Point, dlogp, ddlogp, false);

                    bool check = false;
                    if (check)
                    {
                        double logZ(Matrix m)
                        {
                            var vgm = ProductAverageConditional(m, BMean, BVariance, new VectorGaussianMoments(m.Rows));

                            return(VectorGaussian.GetLogProb(product.GetMean(), vgm.Mean, product.GetVariance() + vgm.Variance));
                        }

                        var    Amatrix2 = (Matrix)Amatrix.Clone();
                        double delta    = 1e-4;
                        Amatrix2[i, j] += delta;
                        double dlogp2 = (logZ(Amatrix2) - logZ(Amatrix)) / delta;
                        if (MMath.AbsDiff(dlogp, dlogp2, 1e-10) > 1e-5)
                        {
                            throw new Exception();
                        }
                    }
                }
            }
            return(result);
        }
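The inner "check" block above validates the analytic derivative dlogZ/dA against a forward finite difference, the same pattern applies to any scalar log-evidence function. A generic sketch, where f, x and analytic are placeholders for the function, the evaluation point and the analytic derivative:

            double delta  = 1e-4;
            double approx = (f(x + delta) - f(x)) / delta; // forward difference
            if (MMath.AbsDiff(analytic, approx, 1e-10) > 1e-5)
                throw new Exception("gradient check failed");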