Example 1
        /// <summary>
        /// Gets the predictive distribution for time N+1, used for the multivariate volatility experiments.
        /// </summary>
        /// <param name="model">The fitted network model to read marginals from.</param>
        /// <returns>The joint predictive distribution over the D outputs.</returns>
        public static VectorGaussian CorrelatedPredictions(INetworkModel model)
        {
            var noisePrecisionPost = model.Marginal <Gamma[]>("noisePrecisionArray");
            var f    = model.Marginal <Gaussian[][]>("nodeFunctionValuesPredictive");
            var W    = model.Marginal <Gaussian[, ][]>("weightFunctionValuesPredictive");
            int ni   = model.N - 1;
            var mean = Vector.Zero(model.D);
            var cov  = new PositiveDefiniteMatrix(model.D, model.D);

            for (int i = 0; i < model.D; i++)
            {
                cov[i, i] = noisePrecisionPost[i].GetMeanInverse();
                for (int k = 0; k < model.Q; k++)
                {
                    mean[i]   += W[i, k][ni].GetMean() * f[k][ni].GetMean();
                    cov[i, i] += W[i, k][ni].GetVariance() * (f[k][ni].GetMean() * f[k][ni].GetMean() + f[k][ni].GetVariance());
                }
                for (int j = 0; j < model.D; j++)
                {
                    for (int k = 0; k < model.Q; k++)
                    {
                        cov[i, j] += W[i, k][ni].GetMean() * W[j, k][ni].GetMean() * f[k][ni].GetVariance();
                    }
                }
            }
            return(VectorGaussian.FromMeanAndVariance(mean, cov));
        }
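
The diagonal update above uses the moment-matching identity for a product of independent Gaussians: Var(w*f) = Var(w)*(E[f]^2 + Var(f)) + E[w]^2*Var(f), where the E[w]^2*Var(f) part is contributed by the j == i pass of the second loop. The following sketch is not part of the original source; it assumes only Infer.NET's Gaussian type and checks the identity by Monte Carlo.

        public static void CheckProductMoments()
        {
            // Two independent scalar Gaussians with arbitrary moments
            var w = Gaussian.FromMeanAndVariance(0.5, 2.0);
            var f = Gaussian.FromMeanAndVariance(-1.0, 0.3);

            // Analytic variance of w*f, as used for the predictive covariance above
            double analytic = w.GetVariance() * (f.GetMean() * f.GetMean() + f.GetVariance())
                              + w.GetMean() * w.GetMean() * f.GetVariance();

            // Monte Carlo estimate of the same quantity
            int n = 100000;
            double sum = 0, sumSq = 0;
            for (int i = 0; i < n; i++)
            {
                double prod = w.Sample() * f.Sample();
                sum   += prod;
                sumSq += prod * prod;
            }
            double mc = sumSq / n - (sum / n) * (sum / n);
            Console.WriteLine("analytic = {0:G4}, Monte Carlo = {1:G4}", analytic, mc);
        }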
Example 2
        public IFunction Sample()
        {
            if (this.FixedParameters.NumberFeatures > 1)
            {
                throw new Exception("Sampling of a Sparse Gaussian Process is not supported for input spaces of dimension > 1.");
            }

            if (this.FixedParameters.NumberBasisPoints <= 0)
            {
                return(((Sampleable <IFunction>) this.FixedParameters.Prior).Sample());
            }

            // Try to find a reasonable range to sample from
            int    numSamplePoints = 51;
            double maxabs          = 1.0;

            for (int i = 0; i < this.FixedParameters.NumberBasisPoints; i++)
            {
                double absb = Math.Abs(this.FixedParameters.Basis[i][0]);
                if (maxabs < absb)
                {
                    maxabs = absb;
                }
            }
            maxabs *= 1.5; // Go beyond the basis points
            List <Vector> x      = new List <Vector>(numSamplePoints);
            double        increm = (2.0 * maxabs) / ((double)(numSamplePoints - 1));
            double        start  = -maxabs;
            double        currx  = start;

            for (int i = 0; i < numSamplePoints; i++)
            {
                Vector xv = Vector.Zero(1);
                xv[0] = currx;
                x.Add(xv);
                currx += increm;
            }

            // x now contains the set of input points at which we'll sample the
            // posterior function distribution
            VectorGaussian vg = VectorGaussian.FromMeanAndVariance(Mean(x), Covariance(x));
            // Sample to get the outputs
            Vector y = vg.Sample();

            // Build the spline
            LinearSpline ls = new LinearSpline();

            ls.KnotStart  = start;
            ls.KnotIncrem = increm;
            ls.YPoints    = y;

            return(ls as IFunction);
        }
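
A hedged usage sketch, not from the original source: when basis points are present, the returned IFunction is a linear spline over the sampled grid and can be evaluated at arbitrary inputs through IFunction.Evaluate. Here `sgp` stands for an already-configured SparseGP distribution.

        public static void EvaluateSampledFunction(SparseGP sgp)
        {
            IFunction sample = sgp.Sample();
            for (double xi = -2.0; xi <= 2.0; xi += 0.5)
            {
                Console.WriteLine("f({0}) = {1}", xi, sample.Evaluate(Vector.FromArray(xi)));
            }
        }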
Example 3
        public Mixture <VectorGaussian> ToMixture()
        {
            Mixture <VectorGaussian> mixture = new Mixture <VectorGaussian>();

            for (int i = 0; i < weights.Count; ++i)
            {
                mixture.Add(
                    VectorGaussian.FromMeanAndVariance(
                        MicrosoftResearch.Infer.Maths.Vector.FromArray(this.means[i]),
                        new PositiveDefiniteMatrix(JaggedArrayToMatrix(this.variances[i]))),
                    this.weights[i]);
            }
            return(mixture);
        }
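
The JaggedArrayToMatrix helper is not shown in this example. A plausible minimal implementation, offered here as an assumption rather than the original code, simply copies a jagged double[][] into a rectangular double[,]:

        private static double[,] JaggedArrayToMatrix(double[][] jagged)
        {
            int rows = jagged.Length;
            int cols = jagged[0].Length;
            var result = new double[rows, cols];
            for (int i = 0; i < rows; i++)
            {
                for (int j = 0; j < cols; j++)
                {
                    result[i, j] = jagged[i][j];
                }
            }
            return result;
        }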
Example 4
        /// <summary>
        /// The entry point of the program, where the program control starts and ends.
        /// </summary>
        public static void Main()
        {
            const int  T = 100;
            const int  K = 2;
            const int  N = 5;
            const bool showFactorGraph = false;

            TestHMM <ContinousHMM, double, double, Gaussian, Gaussian, double, Gamma, double>(
                T,
                K,
                1,
                Gaussian.FromMeanAndPrecision,
                () => Gaussian.FromMeanAndVariance(0, 1000),
                () => Gamma.FromShapeAndScale(1000, 0.001),
                showFactorGraph);

            // TModel, TEmit, TEmitDist, TEmitMeanDist, TEmitMean, TEmitPrecDist, TEmitPrec
            TestHMM <MultivariateHMM, Vector, Vector, VectorGaussian, VectorGaussian, Vector, Wishart, PositiveDefiniteMatrix>(
                T,
                K,
                1,
                VectorGaussian.FromMeanAndPrecision,
                () => VectorGaussian.FromMeanAndVariance(Vector.Zero(N), PositiveDefiniteMatrix.IdentityScaledBy(N, 1000)),
                () => Wishart.FromShapeAndScale(N, PositiveDefiniteMatrix.IdentityScaledBy(N, 0.001)),
                showFactorGraph);

            TestHMM <BinaryHMM, bool, double, Bernoulli, Beta, double, Beta, double>(
                T,
                K,
                1,
                (m, p) => new Bernoulli(m),
                () => new Beta(1, 1),
                null,
                showFactorGraph);

            TestHMM <DiscreteHMM, int, double, Discrete, Dirichlet, Vector, Dirichlet, Vector>(
                T,
                K,
                N,
                (m, p) => new Discrete(m),
                () => Dirichlet.Uniform(N),
                null,
                showFactorGraph);

            // TestBinaryHiddenMarkovModel();
            // TestDiscreteHiddenMarkovModel();
            // TestMultivariateHMM();
        }
Example 5
        public void MP_VectorGaussian2()
        {
            var nDimensions = Variable.New <int>().Named("nDimensions");
            var d           = new Range(nDimensions).Named("d");
            var meanPrior   = Variable.New <VectorGaussian>().Named("meanPrior");
            var mean        = Variable <Vector> .Random(meanPrior).Named("mean");

            mean.SetValueRange(d);
            mean.AddAttribute(new MarginalPrototype(new VectorGaussian(1)));
            var precision = Variable.Observed(PositiveDefiniteMatrix.Identity(1)).Named("precision");
            var x         = Variable.VectorGaussianFromMeanAndPrecision(mean, precision).Named("x");

            mean.ObservedValue        = Vector.Zero(1);
            meanPrior.ObservedValue   = VectorGaussian.FromMeanAndVariance(0, 1);
            nDimensions.ObservedValue = 1;
            InferenceEngine engine = new InferenceEngine();

            Console.WriteLine(engine.Infer <VectorGaussian>(x));
        }
Example 6
        public static VectorGaussian extendByOneDimension(VectorGaussian x, Gaussian marg)
        {
            var mean     = x.GetMean();
            var variance = x.GetVariance();
            var newMean  = Vector.Zero(x.Dimension + 1);
            var newVar   = new PositiveDefiniteMatrix(x.Dimension + 1, x.Dimension + 1);

            for (int i = 0; i < x.Dimension; i++)
            {
                newMean[i] = mean[i];
                for (int j = 0; j < x.Dimension; j++)
                {
                    newVar[i, j] = variance[i, j];
                }
            }
            newMean[x.Dimension]             = marg.GetMean();
            newVar[x.Dimension, x.Dimension] = marg.GetVariance();
            return(VectorGaussian.FromMeanAndVariance(newMean, newVar));
        }
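
A minimal usage sketch with hypothetical values (not from the original source): the appended dimension is independent of the existing ones, since the new off-diagonal covariance entries are left at zero.

        public static void ExtendExample()
        {
            var x = VectorGaussian.FromMeanAndVariance(
                Vector.FromArray(1.0, 2.0),
                PositiveDefiniteMatrix.Identity(2));
            var extended = extendByOneDimension(x, Gaussian.FromMeanAndVariance(0, 1));
            Console.WriteLine(extended.Dimension); // prints 3
        }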
Example 7
#pragma warning disable 162

        /// <summary>
        /// Gets the log of the integral of the product of this SparseGP and that SparseGP.
        /// </summary>
        /// <param name="that">The other SparseGP.</param>
        /// <returns>The log of the integral of the product.</returns>
        public double GetLogAverageOf(SparseGP that)
        {
            if (this.FixedParameters != that.FixedParameters)
            {
                throw new ArgumentException("SparseGPs do not have the same FixedParameters.  this.FixedParameters = " + this.FixedParameters + ", that.FixedParameters = " +
                                            that.FixedParameters);
            }
            if (this.IncludePrior && that.IncludePrior)
            {
                throw new ArgumentException("Both SparseGPs include the prior");
            }
            if (that.IsPointMass)
            {
                return(GetLogProb(that.Point));
            }
            if (this.IsPointMass)
            {
                return(that.GetLogProb(this.Point));
            }
            if (this.IncludePrior && !that.IncludePrior)
            {
                // gBB is the distribution of the function on the basis
                VectorGaussian gBB;
                if (true)
                {
                    gBB = new VectorGaussian(InducingDist.Dimension);
                    gBB.Precision.SetToSum(FixedParameters.InvKernelOf_B_B, InducingDist.Precision);
                    gBB.MeanTimesPrecision.SetTo(InducingDist.MeanTimesPrecision); // since prior has zero mean
                }
                else
                {
                    // equivalent but slower
                    gBB = VectorGaussian.FromMeanAndVariance(Mean_B, Var_B_B);
                }
                return(gBB.GetLogAverageOf(that.InducingDist));
            }
            if (!this.IncludePrior && that.IncludePrior)
            {
                return(that.GetLogAverageOf(this));
            }
            throw new NotImplementedException();
        }
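
For plain (non-sparse) VectorGaussians, GetLogAverageOf computes the log of the integral of p(x) q(x) dx, which for two one-dimensional Gaussians equals log N(m1 - m2; 0, v1 + v2). A quick consistency sketch, not from the original source:

        public static void LogAverageCheck()
        {
            var p = VectorGaussian.FromMeanAndVariance(
                Vector.FromArray(1.0), PositiveDefiniteMatrix.Identity(1));
            var q = VectorGaussian.FromMeanAndVariance(
                Vector.FromArray(2.0), PositiveDefiniteMatrix.IdentityScaledBy(1, 3.0));
            double logAvg = p.GetLogAverageOf(q);
            // log N(m1 - m2; 0, v1 + v2)
            double expected = Gaussian.FromMeanAndVariance(0, 1.0 + 3.0).GetLogProb(1.0 - 2.0);
            Console.WriteLine("{0} vs {1}", logAvg, expected);
        }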
Example 8
        /// <summary>
        /// Creates the Gaussian object from the given mean and covariance data.
        /// </summary>
        public void InitGaussian()
        {
            if (gaussian != null)
            {
                return;
            }

            Microsoft.ML.Probabilistic.Math.Vector mean = null;
            Microsoft.ML.Probabilistic.Math.PositiveDefiniteMatrix covar = null;
            try
            {
                mean     = this.means.toMathVector();
                covar    = new Microsoft.ML.Probabilistic.Math.PositiveDefiniteMatrix(covariance);
                gaussian = VectorGaussian.FromMeanAndVariance(mean, covar);

                checkGaussian();
            }
            catch (Exception e)
            {
                //The usual cause of an exception here is that the covariance matrix is not positive definite.
                //This happens if the diagonal values of the covariance matrix are (very close to) zero.
                //A simple fix is to add a very small constant to the diagonal.
                //logger.Error(e.Message);
                for (int i = 0; i < covariance.GetLength(0); i++)
                {
                    for (int j = 0; j < covariance.GetLength(1); j++)
                    {
                        if (i == j)
                        {
                            covariance[i, j] += 0.001;
                        }
                        //if (i == j && covariance[i, j] < 0) covariance[i, j] += 0.01;
                        //if (i != j) covariance[i,j] = 0;
                    }
                }
                covar    = new Microsoft.ML.Probabilistic.Math.PositiveDefiniteMatrix(covariance);
                gaussian = VectorGaussian.FromMeanAndVariance(mean, covar);
                checkGaussian();
            }
        }
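
The diagonal-loading trick used in the catch block above can be written as a standalone helper. This is a sketch, with the 0.001 constant taken from the code above:

        private static Microsoft.ML.Probabilistic.Math.PositiveDefiniteMatrix AddJitter(double[,] covariance, double eps = 0.001)
        {
            // Lifting the diagonal makes a near-singular covariance numerically positive definite
            for (int i = 0; i < covariance.GetLength(0); i++)
            {
                covariance[i, i] += eps;
            }
            return new Microsoft.ML.Probabilistic.Math.PositiveDefiniteMatrix(covariance);
        }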
Example 9
        public static VectorGaussian CorrelatedPredictionsHelper(Gaussian[][] f, Gaussian[, ][] W, Gamma noisePrecisionPost, int Q, int D, int ni)
        {
            var mean = Vector.Zero(D);
            var cov  = new PositiveDefiniteMatrix(D, D);

            for (int i = 0; i < D; i++)
            {
                cov[i, i] = noisePrecisionPost.GetMeanInverse();
                for (int k = 0; k < Q; k++)
                {
                    mean[i]   += W[i, k][ni].GetMean() * f[k][ni].GetMean();
                    cov[i, i] += W[i, k][ni].GetVariance() * (f[k][ni].GetMean() * f[k][ni].GetMean() + f[k][ni].GetVariance());
                }
                for (int j = 0; j < D; j++)
                {
                    for (int k = 0; k < Q; k++)
                    {
                        cov[i, j] += W[i, k][ni].GetMean() * W[j, k][ni].GetMean() * f[k][ni].GetVariance();
                    }
                }
            }
            return(VectorGaussian.FromMeanAndVariance(mean, cov));
        }
Example 10
        /// <summary>
        /// Run our variational Bayes (VMP) implementation of the Semi Parametric Latent Factor Model of
        /// Teh, Y., Seeger, M., and Jordan, M. (AISTATS 2005).
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="isMissing">Which elements of Y are missing</param>
        /// <param name="settings">Algorithm settings</param>
        /// <param name="errorMeasureNames">Names of the error measures written to the log</param>
        /// <param name="modelAssessor">Optional callback computing error measures for the current model</param>
        /// <param name="swfn">Filename for logging</param>
        /// <returns>Fitted model</returns>
        public SPLFM_VMP RunSPLFM_VMP(Vector[] inputs,
                                      double[,] data,
                                      bool[,] isMissing,
                                      Settings settings,
                                      string[] errorMeasureNames = null,
                                      Converter <IPredictionSPLFMModel, double[]> modelAssessor = null,
                                      string swfn = null)
        {
            var model = new SPLFM_VMP();

            var nodeOptimiser = new KernelOptimiser(settings);

            nodeOptimiser.xData = inputs;

            nodeOptimiser.kernel           = ObjectCloner.Clone(settings.node_kernel);
            nodeOptimiser.hypersToOptimise = settings.nodeHypersToOptimise;

            var nodeFunctionsInit = Enumerable.Range(0, settings.Q).Select(i =>
                                                                           VectorGaussian.FromMeanAndVariance(
                                                                               VectorGaussian.Sample(Vector.Zero(data.GetLength(1)), PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), 100)),
                                                                               PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), settings.init_precision))).ToArray(); // should put this manually in generated code
            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            double inputsRange
                = inputs.Select(i => i[0]).Max() - inputs.Select(i => i[0]).Min();

            Console.WriteLine("Init node kernel {0}", settings.node_kernel);

            model.SetObservedValue("D", data.GetLength(0));
            model.SetObservedValue("Q", settings.Q);
            model.SetObservedValue("N", data.GetLength(1));
            model.SetObservedValue("observedData", data);
            model.SetObservedValue("nodeFunctionsInitVar", distArray);
            model.SetObservedValue("K_node_inverse", Utils.GramMatrix(nodeOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("noisePrecisionPrior", settings.noisePrecisionPrior);
            //model.SetObservedValue("nodeNoisePrecisionPrior", settings.nodeNoisePrecisionPrior);
            model.SetObservedValue("nodeSignalPrecisionsPrior", Enumerable.Range(0, settings.Q).Select(o => settings.nodeSignalPrecisionsPrior).ToArray());
            model.SetObservedValue("isMissing", isMissing);

            model.nodeKernelOptimiser = nodeOptimiser;

            model.Reset();

            var start = DateTime.Now;

            if (swfn != null)
            {
                using (var sw = new StreamWriter(swfn, true))
                {
                    sw.Write("{0} {1} {2}", "it", "time", "ml");
                    if (errorMeasureNames != null)
                    {
                        sw.Write(" " + errorMeasureNames.Aggregate((p, q) => p + " " + q));
                    }
                    sw.Write(" " + Utils.KernelHyperNames(nodeOptimiser.kernel).Select(o => "node_" + o).Aggregate((p, q) => p + " " + q));

                    sw.Write(" noise");
                    for (int i = 0; i < settings.Q; i++)
                    {
                        sw.Write(" signal" + i);
                    }
                    sw.WriteLine();
                }
            }

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < settings.max_iterations; it++)
            {
                model.Update(1);
                ml = model.Marginal <Bernoulli>("ev").LogOdds;

                var noisePrecisionPost = model.Marginal <Gamma>("noisePrecision");

                var assessment = (modelAssessor != null) ? modelAssessor(model).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q) : "";

                Console.WriteLine("It " + it + " node " + nodeOptimiser.kernel + " ml " + ml + " err  " + assessment);
                if (Math.Abs(oldML - ml) < settings.ml_tolerance)
                {
                    break;
                }

                oldML = ml;

                if (swfn != null)
                {
                    using (var sw = new StreamWriter(swfn, true))
                    {
                        var nodeSignalPrecisionsPost = model.Marginal <Gamma[]>("nodeSignalPrecisions");

                        sw.Write("{0} {1} {2}", it, (DateTime.Now - start).TotalMilliseconds, ml);
                        if (modelAssessor != null)
                        {
                            sw.Write(" " + assessment);
                        }
                        sw.Write(" " + Utils.KernelToArray(nodeOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + noisePrecisionPost.GetMeanInverse());
                        for (int i = 0; i < settings.Q; i++)
                        {
                            sw.Write(" " + nodeSignalPrecisionsPost[i].GetMeanInverse());
                        }
                        sw.WriteLine();
                    }
                }
            }


            Console.WriteLine("Finished after " + it);

            return(model);
        }
Example 11
        /// <summary>
        /// Run GPRN without node noise
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="settings">Algorithm settings</param>
        /// <param name="swfn">Filename for logging</param>
        /// <returns>Fitted model</returns>
        public GPRN_VMP NetworkModelCA(Vector[] inputs,
                                       double[,] data,
                                       Settings settings,
                                       string swfn = null)
        {
            bool anyIsMissing = false; // AnyIsMissing(isMissing);

            var model = new GPRN_VMP();

            var nodeOptimiser   = new KernelOptimiser(settings);
            var weightOptimiser = new KernelOptimiser(settings);

            nodeOptimiser.xData   = inputs;
            weightOptimiser.xData = inputs;

            nodeOptimiser.kernel             = ObjectCloner.Clone(settings.node_kernel);
            nodeOptimiser.hypersToOptimise   = settings.nodeHypersToOptimise;
            weightOptimiser.kernel           = ObjectCloner.Clone(settings.weight_kernel);
            weightOptimiser.hypersToOptimise = settings.weightHypersToOptimise;

            var nodeFunctionsInit = Enumerable.Range(0, settings.Q).Select(i =>
                                                                           VectorGaussian.FromMeanAndVariance(
                                                                               VectorGaussian.Sample(Vector.Zero(data.GetLength(1)), PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), 100)),
                                                                               PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), settings.init_precision))).ToArray(); // should put this manually in generated code
            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            double inputsRange
                = inputs.Select(i => i[0]).Max() - inputs.Select(i => i[0]).Min();

            Console.WriteLine("Init node kernel {0}\ninit weight kernel {1}", settings.node_kernel, settings.weight_kernel);

            model.SetObservedValue("D", data.GetLength(0));
            model.SetObservedValue("Q", settings.Q);
            model.SetObservedValue("N", data.GetLength(1));
            model.SetObservedValue("observedData", data);
            model.SetObservedValue("nodeFunctionsInitVar", distArray);
            model.SetObservedValue("K_node_inverse", Utils.GramMatrix(nodeOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("K_weights_inverse", Utils.GramMatrix(weightOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("noisePrecisionPrior", settings.noisePrecisionPrior);
            //model.SetObservedValue("nodeNoisePrecisionPrior", settings.nodeNoisePrecisionPrior);
            model.SetObservedValue("nodeSignalPrecisionsPrior", settings.nodeSignalPrecisionsPrior);
            //model.SetObservedValue("isMissing", isMissing);

            model.nodeKernelOptimiser   = nodeOptimiser;
            model.weightKernelOptimiser = weightOptimiser;

            model.Reset();

            var start = DateTime.Now;

            if (swfn != null)
            {
                using (var sw = new StreamWriter(swfn, true))
                {
                    sw.Write("{0} {1} {2}", "it", "time", "ml");
                    if (anyIsMissing)
                    {
                        sw.Write(" {0} {1}", "logProb", "error");
                    }
                    sw.Write(" " + Utils.KernelHyperNames(nodeOptimiser.kernel).Select(o => "node_" + o).Aggregate((p, q) => p + " " + q));
                    sw.Write(" " + Utils.KernelHyperNames(weightOptimiser.kernel).Select(o => "weight_" + o).Aggregate((p, q) => p + " " + q));
                    sw.Write(" noise");
                    for (int i = 0; i < settings.Q; i++)
                    {
                        sw.Write(" signal" + i);
                    }

                    sw.WriteLine();
                }
            }

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < settings.max_iterations; it++)
            {
                model.Update(1);
                ml = model.Marginal <Bernoulli>("ev").LogOdds;
                var    noisePrecisionPost = model.Marginal <Gamma>("noisePrecision");
                double logProb = 0, error = 0, MSLL = 0, SMSE = 0;

                Console.WriteLine("It {9} Time: {8:G3} Node ls=exp({0:G3})={1:G3} Weight ls=exp({2:G3})={3:G3} ml={4:G3} error={5:G3} msll={6:G3} smse={7:G3}", nodeOptimiser.kernel[0], Math.Exp(nodeOptimiser.kernel[0]),
                                  weightOptimiser.kernel[0], Math.Exp(weightOptimiser.kernel[0]), ml, error, MSLL, SMSE, (DateTime.Now - start).TotalMilliseconds, it);
                if (Math.Abs(oldML - ml) < settings.ml_tolerance)
                {
                    break;
                }

                oldML = ml;

                if (swfn != null)
                {
                    using (var sw = new StreamWriter(swfn, true))
                    {
                        var nodeSignalPrecisionsPost = model.Marginal <Gamma[]>("nodeSignalPrecisions");

                        sw.Write("{0} {1} {2}", it, (DateTime.Now - start).TotalMilliseconds, ml);
                        if (anyIsMissing)
                        {
                            sw.Write(" {0} {1}", logProb, error);
                        }
                        sw.Write(" " + Utils.KernelToArray(nodeOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + Utils.KernelToArray(weightOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + noisePrecisionPost.GetMeanInverse());
                        for (int i = 0; i < settings.Q; i++)
                        {
                            sw.Write(" " + nodeSignalPrecisionsPost[i].GetMeanInverse());
                        }
                        sw.WriteLine();
                    }
                }
            }


            Console.WriteLine("Finished after " + it);

            return(model);
        }
Example 12
        public static Mixture <VectorGaussian> Fit(MicrosoftResearch.Infer.Maths.Vector[] data, int componentCount, int retryCount, double tolerance = 1e-4)
        {
            Debug.Assert(data != null);
            Debug.Assert(data.Length > componentCount * 3);
            Debug.Assert(componentCount > 1);
            Debug.Assert(retryCount >= 0);

            int dimensions = data[0].Count;

            // Find point boundary
            MicrosoftResearch.Infer.Maths.Vector min = data[0].Clone();
            MicrosoftResearch.Infer.Maths.Vector max = min.Clone();
            for (int i = 1; i < data.Length; ++i)
            {
                Debug.Assert(dimensions == data[i].Count);
                for (int j = 0; j < dimensions; ++j)
                {
                    min[j] = Math.Min(min[j], data[i][j]);
                    max[j] = Math.Max(max[j], data[i][j]);
                }
            }

            // Initialize solution
            MicrosoftResearch.Infer.Maths.Vector[] means = new MicrosoftResearch.Infer.Maths.Vector[componentCount];
            PositiveDefiniteMatrix[] covariances         = new PositiveDefiniteMatrix[componentCount];
            for (int i = 0; i < componentCount; ++i)
            {
                GenerateRandomMixtureComponent(min, max, out means[i], out covariances[i]);
            }
            double[] weights = Enumerable.Repeat(1.0 / componentCount, componentCount).ToArray();

            // EM algorithm for GMM
            double[,] expectations = new double[data.Length, componentCount];
            double       lastEstimate;
            const double negativeInfinity = -1e+20;
            bool         convergenceDetected;
            double       currentEstimate = negativeInfinity;

            do
            {
                lastEstimate        = currentEstimate;
                convergenceDetected = false;

                // E-step: estimate expectations on hidden variables
                for (int i = 0; i < data.Length; ++i)
                {
                    double sum = 0;
                    for (int j = 0; j < componentCount; ++j)
                    {
                        expectations[i, j] =
                            Math.Exp(VectorGaussian.GetLogProb(data[i], means[j], covariances[j])) * weights[j];
                        sum += expectations[i, j];
                    }
                    for (int j = 0; j < componentCount; ++j)
                    {
                        expectations[i, j] /= sum;
                    }
                }

                // M-step:

                // Re-estimate means
                for (int j = 0; j < componentCount; ++j)
                {
                    means[j] = MicrosoftResearch.Infer.Maths.Vector.Zero(dimensions);
                    double sum = 0;
                    for (int i = 0; i < data.Length; ++i)
                    {
                        means[j] += data[i] * expectations[i, j];
                        sum      += expectations[i, j];
                    }
                    means[j] *= 1.0 / sum;
                }

                // Re-estimate covariances
                for (int j = 0; j < componentCount; ++j)
                {
                    Matrix covariance = new Matrix(dimensions, dimensions);
                    double sum        = 0;
                    for (int i = 0; i < data.Length; ++i)
                    {
                        MicrosoftResearch.Infer.Maths.Vector dataDiff = data[i] - means[j];
                        covariance += dataDiff.Outer(dataDiff) * expectations[i, j];
                        sum        += expectations[i, j];
                    }
                    covariance    *= 1.0 / sum;
                    covariances[j] = new PositiveDefiniteMatrix(covariance);

                    // Guard against a collapsing component: a near-singular covariance
                    // (log-determinant below -30) forces the component to be regenerated.
                    if (covariances[j].LogDeterminant() < -30)
                    {
                        DebugConfiguration.WriteDebugText("Convergence detected for component {0}", j);
                        if (retryCount == 0)
                        {
                            throw new InvalidOperationException("Can't fit GMM. Retry number exceeded.");
                        }

                        retryCount -= 1;
                        GenerateRandomMixtureComponent(min, max, out means[j], out covariances[j]);
                        DebugConfiguration.WriteDebugText("Component {0} regenerated", j);

                        convergenceDetected = true;
                    }
                }

                if (convergenceDetected)
                {
                    currentEstimate = negativeInfinity;
                    continue;
                }

                // Re-estimate weights
                double expectationSum = 0;
                for (int j = 0; j < componentCount; ++j)
                {
                    weights[j] = 0;
                    for (int i = 0; i < data.Length; ++i)
                    {
                        weights[j]     += expectations[i, j];
                        expectationSum += expectations[i, j];
                    }
                }
                for (int j = 0; j < componentCount; ++j)
                {
                    weights[j] /= expectationSum;
                }

                // Compute likelihood estimate
                currentEstimate = 0;
                for (int i = 0; i < data.Length; ++i)
                {
                    for (int j = 0; j < componentCount; ++j)
                    {
                        currentEstimate +=
                            expectations[i, j] * (VectorGaussian.GetLogProb(data[i], means[j], covariances[j]) + Math.Log(weights[j]));
                    }
                }

                DebugConfiguration.WriteDebugText("L={0:0.000000}", currentEstimate);
            } while (convergenceDetected || (currentEstimate - lastEstimate > tolerance));

            Mixture <VectorGaussian> result = new Mixture <VectorGaussian>();

            for (int j = 0; j < componentCount; ++j)
            {
                result.Add(VectorGaussian.FromMeanAndVariance(means[j], covariances[j]), weights[j]);
            }

            DebugConfiguration.WriteDebugText("GMM successfully fitted.");

            return(result);
        }
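
A hedged usage sketch with hypothetical data (not from the original source) showing how Fit is typically called:

        public static void FitExample()
        {
            var rng = new Random(1);
            var data = Enumerable.Range(0, 200)
                .Select(_ => MicrosoftResearch.Infer.Maths.Vector.FromArray(rng.NextDouble(), rng.NextDouble()))
                .ToArray();
            Mixture <VectorGaussian> gmm = Fit(data, componentCount: 2, retryCount: 3);
        }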
Example 13
        /// <summary>
        /// Primary definition of the GPRN model as an Infer.NET model.
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        /// <param name="constrainWpositive">Whether to constrain W to be positive [experimental]</param>
        /// <param name="isotropicNoise">Whether to use isotropic observation noise</param>
        /// <param name="meanFunctions">Whether to include a per output mean function</param>
        /// <param name="initLoglengthscales">Initial values for the length scales of the kernels</param>
        /// <param name="sw">An output file for logging</param>
        public void GPRN_InferNET_model(Vector[] inputs,
                                        double[,] data,
                                        int Q,
                                        bool grid                    = false,
                                        bool[,] missing              = null,
                                        bool nodeFunctionNoise       = false,
                                        bool constrainWpositive      = false,
                                        bool isotropicNoise          = true,
                                        bool meanFunctions           = false,
                                        double[] initLoglengthscales = null,
                                        StreamWriter sw              = null)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0)) + new WhiteNoise(-3);
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            SummationKernel kf_weights = new SummationKernel(new SquaredExponential(1)) + new WhiteNoise(-3);
            var             K_weights  = Utils.GramMatrix(kf_weights, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // bool[,] elements default to false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues  = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValues2 = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            if (constrainWpositive)
            {
                var weightFunctionValuesCopy = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesCopy");
                weightFunctionValuesCopy[d, q][n] = Variable.GaussianFromMeanAndPrecision(weightFunctionValues[d, q][n], 100);
                Variable.ConstrainPositive(weightFunctionValuesCopy[d, q][n]);
            }
            weightFunctionValues2[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            Variable <double>      noisePrecision      = null;
            VariableArray <double> noisePrecisionArray = null;

            if (isotropicNoise)
            {
                noisePrecision = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");
                toInfer.Add(noisePrecision);
            }
            else
            {
                noisePrecisionArray    = Variable.Array <double>(d).Named("noisePrecision");
                noisePrecisionArray[d] = Variable.Random <double, Gamma>(noisePrecisionPrior).ForEach(d);
                toInfer.Add(noisePrecisionArray);
            }

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            VariableArray <VariableArray <double>, double[][]> meanFunctionValues = null;

            if (meanFunctions)
            {
                GPFactor.settings = new Settings
                {
                    solverMethod = Settings.SolverMethod.GradientDescent,
                };

                VariableArray <KernelFunction> kf = Variable.Array <KernelFunction>(d);
                kf.ObservedValue = Enumerable.Range(0, D.ObservedValue).Select(
                    o => new SummationKernel(new SquaredExponential()) + new WhiteNoise(-3)).ToArray();

                var mf = Variable.Array <Vector>(d).Named("meanFunctions");
                mf[d] = Variable <Vector> .Factor <double, Vector[], int[], KernelFunction>(MyFactors.GP, 1.0 /*Variable.GammaFromShapeAndRate(1,1)*/, inputs, new int[] { 0 },
                                                                                            kf[d]);

                mf.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
                meanFunctionValues    = Variable.Array(Variable.Array <double>(n), d).Named("meanFunctionValues");
                meanFunctionValues[d] = Variable.ArrayFromVector(mf[d], n);
                toInfer.Add(meanFunctionValues);
            }

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q] = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    if (meanFunctions)
                    {
                        noiseLessY[d, n] = Variable.Sum(temp) + meanFunctionValues[d][n];
                    }
                    else
                    {
                        noiseLessY[d, n] = Variable.Sum(temp);
                    }
                    using (Variable.IfNot(isMissing[d, n]))
                        if (isotropicNoise)
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                        }
                        else
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecisionArray[d]);
                        }
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValues2
            });

            var infer = new InferenceEngine(new VariationalMessagePassing());

            infer.ModelName = "MeanFunction";
            var ca = infer.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            var kernel = new SummationKernel(new SquaredExponential(initLoglengthscales[0]));

            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            kernel  = new SummationKernel(new SquaredExponential(initLoglengthscales[1]));
            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            ca.Reset();
            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 100; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }
            Console.WriteLine("Finished after " + it);
        }
Example 14
        /// <summary>
        /// An implementation of GPRN specialised for one-step-ahead multivariate volatility experiments.
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="nodeSignalPrecs">Fixed signal precisions for the node functions</param>
        /// <param name="nodeNoisePrecs">Fixed noise precisions for the node functions</param>
        /// <param name="obsNoisePrec">Fixed observation noise precision</param>
        /// <param name="finit">Initial node function marginals; replaced with the fitted posteriors on return</param>
        /// <param name="winit">Initial weight function marginals; replaced with the fitted posteriors on return</param>
        /// <param name="nodeKernel">Kernel for the node functions</param>
        /// <param name="weightKernel">Kernel for the weight functions</param>
        /// <returns>Predicted covariance for the next time point</returns>
        public VectorGaussian GPRN_MultivariateVolatility(
            Vector[] inputs,
            double[,] data,
            double[] nodeSignalPrecs,
            double[] nodeNoisePrecs,
            double obsNoisePrec,
            ref VectorGaussian[] finit,
            ref VectorGaussian[,] winit,
            KernelFunction nodeKernel,
            KernelFunction weightKernel)
        {
            var missing = new bool[data.GetLength(0), data.GetLength(1)];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                missing[i, data.GetLength(1) - 1] = true; // last data point is missing
            }
            int Q = nodeSignalPrecs.Length;

            var toInfer   = new List <IVariable>();
            var K_node    = Utils.GramMatrix(nodeKernel, inputs);
            var K_weights = Utils.GramMatrix(weightKernel, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");

            nodeSignalPrecisions.ObservedValue = nodeSignalPrecs;

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");

            nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            var nodeNoisePrecisions = Variable.Array <double>(q).Named("nodeNoisePrecisions");

            nodeNoisePrecisions.ObservedValue = nodeNoisePrecs;
            nodeFunctionValues[q][n]          = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues           = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);

            weightFunctionValuesPredictive[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData   = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecision = Variable.Observed(obsNoisePrec).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            var finitNew = finit.Select(i => Utils.extendByOneDimension(i, Gaussian.FromMeanAndVariance(0, 1))).ToArray();

            nodeFunctions.InitialiseTo(Distribution <Vector> .Array(finitNew));

            var winitNew = new VectorGaussian[data.GetLength(0), Q];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                for (int j = 0; j < Q; j++)
                {
                    winitNew[i, j] = Utils.extendByOneDimension(winit[i, j], Gaussian.FromMeanAndVariance(0, 1));
                }
            }

            weightFunctions.InitialiseTo(Distribution <Vector> .Array(winitNew));

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctions, weightFunctions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValuesPredictive                                      /* is this redundant? */
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(nodeKernel, inputs).Inverse());
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(weightKernel, inputs).Inverse());
            ca.Reset();

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 30; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }

            var f = ca.Marginal <Gaussian[][]>("nodeFunctionValuesPredictive");
            var W = ca.Marginal <Gaussian[, ][]>("weightFunctionValuesPredictive");

            finit = ca.Marginal <VectorGaussian[]>(nodeFunctions.NameInGeneratedCode);
            winit = ca.Marginal <VectorGaussian[, ]>(weightFunctions.NameInGeneratedCode);
            return(Utils.CorrelatedPredictionsHelper(f, W, Gamma.PointMass(obsNoisePrec), Q, data.GetLength(0), data.GetLength(1) - 1));
        }
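
A short follow-up sketch (assuming `pred` holds the VectorGaussian returned above): the one-step-ahead predictive moments can be read off directly.

        public static void ReadPredictiveMoments(VectorGaussian pred)
        {
            Vector predMean = pred.GetMean();
            PositiveDefiniteMatrix predCov = pred.GetVariance();
            Console.WriteLine("Predicted mean: " + predMean);
            Console.WriteLine("Predicted covariance:\n" + predCov);
        }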
Example 15
        /// <summary>
        /// Infer.NET definition of the Semi Parametric Latent Factor Model of
        /// Teh, Y., Seeger, M., and Jordan, M. (AISTATS 2005).
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        public void SPLFM(
            Vector[] inputs,
            double[,] data,
            int Q,
            bool[,] missing        = null,
            bool nodeFunctionNoise = false)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0));
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // bool[,] elements default to false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weights = Variable.Array <double>(d, q).Named("weights");

            weights[d, q] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(d, q);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            var noisePrecision      = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weights[d, q] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, noisePrecision, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weights
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());

            ie.ModelName = "SPLFM";
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.Execute(100);
            var fvals      = ca.Marginal <Gaussian[][]>(nodeFunctionValues.NameInGeneratedCode)[0]; // [q][n]
            var x          = inputs.Select(i => i[0]).ToArray();
            var mplWrapper = new MatplotlibWrapper();

            mplWrapper.AddArray("x", x);
            mplWrapper.AddArray("y", fvals.Select(i => i.GetMean()).ToArray());
            mplWrapper.AddArray("s", fvals.Select(i => Math.Sqrt(i.GetVariance())).ToArray());

            mplWrapper.Plot(new string[] {
                "fill_between(x,y-s,y+s,color=\"gray\")",
                "ylabel(\"node (fitted)\")"
            });
        }