Example #1
        public static void GenData(int n, Vector w, double b,
                                   out Vector[] X, out bool[] Y, int seed = 1)
        {
            if (w == null)
            {
                throw new ArgumentNullException(nameof(w), "coefficient vector w cannot be null");
            }

            Rand.Restart(seed);
            int d = w.Count;

            X = new Vector[n];
            Y = new bool[n];

            for (int i = 0; i < n; i++)
            {
                // Inputs are drawn from a standard multivariate normal
                X[i] = Vector.Zero(d);
                Rand.Normal(Vector.Zero(d), PositiveDefiniteMatrix.IdentityScaledBy(d, 1), X[i]);

                // Label is a Bernoulli draw from the logistic of the linear response
                double inner = w.Inner(X[i]);
                double p     = MMath.Logistic(inner + b);

                Y[i] = Bernoulli.Sample(p);
            }
        }
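A minimal usage sketch for GenData (the weight values and seed below are hypothetical; Y.Count requires System.Linq):

        Vector w = Vector.FromArray(1.5, -2.0, 0.5);   // assumed ground-truth coefficients
        double b = 0.3;                                // assumed bias
        Vector[] X;
        bool[] Y;
        GenData(100, w, b, out X, out Y, seed: 42);
        Console.WriteLine("Positive labels: " + Y.Count(y => y) + " of " + Y.Length);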
Example #2
        /// <summary>
        /// Returns the community score matrix prior.
        /// </summary>
        /// <returns>The community score matrix prior.</returns>
        private VectorGaussian[] GetScoreMatrixPrior()
        {
            var dim       = new Range(LabelCount);
            var mean      = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(LabelCount), PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, 1));
            var prec      = Variable.WishartFromShapeAndRate(1.0, PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, 1));
            var score     = Variable.VectorGaussianFromMeanAndPrecision(mean, prec);
            var confusion = Variable.Softmax(score);

            confusion.SetValueRange(dim);
            var confusionConstraint = Variable.New <Dirichlet>();

            Variable.ConstrainEqualRandom(confusion, confusionConstraint);
            var engine = new InferenceEngine(new VariationalMessagePassing())
            {
                ShowProgress = false
            };

            engine.Compiler.WriteSourceFiles = false;
            var scorePrior = new VectorGaussian[LabelCount];

            for (int d = 0; d < LabelCount; d++)
            {
                confusionConstraint.ObservedValue = new Dirichlet(Util.ArrayInit(LabelCount, i => i == d ? (InitialWorkerBelief / (1 - InitialWorkerBelief)) * (LabelCount - 1) : 1.0));
                scorePrior[d] = engine.Infer <VectorGaussian>(score);
            }

            return(scorePrior);
        }
Example #3
        /// <summary>
        /// Initializes the CBCC model with a number of communities.
        /// </summary>
        /// <param name="taskCount">The number of tasks.</param>
        /// <param name="labelCount">The number of labels.</param>
        /// <param name="communityCount">The number of communities.</param>
        public virtual void CreateModel(int taskCount, int labelCount, int communityCount)
        {
            Evidence = Variable <bool> .Random(this.EvidencePrior);

            var evidenceBlock = Variable.If(Evidence);

            CommunityCount             = communityCount;
            CommunityProbPriorObserved = Dirichlet.Symmetric(communityCount, CommunityPseudoCount);
            DefineVariablesAndRanges(taskCount, labelCount);
            DefineGenerativeProcess();
            DefineInferenceEngine();
            evidenceBlock.CloseBlock();

            if (ScoreMeanParameters == null)
            {
                var scoreMatrixPrior = GetScoreMatrixPrior();
                CommunityScoreMatrixPriorObserved = Util.ArrayInit(CommunityCount, comm => Util.ArrayInit(labelCount, lab => new VectorGaussian(scoreMatrixPrior[lab])));
            }
            else
            {
                CommunityScoreMatrixPriorObserved = Util.ArrayInit(
                    CommunityCount,
                    comm => Util.ArrayInit(
                        labelCount, lab => VectorGaussian.FromMeanAndPrecision(
                            Vector.FromArray(
                                Util.ArrayInit(labelCount, lab1 => lab == lab1 ? ScoreMeanParameters[comm].Item1 : ScoreMeanParameters[comm].Item2)),
                            PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, ScorePrecisionParameters[comm]))));
            }
        }
Example #4
        /// <summary>
        /// For the multinomial regression model: generate synthetic data,
        /// infer the model parameters and calculate the RMSE between the true
        /// and mean inferred coefficients.
        /// </summary>
        /// <param name="numSamples">Number of samples</param>
        /// <param name="numFeatures">Number of input features</param>
        /// <param name="numClasses">Number of classes</param>
        /// <param name="countPerSample">Total count per sample</param>
        /// <returns>RMSE between the true and mean inferred coefficients</returns>
        public double MultinomialRegressionSynthetic(
            int numSamples, int numFeatures, int numClasses, int countPerSample, double noiseVar = 0.0)
        {
            var features     = new Vector[numSamples];
            var counts       = new int[numSamples][];
            var coefficients = new Vector[numClasses];
            var bias         = Vector.Zero(numClasses);

            Rand.Restart(1);
            for (int i = 0; i < numClasses - 1; i++)
            {
                bias[i]         = Rand.Normal();
                coefficients[i] = Vector.Zero(numFeatures);
                Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
            }
            bias[numClasses - 1]         = 0;
            coefficients[numClasses - 1] = Vector.Zero(numFeatures);
            var noiseDistribution = new VectorGaussian(Vector.Zero(numClasses), PositiveDefiniteMatrix.IdentityScaledBy(numClasses, noiseVar));

            for (int i = 0; i < numSamples; i++)
            {
                features[i] = Vector.Zero(numFeatures);
                Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);
                var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
                if (noiseVar != 0.0)
                {
                    temp += noiseDistribution.Sample();
                }
                var p = MMath.Softmax(temp + bias);
                counts[i] = Rand.Multinomial(countPerSample, p);
            }

            IList <VectorGaussian> weightsPost;
            IList <Gaussian>       biasPost;
            bool   trackLowerBound = true;
            double ev = MultinomialRegression(features, counts, out weightsPost, out biasPost, trackLowerBound);

            if (trackLowerBound)
            {
                Console.WriteLine("Log lower bound= " + ev);
            }
            double error = 0;

            Console.WriteLine("Weights -------------- ");
            for (int i = 0; i < numClasses; i++)
            {
                var bMean = weightsPost[i].GetMean();
                error += (bMean - coefficients[i]).Sum(o => o * o);
                Console.WriteLine("Class " + i + " True " + coefficients[i]);
                Console.WriteLine("Class " + i + " Inferred " + bMean);
            }
            error = System.Math.Sqrt(error / (numClasses * numFeatures));
            Console.WriteLine("RMSE " + error);
            Console.WriteLine("Bias -------------- ");
            Console.WriteLine("True " + bias);
            Console.WriteLine("Inferred " + Vector.FromArray(biasPost.Select(o => o.GetMean()).ToArray()));
            return(error);
        }
Example #5
        public ISuff GetSuffStat()
        {
            Vector moment1 = GetMeanVector();
            double m2      = variance + mean * mean;
            PositiveDefiniteMatrix moment2 = PositiveDefiniteMatrix.IdentityScaledBy(1, m2);
            SuffStat s = new SuffStat(moment1, moment2);

            return(s);
        }
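The second moment here uses the identity E[x^2] = Var(x) + E[x]^2; the same construction as a standalone sketch (the moment values are assumptions):

        double mean = 1.5, variance = 0.25;            // hypothetical first moment and variance
        double m2 = variance + mean * mean;            // E[x^2] = Var(x) + E[x]^2 = 2.5
        // A 1x1 positive definite matrix holding the scalar second moment
        PositiveDefiniteMatrix moment2 = PositiveDefiniteMatrix.IdentityScaledBy(1, m2);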
Example #6
        public void Run()
        {
            // Define a range for the number of mixture components
            Range k = new Range(2).Named("k");

            // Mixture component means
            VariableArray <Vector> means = Variable.Array <Vector>(k).Named("means");

            means[k] = Variable.VectorGaussianFromMeanAndPrecision(
                Vector.FromArray(0.0, 0.0),
                PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture component precisions
            VariableArray <PositiveDefiniteMatrix> precs = Variable.Array <PositiveDefiniteMatrix>(k).Named("precs");

            precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture weights
            Variable <Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");

            // Create a variable array which will hold the data
            Range n = new Range(300).Named("n");
            VariableArray <Vector> data = Variable.Array <Vector>(n).Named("x");

            // Create latent indicator variable for each data point
            VariableArray <int> z = Variable.Array <int>(n).Named("z");

            // The mixture of Gaussians model
            using (Variable.ForEach(n))
            {
                z[n] = Variable.Discrete(weights);
                using (Variable.Switch(z[n]))
                {
                    data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
                }
            }

            // Attach some generated data
            data.ObservedValue = GenerateData(n.SizeAsInt);

            // Initialise messages randomly so as to break symmetry
            Discrete[] zinit = new Discrete[n.SizeAsInt];
            for (int i = 0; i < zinit.Length; i++)
            {
                zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
            }

            z.InitialiseTo(Distribution <int> .Array(zinit));

            // The inference
            InferenceEngine ie = new InferenceEngine();

            Console.WriteLine("Dist over pi=" + ie.Infer(weights));
            Console.WriteLine("Dist over means=\n" + ie.Infer(means));
            Console.WriteLine("Dist over precs=\n" + ie.Infer(precs));
        }
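GenerateData is not shown here; a hypothetical implementation that draws the 300 points from a fixed two-component mixture (the component parameters and 0.6 mixing weight are assumptions, not the original values; the two-argument overload used in later examples additionally takes the mixing weight):

        private static Vector[] GenerateData(int nData)
        {
            // Assumed ground-truth mixture: two 2-D Gaussians mixed 60/40
            Vector trueM1 = Vector.FromArray(2.0, 3.0);
            Vector trueM2 = Vector.FromArray(7.0, 5.0);
            PositiveDefiniteMatrix trueP1 = new PositiveDefiniteMatrix(new double[,] { { 3.0, 0.2 }, { 0.2, 2.0 } });
            PositiveDefiniteMatrix trueP2 = new PositiveDefiniteMatrix(new double[,] { { 2.0, 0.4 }, { 0.4, 4.0 } });
            VectorGaussian trueVG1 = VectorGaussian.FromMeanAndPrecision(trueM1, trueP1);
            VectorGaussian trueVG2 = VectorGaussian.FromMeanAndPrecision(trueM2, trueP2);
            Bernoulli trueB = new Bernoulli(0.6);
            Vector[] data = new Vector[nData];
            for (int j = 0; j < nData; j++)
            {
                // Draw the component indicator, then sample from that component
                data[j] = trueB.Sample() ? trueVG1.Sample() : trueVG2.Sample();
            }
            return data;
        }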
Example #7
        public double MulticlassRegressionSynthetic(int numSamples, object softmaxOperator, out int iterations, out double lowerBound, double noiseVar = 0.0)
        {
            int numFeatures  = 6;
            int numClasses   = 4;
            var features     = new Vector[numSamples];
            var counts       = new int[numSamples];
            var coefficients = new Vector[numClasses];
            var mean         = Vector.Zero(numClasses);

            for (int i = 0; i < numClasses - 1; i++)
            {
                mean[i]         = Rand.Normal();
                coefficients[i] = Vector.Zero(numFeatures);
                Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), coefficients[i]);
            }
            mean[numClasses - 1]         = 0;
            coefficients[numClasses - 1] = Vector.Zero(numFeatures);
            var noiseDistribution = new VectorGaussian(Vector.Zero(numClasses), PositiveDefiniteMatrix.IdentityScaledBy(numClasses, noiseVar));

            for (int i = 0; i < numSamples; i++)
            {
                features[i] = Vector.Zero(numFeatures);
                Rand.Normal(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures), features[i]);
                var temp = Vector.FromArray(coefficients.Select(o => o.Inner(features[i])).ToArray());
                if (noiseVar != 0.0)
                {
                    temp += noiseDistribution.Sample();
                }
                var p = MMath.Softmax(temp + mean);
                counts[i] = Rand.Sample(p);
            }

            Rand.Restart(DateTime.Now.Millisecond);
            VectorGaussian[] bPost;
            Gaussian[]       meanPost;
            iterations = MulticlassRegression(features, counts, numClasses, out bPost, out meanPost, out lowerBound, softmaxOperator, true);
            var    bMeans = bPost.Select(o => o.GetMean()).ToArray();
            var    bVars  = bPost.Select(o => o.GetVariance()).ToArray();
            double error  = 0;

            Console.WriteLine("Coefficients -------------- ");
            for (int i = 0; i < numClasses; i++)
            {
                error += (bMeans[i] - coefficients[i]).Sum(o => o * o);
                Console.WriteLine("True " + coefficients[i]);
                Console.WriteLine("Inferred " + bMeans[i]);
            }
            Console.WriteLine("Mean -------------- ");
            Console.WriteLine("True " + mean);
            Console.WriteLine("Inferred " + Vector.FromArray(meanPost.Select(o => o.GetMean()).ToArray()));

            error = System.Math.Sqrt(error / (numClasses * numFeatures));
            Console.WriteLine(numSamples + " " + error);
            return(error);
        }
Example #8
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SumVectorGaussianOp"]/message_doc[@name="SumAverageConditional(IList{VectorGaussian}, VectorGaussian)"]/*'/>
        public static VectorGaussian SumAverageConditional([SkipIfAnyUniform] IList <VectorGaussian> array, VectorGaussian result)
        {
            if (array == null)
            {
                throw new ArgumentNullException(nameof(array));
            }

            if (result == null)
            {
                throw new ArgumentNullException(nameof(result));
            }

            if (array.Count < 1)
            {
                result.Point = Vector.Zero(result.Dimension);
                return(result);
            }

            if (array.Any(element => element == null))
            {
                throw new ArgumentNullException(nameof(array));
            }

            int dimension = result.Dimension;

            if (array.Any(element => element.Dimension != dimension))
            {
                throw new ArgumentException("The result and all elements of the array must have the same number of dimensions.");
            }

            var sumMean         = Vector.Zero(dimension);
            var sumVariance     = PositiveDefiniteMatrix.IdentityScaledBy(dimension, 0);
            var elementMean     = Vector.Zero(dimension);
            var elementVariance = PositiveDefiniteMatrix.Identity(dimension);

            foreach (var element in array)
            {
                if (!element.IsProper())
                {
                    return(element);
                }

                element.GetMeanAndVariance(elementMean, elementVariance);

                sumMean.SetToSum(sumMean, elementMean);
                sumVariance.SetToSum(sumVariance, elementVariance);
            }

            result.SetMeanAndVariance(sumMean, sumVariance);

            return(result);
        }
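A minimal usage sketch for SumAverageConditional (the input distributions are assumptions): summing independent VectorGaussians adds their means and variances.

        var a = VectorGaussian.FromMeanAndVariance(Vector.FromArray(1.0, 2.0), PositiveDefiniteMatrix.Identity(2));
        var b = VectorGaussian.FromMeanAndVariance(Vector.FromArray(-1.0, 0.5), PositiveDefiniteMatrix.IdentityScaledBy(2, 2.0));
        // Expect mean (0, 2.5) and variance 3*I
        var message = SumAverageConditional(new[] { a, b }, new VectorGaussian(2));
        Console.WriteLine(message);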
Example #9
        /// <summary>
        /// The entry point of the program, where the program control starts and ends.
        /// </summary>
        public static void Main()
        {
            const int  T = 100;
            const int  K = 2;
            const int  N = 5;
            const bool showFactorGraph = false;

            TestHMM <ContinousHMM, double, double, Gaussian, Gaussian, double, Gamma, double>(
                T,
                K,
                1,
                Gaussian.FromMeanAndPrecision,
                () => Gaussian.FromMeanAndVariance(0, 1000),
                () => Gamma.FromShapeAndScale(1000, 0.001),
                showFactorGraph);

            // TModel, TEmit, TEmitDist, TEmitMeanDist, TEmitMean, TEmitPrecDist, TEmitPrec
            TestHMM <MultivariateHMM, Vector, Vector, VectorGaussian, VectorGaussian, Vector, Wishart, PositiveDefiniteMatrix>(
                T,
                K,
                1,
                VectorGaussian.FromMeanAndPrecision,
                () => VectorGaussian.FromMeanAndVariance(Vector.Zero(N), PositiveDefiniteMatrix.IdentityScaledBy(N, 1000)),
                () => Wishart.FromShapeAndScale(N, PositiveDefiniteMatrix.IdentityScaledBy(N, 0.001)),
                showFactorGraph);

            TestHMM <BinaryHMM, bool, double, Bernoulli, Beta, double, Beta, double>(
                T,
                K,
                1,
                (m, p) => new Bernoulli(m),
                () => new Beta(1, 1),
                null,
                showFactorGraph);

            TestHMM <DiscreteHMM, int, double, Discrete, Dirichlet, Vector, Dirichlet, Vector>(
                T,
                K,
                N,
                (m, p) => new Discrete(m),
                () => Dirichlet.Uniform(N),
                null,
                showFactorGraph);

            // TestBinaryHiddenMarkovModel();
            // TestDiscreteHiddenMarkovModel();
            // TestMultivariateHMM();
        }
Example #10
        /// <summary>
        /// Sets the priors of CBCC.
        /// </summary>
        /// <param name="workerCount">The number of workers.</param>
        /// <param name="priors">The priors.</param>
        protected override void SetPriors(int workerCount, BCCPosteriors priors)
        {
            int communityCount = m.SizeAsInt;
            int labelCount     = c.SizeAsInt;

            WorkerCount.ObservedValue = workerCount;
            NoiseMatrix.ObservedValue = PositiveDefiniteMatrix.IdentityScaledBy(labelCount, NoisePrecision);
            CBCCPosteriors cbccPriors = (CBCCPosteriors)priors;

            if (cbccPriors == null || cbccPriors.BackgroundLabelProb == null)
            {
                BackgroundLabelProbPrior.ObservedValue = Dirichlet.Uniform(labelCount);
            }
            else
            {
                BackgroundLabelProbPrior.ObservedValue = cbccPriors.BackgroundLabelProb;
            }

            if (cbccPriors == null || cbccPriors.CommunityProb == null)
            {
                CommunityProbPrior.ObservedValue = CommunityProbPriorObserved;
            }
            else
            {
                CommunityProbPrior.ObservedValue = cbccPriors.CommunityProb;
            }

            if (cbccPriors == null || cbccPriors.CommunityScoreMatrix == null)
            {
                CommunityScoreMatrixPrior.ObservedValue = CommunityScoreMatrixPriorObserved;
            }
            else
            {
                CommunityScoreMatrixPrior.ObservedValue = cbccPriors.CommunityScoreMatrix;
            }

            if (cbccPriors == null || cbccPriors.TrueLabelConstraint == null)
            {
                TrueLabelConstraint.ObservedValue = Util.ArrayInit(TaskCount, t => Discrete.Uniform(labelCount));
            }
            else
            {
                TrueLabelConstraint.ObservedValue = cbccPriors.TrueLabelConstraint;
            }
        }
Example #11
        private static void GenerateRandomMixtureComponent(
            MicrosoftResearch.Infer.Maths.Vector min, MicrosoftResearch.Infer.Maths.Vector max, out MicrosoftResearch.Infer.Maths.Vector mean, out PositiveDefiniteMatrix covariance)
        {
            Debug.Assert(min != null && max != null);
            Debug.Assert(min.Count == max.Count);

            MicrosoftResearch.Infer.Maths.Vector diff = max - min;

            mean = MicrosoftResearch.Infer.Maths.Vector.Zero(min.Count);
            for (int i = 0; i < min.Count; ++i)
            {
                mean[i] = min[i] + diff[i] * Random.Double();
            }

            // Scale the identity by ||max - min||^2 / 16, i.e. a standard deviation of a
            // quarter of the bounding-box diagonal in each dimension.
            covariance = PositiveDefiniteMatrix.IdentityScaledBy(
                min.Count,
                MicrosoftResearch.Infer.Maths.Vector.InnerProduct(diff, diff) / 16);
        }
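A usage sketch (the bounds are hypothetical):

        var min = MicrosoftResearch.Infer.Maths.Vector.FromArray(0.0, 0.0);
        var max = MicrosoftResearch.Infer.Maths.Vector.FromArray(10.0, 10.0);
        MicrosoftResearch.Infer.Maths.Vector mean;
        PositiveDefiniteMatrix covariance;
        GenerateRandomMixtureComponent(min, max, out mean, out covariance);
        Console.WriteLine("mean = {0}\ncovariance =\n{1}", mean, covariance);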
Example #12
        public static VectorGaussian AAverageConditional(
            Vector sumMean, PositiveDefiniteMatrix sumVariance,
            Vector bMean, PositiveDefiniteMatrix bVariance,
            VectorGaussian result)
        {
            if (result == null)
            {
                throw new ArgumentNullException(nameof(result));
            }

            int dimension = result.Dimension;

            var aMean     = Vector.Zero(dimension);
            var aVariance = PositiveDefiniteMatrix.IdentityScaledBy(dimension, 0);

            aMean.SetToDifference(sumMean, bMean);
            aVariance.SetToSum(sumVariance, bVariance);

            result.SetMeanAndVariance(aMean, aVariance);
            return(result);
        }
Example #13
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="SumVectorGaussianOp"]/message_doc[@name="ArrayAverageConditional{TVectorGaussianList}(VectorGaussian, VectorGaussian, IList{VectorGaussian}, TVectorGaussianList)"]/*'/>
        public static TVectorGaussianList ArrayAverageConditional <TVectorGaussianList>(
            [SkipIfUniform] VectorGaussian sum,
            [Fresh] VectorGaussian to_sum,
            IList <VectorGaussian> array,
            TVectorGaussianList result)
            where TVectorGaussianList : IList <VectorGaussian>, SettableToUniform
        {
            // Check inputs for consistency
            int dimension = CheckArgumentConsistency(sum, to_sum, array, result);

            if (array.Count == 0)
            {
                return(result);
            }

            // It is tempting to put SkipIfAllUniform on array but this isn't correct if the array has one element
            if (array.Count == 1)
            {
                result[0].SetTo(sum);
                return(result);
            }

            if (!sum.IsProper())
            {
                foreach (VectorGaussian element in result)
                {
                    element.SetTo(sum);
                }

                return(result);
            }

            var elementMean     = Vector.Zero(dimension);
            var elementVariance = PositiveDefiniteMatrix.Identity(dimension);

            // Check if an element of the array is uniform
            int indexOfUniform = -1;

            for (int i = 0; i < array.Count; i++)
            {
                array[i].GetMeanAndVariance(elementMean, elementVariance);

                // Instead of testing IsUniform, we test the stricter requirement that all diagonal
                // elements of the variance are infinite, due to the way the computations are done
                if (IsUniform(elementVariance))
                {
                    if (indexOfUniform >= 0)
                    {
                        // More than one element of array is uniform
                        result.SetToUniform();
                        return(result);
                    }

                    indexOfUniform = i;
                }
            }

            Vector sumMean = Vector.Zero(dimension);
            PositiveDefiniteMatrix sumVariance = PositiveDefiniteMatrix.Identity(dimension);

            sum.GetMeanAndVariance(sumMean, sumVariance);

            Vector totalMean = Vector.Zero(sum.Dimension);
            PositiveDefiniteMatrix totalVariance = PositiveDefiniteMatrix.IdentityScaledBy(sum.Dimension, 0.0);

            if (indexOfUniform >= 0)
            {
                // Exactly one element of array is uniform
                for (int i = 0; i < array.Count; i++)
                {
                    if (i == indexOfUniform)
                    {
                        continue;
                    }

                    array[i].GetMeanAndVariance(elementMean, elementVariance);
                    totalMean.SetToSum(totalMean, elementMean);
                    totalVariance.SetToSum(totalVariance, elementVariance);
                    result[i].SetToUniform();
                }

                // totalMean = sum_{i except indexOfUniform} array[i].GetMean()
                // totalVariance = sum_{i except indexOfUniform} array[i].GetVariance()
                totalMean.SetToDifference(sumMean, totalMean);
                totalVariance.SetToSum(sumVariance, totalVariance);
                result[indexOfUniform].SetMeanAndVariance(totalMean, totalVariance);
                return(result);
            }

            // At this point, the array has no uniform elements

            // Get the mean and variance of sum of all Gaussians
            to_sum.GetMeanAndVariance(totalMean, totalVariance);

            // Subtract it off from the mean and variance of the incoming Gaussian from Sum
            totalMean.SetToDifference(sumMean, totalMean);
            totalVariance.SetToSum(totalVariance, sumVariance);

            for (int i = 0; i < array.Count; i++)
            {
                array[i].GetMeanAndVariance(elementMean, elementVariance);
                elementMean.SetToSum(elementMean, totalMean);
                elementVariance.SetToDifference(totalVariance, elementVariance);
                result[i].SetMeanAndVariance(elementMean, elementVariance);
            }

            return(result);
        }
Example #14
 public Matrix GetCovarianceMatrix()
 {
     return(PositiveDefiniteMatrix.IdentityScaledBy(1, gam.GetVariance()));
 }
Example #15
        /// <summary>
        /// Run our VB implementation of the Semi Parametric Latent Factor Model of
        /// Teh, Y., Seeger, M., and Jordan, M. (AISTATS 2005).
        /// </summary>
        public SPLFM_VMP RunSPLFM_VMP(Vector[] inputs,
                                      double[,] data,
                                      bool[,] isMissing,
                                      Settings settings,
                                      string[] errorMeasureNames = null,
                                      Converter <IPredictionSPLFMModel, double[]> modelAssessor = null,
                                      string swfn = null)
        {
            var model = new SPLFM_VMP();

            var nodeOptimiser = new KernelOptimiser(settings);

            nodeOptimiser.xData = inputs;

            nodeOptimiser.kernel           = ObjectCloner.Clone(settings.node_kernel);
            nodeOptimiser.hypersToOptimise = settings.nodeHypersToOptimise;

            var nodeFunctionsInit = Enumerable.Range(0, settings.Q).Select(i =>
                                                                           VectorGaussian.FromMeanAndVariance(
                                                                               VectorGaussian.Sample(Vector.Zero(data.GetLength(1)), PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), 100)),
                                                                               PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), settings.init_precision))).ToArray(); // should put this manually in generated code
            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            double inputsRange
                = inputs.Select(i => i[0]).Max() - inputs.Select(i => i[0]).Min();

            Console.WriteLine("Init node kernel {0}", settings.node_kernel);

            model.SetObservedValue("D", data.GetLength(0));
            model.SetObservedValue("Q", settings.Q);
            model.SetObservedValue("N", data.GetLength(1));
            model.SetObservedValue("observedData", data);
            model.SetObservedValue("nodeFunctionsInitVar", distArray);
            model.SetObservedValue("K_node_inverse", Utils.GramMatrix(nodeOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("noisePrecisionPrior", settings.noisePrecisionPrior);
            //model.SetObservedValue("nodeNoisePrecisionPrior", settings.nodeNoisePrecisionPrior);
            model.SetObservedValue("nodeSignalPrecisionsPrior", Enumerable.Range(0, settings.Q).Select(o => settings.nodeSignalPrecisionsPrior).ToArray());
            model.SetObservedValue("isMissing", isMissing);

            model.nodeKernelOptimiser = nodeOptimiser;

            model.Reset();

            var start = DateTime.Now;

            if (swfn != null)
            {
                using (var sw = new StreamWriter(swfn, true))
                {
                    sw.Write("{0} {1} {2}", "it", "time", "ml");
                    if (errorMeasureNames != null)
                    {
                        sw.Write(" " + errorMeasureNames.Aggregate((p, q) => p + " " + q));
                    }
                    sw.Write(" " + Utils.KernelHyperNames(nodeOptimiser.kernel).Select(o => "node_" + o).Aggregate((p, q) => p + " " + q));

                    sw.Write(" noise");
                    for (int i = 0; i < settings.Q; i++)
                    {
                        sw.Write(" signal" + i);
                    }
                    sw.WriteLine();
                }
            }

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < settings.max_iterations; it++)
            {
                model.Update(1);
                ml = model.Marginal <Bernoulli>("ev").LogOdds;

                var noisePrecisionPost = model.Marginal <Gamma>("noisePrecision");

                var assessment = (modelAssessor != null) ? modelAssessor(model).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q) : "";

                Console.WriteLine("It " + it + " node " + nodeOptimiser.kernel + " ml " + ml + " err  " + assessment);
                if (Math.Abs(oldML - ml) < settings.ml_tolerance)
                {
                    break;
                }

                oldML = ml;

                if (swfn != null)
                {
                    using (var sw = new StreamWriter(swfn, true))
                    {
                        var nodeSignalPrecisionsPost = model.Marginal <Gamma[]>("nodeSignalPrecisions");

                        sw.Write("{0} {1} {2}", it, (DateTime.Now - start).TotalMilliseconds, ml);
                        if (modelAssessor != null)
                        {
                            sw.Write(" " + assessment);
                        }
                        sw.Write(" " + Utils.KernelToArray(nodeOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + noisePrecisionPost.GetMeanInverse());
                        for (int i = 0; i < settings.Q; i++)
                        {
                            sw.Write(" " + nodeSignalPrecisionsPost[i].GetMeanInverse());
                        }
                        sw.WriteLine();
                    }
                }
            }


            Console.WriteLine("Finished after " + it);

            return(model);
        }
Example #16
        public void MixtureOfMultivariateGaussians()
        {
            // Define a range for the number of mixture components
            Range k = new Range(2).Named("k");

            // Mixture component means
            VariableArray <Vector> means = Variable.Array <Vector>(k).Named("means");

            means[k] = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(2), PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture component precisions
            VariableArray <PositiveDefiniteMatrix> precs = Variable.Array <PositiveDefiniteMatrix>(k).Named("precs");

            precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture weights
            Variable <Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");

            // Create a variable array which will hold the data
            Range n = new Range(300).Named("n");
            VariableArray <Vector> data = Variable.Array <Vector>(n).Named("x");
            // Create latent indicator variable for each data point
            VariableArray <int> z = Variable.Array <int>(n).Named("z");

            // The mixture of Gaussians model
            using (Variable.ForEach(n))
            {
                z[n] = Variable.Discrete(weights);
                using (Variable.Switch(z[n]))
                {
                    data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
                }
            }

            // Attach some generated data
            double truePi = 0.6;

            data.ObservedValue = GenerateData(n.SizeAsInt, truePi);

            // Initialise messages randomly so as to break symmetry
            Discrete[] zinit = new Discrete[n.SizeAsInt];
            for (int i = 0; i < zinit.Length; i++)
            {
                zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
            }
            z.InitialiseTo(Distribution <int> .Array(zinit));

            // The inference
            InferenceEngine ie = new InferenceEngine();

            ie.Algorithm = new VariationalMessagePassing();
            //ie.Compiler.GenerateInMemory = false;
            //ie.NumberOfIterations = 200;
            Dirichlet wDist    = (Dirichlet)ie.Infer(weights);
            Vector    wEstMean = wDist.GetMean();

            object meansActual = ie.Infer(means);

            Console.WriteLine("means = ");
            Console.WriteLine(meansActual);
            var precsActual = ie.Infer <IList <Wishart> >(precs);

            Console.WriteLine("precs = ");
            Console.WriteLine(precsActual);
            Console.WriteLine("w = {0} should be {1}", wEstMean, Vector.FromArray(truePi, 1 - truePi));
            //Console.WriteLine(StringUtil.JoinColumns("z = ", ie.Infer(z)));
            Assert.True(
                MMath.AbsDiff(wEstMean[0], truePi) < 0.05 ||
                MMath.AbsDiff(wEstMean[1], truePi) < 0.05);
        }
Example #17
        static void Main(string[] args)
        {
            // x_0, x_1
            double[][] data = new double[40][] {
                new double []   { 1.579866, 5.113884 },
                new double []   { 2.596718, 5.731915 },
                new double []   { 2.250065, 5.431592 },
                new double []   { 1.670613, 4.528363 },
                new double []   { 1.819446, 4.941831 },
                new double []   { 2.662797, 5.065860 },
                new double []   { 1.999470, 5.029127 },
                new double []   { 0.871543, 4.512580 },
                new double []   { 1.751953, 4.926783 },
                new double []   { 2.765513, 5.743687 },
                new double []   { 1.855384, 4.789793 },
                new double []   { 3.096498, 6.281974 },
                new double []   { 2.071130, 4.893247 },
                new double []   { 1.920525, 4.957070 },
                new double []   { 1.040348, 3.779229 },
                new double []   { 2.128256, 4.971110 },
                new double []   { 1.865504, 4.909423 },
                new double []   { 2.214581, 5.388266 },
                new double []   { 2.679222, 5.509920 },
                new double []   { 2.511456, 5.758537 },
                new double []   { 1.373862, 3.160759 },
                new double []   { 1.089823, 3.321712 },
                new double []   { 1.179304, 3.063109 },
                new double []   { 1.222694, 3.319575 },
                new double []   { 1.583625, 3.028068 },
                new double []   { 1.813981, 2.564715 },
                new double []   { 1.764641, 2.498540 },
                new double []   { 1.942168, 2.343563 },
                new double []   { 1.496726, 3.097716 },
                new double []   { 1.248391, 3.171456 },
                new double []   { 1.167128, 3.165683 },
                new double []   { 1.776658, 2.666594 },
                new double []   { 1.024596, 3.529081 },
                new double []   { 1.495685, 3.055696 },
                new double []   { 0.863904, 3.651468 },
                new double []   { 1.814797, 2.607492 },
                new double []   { 1.796724, 2.754365 },
                new double []   { 0.927581, 3.627839 },
                new double []   { 1.544352, 2.947286 },
                new double []   { 1.400211, 3.180880 }
            };

            Vector[] normalData    = new Vector[20];
            Vector[] anomalousData = new Vector[20];

            for (int i = 0; i < 20; i++)
            {
                double[] input = new double[2] {
                    data[i][0], data[i][1]
                };
                normalData[i] = Vector.FromArray(input);
            }

            for (int i = 0; i < 20; i++)
            {
                double[] input = new double[2] {
                    data[i + 20][0], data[i + 20][1]   // rows 20..39 hold the anomalous points
                };
                anomalousData[i] = Vector.FromArray(input);
            }

            Range k = new Range(2);

            VariableArray <Vector> means = Variable.Array <Vector>(k);

            means[k] = Variable.VectorGaussianFromMeanAndPrecision(Vector.FromArray(0.0, 0.0), PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            VariableArray <PositiveDefiniteMatrix> precs = Variable.Array <PositiveDefiniteMatrix>(k);

            precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // define the Dirichlet prior over the normal data points (concentrate on class 0)
            Variable <Vector> weightsNormalPoints = Variable.Dirichlet(k, new double[] { 1, 0.1 });

            // define range for the normal data
            Range n = new Range(20);

            // define the x Gaussian random variables we will observe as normal data points
            VariableArray <Vector> xNormalPoints = Variable.Array <Vector>(n);

            // define latent z for all the normal data points
            VariableArray <int> zNormalPoints = Variable.Array <int>(n);

            using (Variable.ForEach(n))
            {
                zNormalPoints[n] = Variable.Discrete(weightsNormalPoints);

                using (Variable.Switch(zNormalPoints[n]))
                {
                    xNormalPoints[n] = Variable.VectorGaussianFromMeanAndPrecision(means[zNormalPoints[n]], precs[zNormalPoints[n]]);
                }
            }

            // define range for the anomalous data
            Range m = new Range(20);

            // define the x Gaussian random variables we will observe as anomalous data points
            VariableArray <Vector> xAnomalousPoints = Variable.Array <Vector>(m);

            // define the Dirichlet prior over the anomalous data points (concentrate on class 1)
            Variable <Vector> weightsAnomalousPoints = Variable.Dirichlet(k, new double[] { 0.1, 1 });

            // define latent z for all the anomalous data points
            VariableArray <int> zAnomalousPoints = Variable.Array <int>(m);

            using (Variable.ForEach(m))
            {
                zAnomalousPoints[m] = Variable.Discrete(weightsAnomalousPoints);

                using (Variable.Switch(zAnomalousPoints[m]))
                {
                    xAnomalousPoints[m] = Variable.VectorGaussianFromMeanAndPrecision(means[zAnomalousPoints[m]], precs[zAnomalousPoints[m]]);
                }
            }

            xNormalPoints.ObservedValue    = normalData;
            xAnomalousPoints.ObservedValue = anomalousData;

            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            //var weightsPost = ie.Infer<Dirichlet>(weightsNormalPoints);
            //var anomalyWeightsPost = ie.Infer<Dirichlet>(weightsAnomalousPoints);
            //var meansPost = ie.Infer<VectorGaussian[]>(means);
            //var precsPost = ie.Infer<Wishart[]>(precs);

            //Console.WriteLine("Dist over pi=" + weightsPost.GetMean());
            //Console.WriteLine("Dist over pi=" + anomalyWeightsPost.GetMean());
            //Console.WriteLine("Dist over means 0 =\n" + meansPost[0]);
            //Console.WriteLine("Dist over means 1 =\n" + meansPost[1]);
            //Console.WriteLine("Dist over precs 0 =\n" + precsPost[0].GetMean().Inverse());
            //Console.WriteLine("Dist over precs 1 =\n" + precsPost[1].GetMean().Inverse());


            // now let's try making a prediction with an "unseen" data point x
            Range xn = new Range(1);

            VariableArray <Vector> xNew = Variable.Array <Vector>(xn);

            VariableArray <int> zNew = Variable.Array <int>(xn);

            // define a uniform Dirichlet prior over the latent z
            Variable <Vector> zNewWeights = Variable.Dirichlet(k, new double[] { 1, 1 });

            using (Variable.ForEach(xn))
            {
                zNew[xn] = Variable.Discrete(zNewWeights);

                using (Variable.Switch(zNew[xn]))
                {
                    xNew[xn] = Variable.VectorGaussianFromMeanAndPrecision(means[zNew[xn]], precs[zNew[xn]]);
                }
            }

            Vector[] xNewData = new Vector[1];

            for (int i = 0; i < 1; i++)
            {
                double[] input = new double[2] {
                    3, 6
                };
                xNewData[i] = Vector.FromArray(input);
            }

            xNew.ObservedValue = xNewData;

            var zNewPost = ie.Infer <Discrete[]>(zNew);

            Console.WriteLine(zNewPost[0]);
        }
Example #18
        /// <summary>
        /// Run GPRN without node noise
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="settings">Algorithm settings</param>
        /// <param name="swfn">Filename for logging</param>
        /// <returns>Fitted model</returns>
        public GPRN_VMP NetworkModelCA(Vector[] inputs,
                                       double[,] data,
                                       Settings settings,
                                       string swfn = null)
        {
            bool anyIsMissing = false; // AnyIsMissing(isMissing);

            var model = new GPRN_VMP();

            var nodeOptimiser   = new KernelOptimiser(settings);
            var weightOptimiser = new KernelOptimiser(settings);

            nodeOptimiser.xData   = inputs;
            weightOptimiser.xData = inputs;

            nodeOptimiser.kernel             = ObjectCloner.Clone(settings.node_kernel);
            nodeOptimiser.hypersToOptimise   = settings.nodeHypersToOptimise;
            weightOptimiser.kernel           = ObjectCloner.Clone(settings.weight_kernel);
            weightOptimiser.hypersToOptimise = settings.weightHypersToOptimise;

            var nodeFunctionsInit = Enumerable.Range(0, settings.Q).Select(i =>
                                                                           VectorGaussian.FromMeanAndVariance(
                                                                               VectorGaussian.Sample(Vector.Zero(data.GetLength(1)), PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), 100)),
                                                                               PositiveDefiniteMatrix.IdentityScaledBy(data.GetLength(1), settings.init_precision))).ToArray(); // should put this manually in generated code
            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            double inputsRange
                = inputs.Select(i => i[0]).Max() - inputs.Select(i => i[0]).Min();

            Console.WriteLine("Init node kernel {0}\ninit weight kernel {1}", settings.node_kernel, settings.weight_kernel);

            model.SetObservedValue("D", data.GetLength(0));
            model.SetObservedValue("Q", settings.Q);
            model.SetObservedValue("N", data.GetLength(1));
            model.SetObservedValue("observedData", data);
            model.SetObservedValue("nodeFunctionsInitVar", distArray);
            model.SetObservedValue("K_node_inverse", Utils.GramMatrix(nodeOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("K_weights_inverse", Utils.GramMatrix(weightOptimiser.kernel, inputs).Inverse());
            model.SetObservedValue("noisePrecisionPrior", settings.noisePrecisionPrior);
            //model.SetObservedValue("nodeNoisePrecisionPrior", settings.nodeNoisePrecisionPrior);
            model.SetObservedValue("nodeSignalPrecisionsPrior", settings.nodeSignalPrecisionsPrior);
            //model.SetObservedValue("isMissing", isMissing);

            model.nodeKernelOptimiser   = nodeOptimiser;
            model.weightKernelOptimiser = weightOptimiser;

            model.Reset();

            var start = DateTime.Now;

            if (swfn != null)
            {
                using (var sw = new StreamWriter(swfn, true))
                {
                    sw.Write("{0} {1} {2}", "it", "time", "ml");
                    if (anyIsMissing)
                    {
                        sw.Write(" {0} {1}", "logProb", "error");
                    }
                    sw.Write(" " + Utils.KernelHyperNames(nodeOptimiser.kernel).Select(o => "node_" + o).Aggregate((p, q) => p + " " + q));
                    sw.Write(" " + Utils.KernelHyperNames(weightOptimiser.kernel).Select(o => "weight_" + o).Aggregate((p, q) => p + " " + q));
                    sw.Write(" noise");
                    for (int i = 0; i < settings.Q; i++)
                    {
                        sw.Write(" signal" + i);
                    }

                    sw.WriteLine();
                }
            }

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < settings.max_iterations; it++)
            {
                model.Update(1);
                ml = model.Marginal <Bernoulli>("ev").LogOdds;
                var    noisePrecisionPost = model.Marginal <Gamma>("noisePrecision");
                double logProb = 0, error = 0, MSLL = 0, SMSE = 0;

                Console.WriteLine("It {9} Time: {8:G3} Node ls=exp({0:G3})={1:G3} Weight ls=exp({2:G3})={3:G3} ml={4:G3} error={5:G3} msll={6:G3} smse={7:G3}", nodeOptimiser.kernel[0], Math.Exp(nodeOptimiser.kernel[0]),
                                  weightOptimiser.kernel[0], Math.Exp(weightOptimiser.kernel[0]), ml, error, MSLL, SMSE, (DateTime.Now - start).TotalMilliseconds, it);
                if (Math.Abs(oldML - ml) < settings.ml_tolerance)
                {
                    break;
                }

                oldML = ml;

                if (swfn != null)
                {
                    using (var sw = new StreamWriter(swfn, true))
                    {
                        var nodeSignalPrecisionsPost = model.Marginal <Gamma[]>("nodeSignalPrecisions");

                        sw.Write("{0} {1} {2}", it, (DateTime.Now - start).TotalMilliseconds, ml);
                        if (anyIsMissing)
                        {
                            sw.Write(" {0} {1}", logProb, error);
                        }
                        sw.Write(" " + Utils.KernelToArray(nodeOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + Utils.KernelToArray(weightOptimiser.kernel).Select(o => o.ToString()).Aggregate((p, q) => p + " " + q));
                        sw.Write(" " + noisePrecisionPost.GetMeanInverse());
                        for (int i = 0; i < settings.Q; i++)
                        {
                            sw.Write(" " + nodeSignalPrecisionsPost[i].GetMeanInverse());
                        }
                        sw.WriteLine();
                    }
                }
            }


            Console.WriteLine("Finished after " + it);

            return(model);
        }
Example #19
        /// <summary>
        /// An implementation of GPRN specialised for one step look ahead multivariate volatility experiments
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <returns>Predicted covariance for the next time point</returns>
        public VectorGaussian GPRN_MultivariateVolatility(
            Vector[] inputs,
            double[,] data,
            double[] nodeSignalPrecs,
            double[] nodeNoisePrecs,
            double obsNoisePrec,
            ref VectorGaussian[] finit,
            ref VectorGaussian[,] winit,
            KernelFunction nodeKernel,
            KernelFunction weightKernel)
        {
            var missing = new bool[data.GetLength(0), data.GetLength(1)];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                missing[i, data.GetLength(1) - 1] = true; // last data point is missing
            }
            int Q = nodeSignalPrecs.Length;

            var toInfer   = new List <IVariable>();
            var K_node    = Utils.GramMatrix(nodeKernel, inputs);
            var K_weights = Utils.GramMatrix(weightKernel, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");

            nodeSignalPrecisions.ObservedValue = nodeSignalPrecs;

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");

            nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            var nodeNoisePrecisions = Variable.Array <double>(q).Named("nodeNoisePrecisions");

            nodeNoisePrecisions.ObservedValue = nodeNoisePrecs;
            nodeFunctionValues[q][n]          = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues           = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);

            weightFunctionValuesPredictive[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData   = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecision = Variable.Observed(obsNoisePrec).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            var finitNew = finit.Select(i => Utils.extendByOneDimension(i, Gaussian.FromMeanAndVariance(0, 1))).ToArray();

            nodeFunctions.InitialiseTo(Distribution <Vector> .Array(finitNew));

            var winitNew = new VectorGaussian[data.GetLength(0), Q];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                for (int j = 0; j < Q; j++)
                {
                    winitNew[i, j] = Utils.extendByOneDimension(winit[i, j], Gaussian.FromMeanAndVariance(0, 1));
                }
            }

            weightFunctions.InitialiseTo(Distribution <Vector> .Array(winitNew));

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctions, weightFunctions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValuesPredictive                                      /* is this redundant? */
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(nodeKernel, inputs).Inverse());
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(weightKernel, inputs).Inverse());
            ca.Reset();

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 30; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }

            var f = ca.Marginal <Gaussian[][]>("nodeFunctionValuesPredictive");
            var W = ca.Marginal <Gaussian[, ][]>("weightFunctionValuesPredictive");

            finit = ca.Marginal <VectorGaussian[]>(nodeFunctions.NameInGeneratedCode);
            winit = ca.Marginal <VectorGaussian[, ]>(weightFunctions.NameInGeneratedCode);
            return(Utils.CorrelatedPredictionsHelper(f, W, Gamma.PointMass(obsNoisePrec), Q, data.GetLength(0), data.GetLength(1) - 1));
        }
Example #20
        public void MixtureOfMultivariateGaussians()
        {
            // Define a range for the number of mixture components
            Range k = new Range(2).Named("k");

            // Mixture component means
            VariableArray <Vector> means = Variable.Array <Vector>(k).Named("means");

            means[k] = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(2), PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture component precisions
            VariableArray <PositiveDefiniteMatrix> precs = Variable.Array <PositiveDefiniteMatrix>(k).Named("precs");

            precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

            // Mixture weights
            Variable <Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");

            // Create a variable array which will hold the data
            Range n = new Range(300).Named("n");
            VariableArray <Vector> data = Variable.Array <Vector>(n).Named("x");
            // Create latent indicator variable for each data point
            VariableArray <int> z = Variable.Array <int>(n).Named("z");

            // The mixture of Gaussians model
            using (Variable.ForEach(n))
            {
                z[n] = Variable.Discrete(weights);
                using (Variable.Switch(z[n]))
                {
                    data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
                }
            }

            // Attach some generated data (a sketch of GenerateData follows this method)
            double truePi = 0.6;

            data.ObservedValue = GenerateData(n.SizeAsInt, truePi);

            // Initialise messages randomly to break symmetry
            VariableArray <Discrete> zInit = Variable.Array <Discrete>(n).Named("zInit");
            bool useObservedValue          = true;

            if (useObservedValue)
            {
                zInit.ObservedValue = Util.ArrayInit(n.SizeAsInt, i => Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt));
            }
            else
            {
                // This approach doesn't work, because Infer.NET notices that Rand.Int is stochastic and thinks that it should perform message-passing here.
                using (Variable.ForEach(n))
                {
                    var randk = Variable <int> .Factor(new Func <int, int>(Rand.Int), (Variable <int>) k.Size);

                    randk.SetValueRange(k);
                    zInit[n] = Variable <Discrete> .Factor(Discrete.PointMass, randk, (Variable <int>) k.Size);
                }
            }
            z[n].InitialiseTo(zInit[n]);

            // The inference
            InferenceEngine ie = new InferenceEngine();

            ie.Algorithm = new VariationalMessagePassing();
            //ie.Compiler.GenerateInMemory = false;
            //ie.NumberOfIterations = 200;
            Dirichlet wDist    = (Dirichlet)ie.Infer(weights);
            Vector    wEstMean = wDist.GetMean();

            object meansActual = ie.Infer(means);

            Console.WriteLine("means = ");
            Console.WriteLine(meansActual);
            var precsActual = ie.Infer <IList <Wishart> >(precs);

            Console.WriteLine("precs = ");
            Console.WriteLine(precsActual);
            Console.WriteLine("w = {0} should be {1}", wEstMean, Vector.FromArray(truePi, 1 - truePi));
            //Console.WriteLine(StringUtil.JoinColumns("z = ", ie.Infer(z)));
            Assert.True(
                MMath.AbsDiff(wEstMean[0], truePi) < 0.05 ||
                MMath.AbsDiff(wEstMean[1], truePi) < 0.05);
        }
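
        // GenerateData is not shown in this example. Below is a minimal sketch consistent
        // with how it is used above: nData points drawn from a two-component 2-D Gaussian
        // mixture with mixing weight truePi. The component means and precisions are
        // assumptions for illustration, not the original values.
        private Vector[] GenerateData(int nData, double truePi)
        {
            Vector trueM1 = Vector.FromArray(2.0, 3.0); // assumed mean of component 1
            Vector trueM2 = Vector.FromArray(7.0, 5.0); // assumed mean of component 2
            VectorGaussian trueVG1 = VectorGaussian.FromMeanAndPrecision(trueM1, PositiveDefiniteMatrix.IdentityScaledBy(2, 1.0));
            VectorGaussian trueVG2 = VectorGaussian.FromMeanAndPrecision(trueM2, PositiveDefiniteMatrix.IdentityScaledBy(2, 1.0));
            Bernoulli trueB = new Bernoulli(truePi);
            Vector[] result = new Vector[nData];
            for (int j = 0; j < nData; j++)
            {
                // draw the component indicator, then sample from that component
                result[j] = trueB.Sample() ? trueVG1.Sample() : trueVG2.Sample();
            }
            return result;
        }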
Example #21
0
 public Matrix GetCovarianceMatrix()
 {
     return(PositiveDefiniteMatrix.IdentityScaledBy(1, this.variance));
 }
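
 // Note (added): PositiveDefiniteMatrix.IdentityScaledBy(d, s) returns the d x d
 // identity matrix scaled by s, so for d = 1 this is just the 1x1 matrix [variance],
 // i.e. a scalar variance wrapped in matrix form:
 //   var m = PositiveDefiniteMatrix.IdentityScaledBy(1, 2.5); // m[0, 0] == 2.5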
Example #22
0
        /// <summary>
        /// Primary definition of the GPRN model as an Infer.NET model.
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        /// <param name="constrainWpositive">Whether to constrain W to be positive [experimental]</param>
        /// <param name="isotropicNoise">Whether to use isotropic observation noise</param>
        /// <param name="meanFunctions">Whether to include a per output mean function</param>
        /// <param name="initLoglengthscales">Initial values for the length scales of the kernels</param>
        /// <param name="sw">An output file for logging</param>
        public void GPRN_InferNET_model(Vector[] inputs,
                                        double[,] data,
                                        int Q,
                                        bool grid                    = false,
                                        bool[,] missing              = null,
                                        bool nodeFunctionNoise       = false,
                                        bool constrainWpositive      = false,
                                        bool isotropicNoise          = true,
                                        bool meanFunctions           = false,
                                        double[] initLoglengthscales = null,
                                        StreamWriter sw              = null)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0)) + new WhiteNoise(-3);
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            SummationKernel kf_weights = new SummationKernel(new SquaredExponential(1)) + new WhiteNoise(-3);
            var             K_weights  = Utils.GramMatrix(kf_weights, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // new bool[,] defaults to all false
            }
            if (initLoglengthscales == null)
            {
                initLoglengthscales = new double[] { 0, 1 }; // guard against the null default; matches the initial kernel log-lengthscales above
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues  = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValues2 = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            if (constrainWpositive)
            {
                var weightFunctionValuesCopy = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesCopy");
                weightFunctionValuesCopy[d, q][n] = Variable.GaussianFromMeanAndPrecision(weightFunctionValues[d, q][n], 100);
                Variable.ConstrainPositive(weightFunctionValuesCopy[d, q][n]);
            }
            weightFunctionValues2[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            Variable <double>      noisePrecision      = null;
            VariableArray <double> noisePrecisionArray = null;

            if (isotropicNoise)
            {
                noisePrecision = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");
                toInfer.Add(noisePrecision);
            }
            else
            {
                noisePrecisionArray    = Variable.Array <double>(d).Named("noisePrecision");
                noisePrecisionArray[d] = Variable.Random <double, Gamma>(noisePrecisionPrior).ForEach(d);
                toInfer.Add(noisePrecisionArray);
            }

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            VariableArray <VariableArray <double>, double[][]> meanFunctionValues = null;

            if (meanFunctions)
            {
                GPFactor.settings = new Settings
                {
                    solverMethod = Settings.SolverMethod.GradientDescent,
                };

                VariableArray <KernelFunction> kf = Variable.Array <KernelFunction>(d);
                kf.ObservedValue = Enumerable.Range(0, D.ObservedValue).Select(
                    o => new SummationKernel(new SquaredExponential()) + new WhiteNoise(-3)).ToArray();

                var mf = Variable.Array <Vector>(d).Named("meanFunctions");
                mf[d] = Variable <Vector> .Factor <double, Vector[], int[], KernelFunction>(MyFactors.GP, 1.0 /*Variable.GammaFromShapeAndRate(1,1)*/, inputs, new int[] { 0 },
                                                                                            kf[d]);

                mf.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
                meanFunctionValues    = Variable.Array(Variable.Array <double>(n), d).Named("meanFunctionValues");
                meanFunctionValues[d] = Variable.ArrayFromVector(mf[d], n);
                toInfer.Add(meanFunctionValues);
            }

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q] = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    if (meanFunctions)
                    {
                        noiseLessY[d, n] = Variable.Sum(temp) + meanFunctionValues[d][n];
                    }
                    else
                    {
                        noiseLessY[d, n] = Variable.Sum(temp);
                    }
                    using (Variable.IfNot(isMissing[d, n]))
                        if (isotropicNoise)
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                        }
                        else
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecisionArray[d]);
                        }
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValues2
            });

            var infer = new InferenceEngine(new VariationalMessagePassing());

            infer.ModelName = "MeanFunction";
            var ca = infer.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            var kernel = new SummationKernel(new SquaredExponential(initLoglengthscales[0]));

            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            kernel  = new SummationKernel(new SquaredExponential(initLoglengthscales[1]));
            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            ca.Reset();
            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 100; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }
            Console.WriteLine("Finished after " + it);
        }
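
        // Note (added): the generative model implemented above is the GPRN likelihood
        //   y[d, n] = sum_q W[d, q](x_n) * f[q](x_n) (+ optional mean function) + noise,
        // where each latent function f[q] and each weight function W[d, q] carries a GP
        // prior, encoded here through the observed inverse Gram matrices
        // K_node_inverse and K_weights_inverse.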
Example #23
0
        /// <summary>
        /// Infer.NET definition of the Semiparametric Latent Factor Model of
        /// Teh, Y., Seeger, M., and Jordan, M. (AISTATS 2005).
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        public void SPLFM(
            Vector[] inputs,
            double[,] data,
            int Q,
            bool[,] missing        = null,
            bool nodeFunctionNoise = false)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0));
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // new bool[,] defaults to all false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weights = Variable.Array <double>(d, q).Named("weights");

            weights[d, q] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(d, q);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            var noisePrecision      = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weights[d, q] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, noisePrecision, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weights
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());

            ie.ModelName = "SPLFM";
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.Execute(100);
            var fvals      = ca.Marginal <Gaussian[][]>(nodeFunctionValues.NameInGeneratedCode)[0]; // indexed [q][n]; take latent function q = 0
            var x          = inputs.Select(i => i[0]).ToArray();
            var mplWrapper = new MatplotlibWrapper();

            mplWrapper.AddArray("x", x);
            mplWrapper.AddArray("y", fvals.Select(i => i.GetMean()).ToArray());
            mplWrapper.AddArray("s", fvals.Select(i => Math.Sqrt(i.GetVariance())).ToArray());

            mplWrapper.Plot(new string[] {
                "fill_between(x,y-s,y+s,color=\"gray\")",
                "ylabel(\"node (fitted)\")"
            });
        }
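
        // Hypothetical usage sketch (added; not in the original): fit the SPLFM to a
        // toy 1-D dataset with D = 2 outputs, N = 50 inputs and Q = 1 latent function.
        // The enclosing class is not shown in this snippet, so the call assumes SPLFM
        // is invoked on an instance of that class; the sine-plus-noise data is illustrative.
        public void SPLFM_ToyExample()
        {
            int N = 50, D = 2;
            var inputs = Enumerable.Range(0, N).Select(i => Vector.FromArray(i / (double)N)).ToArray();
            var data   = new double[D, N];
            for (int i = 0; i < D; i++)
                for (int j = 0; j < N; j++)
                    data[i, j] = Math.Sin(2 * Math.PI * inputs[j][0]) + Rand.Normal() * 0.1; // shared signal + observation noise
            SPLFM(inputs, data, Q: 1, nodeFunctionNoise: true);
        }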