/// <summary>
        /// Compiles a model for the specified query variables and writes the generated inference algorithm to the specified folder.
        /// </summary>
        /// <param name="name">The name of the generated inference algorithm.</param>
        /// <param name="generatedSourceFolder">The folder to drop the generated inference algorithm to.</param>
        /// <param name="queryVariables">The query variables.</param>
        private static void GetCompiledInferenceAlgorithm(string name, string generatedSourceFolder, params IVariable[] queryVariables)
        {
            const string modelNamespace = "Microsoft.ML.Probabilistic.Learners.BayesPointMachineClassifierInternal";
            // Create the inference engine
            var engine = new InferenceEngine
            {
                ModelNamespace = modelNamespace,
                ModelName      = name,
                ShowProgress   = false
            };

            engine.Compiler.AddComments           = false; // avoid irrelevant code changes
            engine.Compiler.FreeMemory            = false;
            engine.Compiler.GeneratedSourceFolder = generatedSourceFolder;
            engine.Compiler.GenerateInMemory      = true;
            engine.Compiler.GivePriorityTo(typeof(GammaFromShapeAndRateOp_Laplace));
            engine.Compiler.RecommendedQuality = QualityBand.Experimental; // TFS bug 399
            engine.Compiler.ReturnCopies       = false;
            engine.Compiler.ShowWarnings       = true;
            engine.Compiler.UseSerialSchedules = true;
            engine.Compiler.WriteSourceFiles   = true;

            // Generate the inference algorithm
            engine.GetCompiledInferenceAlgorithm(queryVariables);
        }
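        // A minimal usage sketch of the helper above, with a hypothetical model variable `w`:
        //
        //     var w = Variable.GaussianFromMeanAndPrecision(0, 1).Named("w");
        //     GetCompiledInferenceAlgorithm("MyModel", "GeneratedSource", w);
        //
        // This compiles the model once and writes MyModel.cs into the GeneratedSource
        // folder, so the generated algorithm can be inspected or reused without recompiling.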
        public Gaussian[] IndexOfMaximumExplicit(Discrete y, out double logEvidence)
        {
            int N     = y.Dimension;
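            // Standard Infer.NET evidence trick: wrapping the model in an If(ev) block
            // makes the marginal of ev carry the model evidence, so its LogOdds below
            // equals the log evidence of the data.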
            var ev    = Variable.Bernoulli(0.5).Named("ev");
            var block = Variable.If(ev);
            var x     = Enumerable.Range(0, N).Select(o => Variable.GaussianFromMeanAndPrecision(0, 1)).ToArray();
            var yVar  = Variable <int> .Random(y).Named("y");

            for (int index = 0; index < N; index++)
            {
                using (Variable.Case(yVar, index))
                {
                    for (int i = 0; i < N; i++)
                    {
                        if (i != index)
                        {
                            Variable.ConstrainPositive(x[index] - x[i]);
                        }
                    }
                }
            }
            block.CloseBlock();
            var ie = new InferenceEngine();
            //ie.NumberOfIterations = 2;
            var toInfer = x.Select(o => (IVariable)o).ToList();

            toInfer.Add(ev);
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.Execute(10);
            logEvidence = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
            return(x.Select(o => ca.Marginal <Gaussian>(o.NameInGeneratedCode)).ToArray());
        }
        //[Fact]
        internal void GaussianTimesBetaTest2()
        {
            Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock block = Variable.If(evidence);
            //Variable<double> x = Variable.GaussianFromMeanAndVariance(1.2, 3.4).Named("x");
            //var x = Variable.Constant<double>(1.2).Named("x"); 
            var s = Variable.Beta(5.6, 4.8).Named("s");
            //var s = Variable.GaussianFromMeanAndPrecision(0, 1).Named("s"); 
            Variable<double> y = 1.2*s;
            y.Name = "y";
            Variable.ConstrainEqualRandom(y, new Gaussian(2.7, 0.001));
            block.CloseBlock();

            InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());
            var ca = engine.GetCompiledInferenceAlgorithm(evidence, s);
            ca.Reset();
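            // Incremental inference pattern: Reset clears the message state, then each
            // Update(1) runs a single iteration, stopping once the evidence bound has
            // effectively converged.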

            double oldLogEvidence = double.NegativeInfinity;
            for (int i = 0; i < 1000; i++)
            {
                ca.Update(1);
                double logEvidence1 = ca.Marginal<Bernoulli>(evidence.NameInGeneratedCode).LogOdds;
                Console.WriteLine(logEvidence1);
                if (i > 20 && System.Math.Abs(logEvidence1 - oldLogEvidence) < 0.01)
                    break;
                oldLogEvidence = logEvidence1;
            }

            //Gaussian xActual = ca.Marginal<Gaussian>(x);
            Beta sActual = ca.Marginal<Beta>(s.NameInGeneratedCode);
            //Console.WriteLine("x = {0}", xActual);
            Console.WriteLine("s = {0}", sActual);
        }
Example #4
        public BayesPointMachine(int nFeatures, double noise)
        {
            // Training model
            nTrain = Variable.Observed(default(int)).Named(nameof(nTrain));
            Range trainItem = new Range(nTrain);

            trainItem.Name            = nameof(trainItem);
            trainingLabels            = Variable.Observed(default(bool[]), trainItem).Named(nameof(trainingLabels));
            trainingItems             = Variable.Observed(default(Vector[]), trainItem).Named(nameof(trainingItems));
            weights                   = Variable.Random(new VectorGaussian(Vector.Zero(nFeatures), PositiveDefiniteMatrix.Identity(nFeatures))).Named(nameof(weights));
            trainingLabels[trainItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(weights, trainingItems[trainItem]), noise));

            // Testing model
            nTest = Variable.Observed(default(int)).Named(nameof(nTest));
            Range testItem = new Range(nTest);

            testItem.Name       = nameof(testItem);
            testItems           = Variable.Observed(default(Vector[]), testItem).Named(nameof(testItems));
            testLabels          = Variable.Array <bool>(testItem).Named(nameof(testLabels));
            engine              = new InferenceEngine();
            engine.ShowProgress = false;
            engine.Compiler.WriteSourceFiles = false;
            engine.NumberOfIterations        = 5;
            if (singleModel)
            {
                testLabels[testItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(weights, testItems[testItem]), noise));
            }
            else
            {
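                // Two-model configuration: the test model draws its weights from the
                // posterior inferred by the training model, passed in at prediction time
                // through the observed weightPosterior distribution.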
                weightPosterior = Variable.Observed(default(VectorGaussian)).Named(nameof(weightPosterior));
                Variable <Vector> testWeights = Variable <Vector> .Random(weightPosterior);

                testLabels[testItem] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(testWeights, testItems[testItem]), noise));

                // Force compilation of the training model.
                engine.GetCompiledInferenceAlgorithm(weights);
            }
            // Force compilation of the testing model.
            // This also defines the variables to be inferred, obviating OptimizeForVariables.
            // This requires observed variables to have values, but they can be null.
            engine.GetCompiledInferenceAlgorithm(testLabels);
        }
Example #5
        internal void FitNegativeBinomial()
        {
            // data generated from a negative binomial model with parameters r and p
            double r = 2;
            double p = 0.3;

            int[] data = new int[] { 1, 4, 5, 14, 0, 3, 2, 18, 0, 1, 8, 1, 4, 3, 6, 4, 9, 5, 1, 10, 5, 9, 2, 3, 3, 9, 14, 3, 5, 12 };
            int   N    = data.Length;

            Variable <bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock         block    = Variable.If(evidence);
            var             rate     = Variable.GammaFromShapeAndRate(1, 1).Named("rate");
            //var shape = Variable.GammaFromShapeAndRate(1,1).Named("shape");
            var   shape  = Variable.GammaFromShapeAndRate(1, 1).Named("shape");
            var   lambda = Variable.GammaFromShapeAndRate(shape, rate).Named("lambda");
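            // A Poisson rate lambda drawn from Gamma(shape, rate) makes y marginally
            // negative binomial, so the inferred shape and rate correspond to r and
            // p/(1-p) in the printout at the end.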
            Range Nrange = new Range(N);
            var   y      = Variable.Array <int>(Nrange).Named("y");

            y[Nrange]       = Variable.Poisson(lambda).ForEach(Nrange);
            y.ObservedValue = data;
            block.CloseBlock();
            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            ie.ShowFactorGraph = true;
            var ca = ie.GetCompiledInferenceAlgorithm(evidence, rate, shape);

            ca.Reset();
            double oldLogEvidence = double.NegativeInfinity;

            for (int i = 0; i < 1000; i++)
            {
                ca.Update(1);
                double logEvidence1 = ca.Marginal <Bernoulli>(evidence.NameInGeneratedCode).LogOdds;
                Console.WriteLine(logEvidence1);
                if (i > 20 && System.Math.Abs(logEvidence1 - oldLogEvidence) < 0.01)
                {
                    break;
                }
                oldLogEvidence = logEvidence1;
            }
            Gamma  shapePost = ca.Marginal <Gamma>(shape.NameInGeneratedCode);
            Gamma  ratePost = ca.Marginal <Gamma>(rate.NameInGeneratedCode);
            double mean, variance;

            shapePost.GetMeanAndVariance(out mean, out variance);
            Console.WriteLine("shape = " + mean + " +/- " + System.Math.Sqrt(variance) + " true= " + r);
            ratePost.GetMeanAndVariance(out mean, out variance);
            Console.WriteLine("rate = " + mean + " +/- " + System.Math.Sqrt(variance) + " true= " + p / (1 - p));
        }
Example #6
        public void PoissonExpTest2()
        {
            Variable <bool>   evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock           block    = Variable.If(evidence);
            Variable <double> x        = Variable.GaussianFromMeanAndVariance(1.2, 3.4).Named("x");

            Rand.Restart(12347);
            int n = 10;
            int N = 10;

            int[] data = new int[N];
            for (int i = 0; i < N; i++)
            {
                data[i] = Rand.Binomial(n, 1.0 / (double)n);
            }
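            // The randomly generated data above is discarded in favour of a small fixed example.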
            data = new int[] { 5, 6, 7 };
            Range item = new Range(data.Length).Named("item");
            VariableArray <double> ex = Variable.Array <double>(item).Named("ex");

            ex[item] = Variable.Exp(x).ForEach(item);
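            // Since x is Gaussian, exp(x) gives a log-Gaussian Poisson rate shared by all items.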
            VariableArray <int> y = Variable.Array <int>(item).Named("y");

            y[item] = Variable.Poisson(ex[item]);
            block.CloseBlock();
            y.ObservedValue = data;

            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            var ca = ie.GetCompiledInferenceAlgorithm(evidence, x);

            double oldLogEvidence = double.NegativeInfinity;

            for (int i = 0; i < 1000; i++)
            {
                ca.Update(1);
                double logEvidence1 = ca.Marginal <Bernoulli>(evidence.NameInGeneratedCode).LogOdds;
                Console.WriteLine(logEvidence1);
                if (i > 20 && System.Math.Abs(logEvidence1 - oldLogEvidence) < 1e-10)
                {
                    break;
                }
                oldLogEvidence = logEvidence1;
            }
            Gaussian xExpected = new Gaussian(1.755071011884509, 0.055154577283323);
            Gaussian xActual   = ca.Marginal <Gaussian>(x.NameInGeneratedCode);

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            Assert.True(xExpected.MaxDiff(xActual) < 1e-3);
        }
Example #7
        /// <summary>
        /// Compiles a model for the specified query variables and writes the generated inference algorithm to the specified folder.
        /// </summary>
        /// <param name="name">The name of the generated inference algorithm.</param>
        /// <param name="generatedSourceFolder">The folder to drop the generated inference algorithm to.</param>
        /// <param name="queryVariables">The query variables.</param>
        private static void GetCompiledInferenceAlgorithm(string name, string generatedSourceFolder, params IVariable[] queryVariables)
        {
            // Create the inference engine
            var engine = new InferenceEngine
            {
                ModelName    = name,
                ShowProgress = false
            };

            engine.Compiler.AddComments           = true;
            engine.Compiler.FreeMemory            = false;
            engine.Compiler.GeneratedSourceFolder = generatedSourceFolder;
            engine.Compiler.GenerateInMemory      = true;
            engine.Compiler.GivePriorityTo(typeof(GammaFromShapeAndRateOp_Laplace));
            engine.Compiler.RecommendedQuality = QualityBand.Experimental; // TFS bug 399
            engine.Compiler.ReturnCopies       = false;
            engine.Compiler.ShowWarnings       = true;
            engine.Compiler.UseSerialSchedules = true;
            engine.Compiler.WriteSourceFiles   = true;

            // Generate the inference algorithm
            engine.GetCompiledInferenceAlgorithm(queryVariables);
        }
        public Gaussian[] IndexOfMaximumFactorCA(Discrete y, out double logEvidence)
        {
            int N     = y.Dimension;
            var n     = new Range(N);
            var ev    = Variable.Bernoulli(0.5).Named("ev");
            var block = Variable.If(ev);
            var x     = Variable.Array <double>(n).Named("x");

            x[n] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(n);
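            // Unlike the explicit pairwise constraints in IndexOfMaximumExplicit, this
            // version applies an IndexOfMaximum helper (defined elsewhere in this class)
            // and softly constrains its output with the observed Discrete distribution y.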
            var p = IndexOfMaximum(x);

            Variable.ConstrainEqualRandom(p, y);
            block.CloseBlock();
            var ie = new InferenceEngine();

            ie.ModelName = "IndexOfMaximumCA";
            //ie.NumberOfIterations = 2;
            var toinfer = new List <IVariable>();

            toinfer.Add(x);
            toinfer.Add(ev);
            ie.OptimiseForVariables = toinfer;

            var ca = ie.GetCompiledInferenceAlgorithm(x, ev);

            ca.Reset();
            Gaussian[] xPost = null;
            logEvidence = 0;
            for (int i = 0; i < 10; i++)
            {
                ca.Update(1);
                logEvidence = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                xPost       = ca.Marginal <Gaussian[]>(x.NameInGeneratedCode);
            }
            return(xPost);
        }
Example #9
        /// <summary>
        /// Initializes a new instance of the <see cref="BinaryModel"/> class.
        /// </summary>
        /// <param name="trainModel">If set to <c>true</c> train model.</param>
        /// <param name="showFactorGraph">If set to <c>true</c> show factor graph.</param>
        /// <param name="debug">If set to <c>true</c> debug.</param>
        public BinaryModel(bool trainModel, bool showFactorGraph = false, bool debug = false)
        {
            evidence = Variable.Bernoulli(0.5).Named("evidence");

            using (Variable.If(evidence))
            {
                numberOfResidents = Variable.New<int>().Named("numberOfResidents").Attrib(new DoNotInfer());
                numberOfFeatures = Variable.New<int>().Named("numberOfFeatures").Attrib(new DoNotInfer());

                var resident = new Range(numberOfResidents).Named("resident");
                var feature = new Range(numberOfFeatures).Named("feature");

                numberOfExamples = Variable.Array<int>(resident).Named("numberOfExamples").Attrib(new DoNotInfer());
                var example = new Range(numberOfExamples[resident]).Named("example").Attrib(new Sequential());

                noisePrecision = Variable.New<double>().Named("noisePrecision").Attrib(new DoNotInfer());

                weightPriorMeans = Variable.New<GaussianArray>().Named("weightPriorMeans").Attrib(new DoNotInfer());
                weightPriorPrecisions = Variable.New<GammaArray>().Named("weightPriorPrecisions").Attrib(new DoNotInfer());

                weightMeans = Variable.Array<double>(feature).Named("weightMeans");
                weightPrecisions = Variable.Array<double>(feature).Named("weightPrecisions");

                weightMeans.SetTo(Variable<double[]>.Random(weightPriorMeans));
                weightPrecisions.SetTo(Variable<double[]>.Random(weightPriorPrecisions));

                weights = Variable.Array(Variable.Array<double>(feature), resident).Named("weights");
                featureValues = Variable.Array(Variable.Array(Variable.Array<double>(feature), example), resident).Named("featureValues").Attrib(new DoNotInfer());

                //			if (!useBias)
                //			{
                //				thresholdPriors = Variable.New<GaussianArray>().Named("thresholdPrior").Attrib(new DoNotInfer());
                //				thresholds = Variable.Array<double>(resident).Named("threshold");
                //				thresholds.SetTo(Variable<double[]>.Random(thresholdPriors));
                //			}

                activities = Variable.Array(Variable.Array<bool>(example), resident).Named("activities");
                // activities[resident][example].AddAttribute(new MarginalPrototype(new Bernoulli()));

                using (Variable.ForEach(resident))
                {
                    var products = Variable.Array(Variable.Array<double>(feature), example).Named("products");
                    var scores = Variable.Array<double>(example).Named("scores");
                    var scoresPlusNoise = Variable.Array<double>(example).Named("scoresPlusNoise");

                    weights[resident][feature] = Variable.GaussianFromMeanAndPrecision(weightMeans[feature], weightPrecisions[feature]);

                    using (Variable.ForEach(example))
                    {
                        using (Variable.ForEach(feature))
                        {
                            products[example][feature] = weights[resident][feature] * featureValues[resident][example][feature];
                        }

                        scores[example] = Variable.Sum(products[example]).Named("score");
                        scoresPlusNoise[example] = Variable.GaussianFromMeanAndPrecision(scores[example], noisePrecision).Named("scorePlusNoise");

                        //					if (useBias)
                        {
                            activities[resident][example] = scoresPlusNoise[example] > 0;
                        }
                        //					else
                        //					{
                        //						var diff = (scoresPlusNoise[example] - thresholds[resident]).Named("diff");
                        //						activities[example][resident] = diff > 0;
                        //					}
                    }
                }
            }

            engine = new InferenceEngine
            {
                Algorithm = new ExpectationPropagation { DefaultNumberOfIterations = trainModel ? 10 : 1 },
                ShowFactorGraph = showFactorGraph,
                ShowProgress = false,
                // BrowserMode = BrowserMode.Never, // debug ? BrowserMode.OnError : BrowserMode.Never,
                ShowWarnings = debug
            };

            if (debug)
            {
                engine.Compiler.GenerateInMemory = false;
                engine.Compiler.WriteSourceFiles = true;
                engine.Compiler.IncludeDebugInformation = true;
                engine.Compiler.CatchExceptions = true;
            }

            #if USE_PRECOMPILED_ALGORITHM
            numberOfResidents.ObservedValue = default(int);
            numberOfExamples.ObservedValue = default(int);
            numberOfFeatures.ObservedValue = default(int);
            noisePrecision.ObservedValue = default(double);
            featureValues.ObservedValue = default(double[][][]);
            weightPriorMeans.ObservedValue = default(DistributionStructArray<Gaussian, double>); // (DistributionStructArray<Gaussian, double>)Distribution<double>.Array(default(Gaussian[]));
            weightPriorPrecisions.ObservedValue = default(DistributionStructArray<Gamma, double>); // (DistributionStructArray<Gamma, double>)Distribution<double>.Array(default(Gamma[]));
            activities.ObservedValue = default(bool[][]);

            if (trainModel)
            {
                activities.AddAttribute(new DoNotInfer());
                algorithm = engine.GetCompiledInferenceAlgorithm(new IVariable[] { weights, weightMeans, weightPrecisions });
            }
            else
            {
                activities.AddAttribute(QueryTypes.Marginal);
                algorithm = engine.GetCompiledInferenceAlgorithm(new IVariable[] { activities });
            }
            #endif
        }
        public int MulticlassRegression(Vector[] xObs, int[] yObs, int C, out VectorGaussian[] bPost, out Gaussian[] meanPost, out double lowerBound, object softmaxOperator,
                                        bool trackLowerBound = false)
        {
            int             N     = xObs.Length;
            int             K     = xObs[0].Count;
            var             c     = new Range(C).Named("c");
            var             n     = new Range(N).Named("n");
            Variable <bool> ev    = null;
            IfBlock         model = null;

            if (trackLowerBound)
            {
                ev    = Variable.Bernoulli(0.5).Named("evidence");
                model = Variable.If(ev);
            }
            // model
            var B = Variable.Array <Vector>(c).Named("coefficients");

            B[c] = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(K), PositiveDefiniteMatrix.Identity(K)).ForEach(c);
            var m = Variable.Array <double>(c).Named("mean");

            m[c] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(c);
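            // Pin the last class's coefficients and mean to zero so that the softmax
            // parameterisation is identifiable.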
            Variable.ConstrainEqualRandom(B[C - 1], VectorGaussian.PointMass(Vector.Zero(K)));
            Variable.ConstrainEqualRandom(m[C - 1], Gaussian.PointMass(0));
            var x = Variable.Array <Vector>(n);

            x.ObservedValue = xObs;
            var yData = Variable.Array <int>(n);

            yData.ObservedValue = yObs;
            var g = Variable.Array(Variable.Array <double>(c), n);

            g[n][c] = Variable.InnerProduct(B[c], x[n]) + m[c];
            var p = Variable.Array <Vector>(n);

            p[n] = Variable.Softmax(g[n]);
            using (Variable.ForEach(n))
                yData[n] = Variable.Discrete(p[n]);
            if (trackLowerBound)
            {
                model.CloseBlock();
            }
            // inference
            var ie = new InferenceEngine(new VariationalMessagePassing());

            ie.Compiler.GivePriorityTo(softmaxOperator);
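            // Note: the code below assumes trackLowerBound == true; otherwise ev is null
            // and the compilation call and evidence queries would fail.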
            var ca        = ie.GetCompiledInferenceAlgorithm(ev, B, m);
            var start     = DateTime.Now;
            var initBound = double.NegativeInfinity;
            int i         = 0;

            lowerBound = 0;
            for (i = 1; i <= 50; i++)
            {
                ca.Update(1);
                lowerBound = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(i + "," + lowerBound + "," + (DateTime.Now - start).TotalMilliseconds);
                if (System.Math.Abs(initBound - lowerBound) < 1e-2)
                {
                    break;
                }
                initBound = lowerBound;
                start     = DateTime.Now;
            }

            bPost    = ca.Marginal <VectorGaussian[]>(B.NameInGeneratedCode);
            meanPost = ca.Marginal <Gaussian[]>(m.NameInGeneratedCode);
            return(i);
        }
Example #11
        internal void BetaRegression()
        {
            int P = 8;

            double[] b = new double[P];
            for (int p = 0; p < P; p++)
            {
                b[p] = Rand.Beta(1, 1);
            }
            int N = 100;

            double[][] X = new double[N][];
            //Gaussian[][] softX = new Gaussian[N][];
            double[] Y = new double[N];
            for (int n = 0; n < N; n++)
            {
                X[n] = new double[P];
                //softX[n] = new Gaussian[P];
                Y[n] = 0;
                for (int p = 0; p < P; p++)
                {
                    X[n][p] = Rand.Normal();
                    //softX[n][p] = new Gaussian(X[n][p], 1e-4);
                    Y[n] += X[n][p] * b[p];
                }
            }

            Variable <bool>        evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock                block    = Variable.If(evidence);
            Range                  dim      = new Range(P).Named("P");
            Range                  item     = new Range(N).Named("N");
            VariableArray <double> w        = Variable.Array <double>(dim).Named("w");

            w[dim] = Variable.Beta(1, 1).ForEach(dim);
            var x        = Variable.Array(Variable.Array <double>(dim), item).Named("x");
            var softXvar = Variable.Array(Variable.Array <double>(dim), item).Named("softx");

            softXvar.ObservedValue = X;
            x[item][dim]           = Variable.GaussianFromMeanAndPrecision(softXvar[item][dim], 1e4);
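            // Soft-evidence trick: the inputs are observed through a very tight Gaussian
            // (precision 1e4) rather than being fixed, keeping the product x * w a
            // stochastic factor that variational message passing can handle.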
            var wx = Variable.Array(Variable.Array <double>(dim), item).Named("wx");

            wx[item][dim] = x[item][dim] * w[dim];
            var sum = Variable.Array <double>(item).Named("sum");

            sum[item] = Variable.Sum(wx[item]);
            var prec = Variable.GammaFromShapeAndRate(.1, .1).Named("Noise");
            var y    = Variable.Array <double>(item).Named("y");

            y[item] = Variable.GaussianFromMeanAndPrecision(sum[item], prec);
            block.CloseBlock();
            y.ObservedValue = Y;

            InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());
            var             ca     = engine.GetCompiledInferenceAlgorithm(evidence, w);

            ca.Reset();

            double oldLogEvidence = double.NegativeInfinity;

            for (int i = 0; i < 1000; i++)
            {
                ca.Update(1);
                double logEvidence1 = ca.Marginal <Bernoulli>(evidence.NameInGeneratedCode).LogOdds;
                Console.WriteLine(logEvidence1);
                if (i > 20 && System.Math.Abs(logEvidence1 - oldLogEvidence) < 0.01)
                {
                    break;
                }
                oldLogEvidence = logEvidence1;
            }

            DistributionArray <Beta> wInferred = ca.Marginal <DistributionArray <Beta> >(w.NameInGeneratedCode);

            for (int p = 0; p < P; p++)
            {
                Console.WriteLine("w[{0}] = {1} +/- {2} should be {3}",
                                  p, wInferred[p].GetMean(), System.Math.Sqrt(wInferred[p].GetVariance()), b[p]);
            }
        }
Example #13
        /// <summary>
        /// Primary definition of the GPRN model as an Infer.NET model.
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        /// <param name="constrainWpositive">Whether to constrain W to be positive [experimental]</param>
        /// <param name="isotropicNoise">Whether to use isotropic observation noise</param>
        /// <param name="meanFunctions">Whether to include a per output mean function</param>
        /// <param name="initLoglengthscales">Initial values for the length scales of the kernels</param>
        /// <param name="sw">An output file for logging</param>
        public void GPRN_InferNET_model(Vector[] inputs,
                                        double[,] data,
                                        int Q,
                                        bool grid                    = false,
                                        bool[,] missing              = null,
                                        bool nodeFunctionNoise       = false,
                                        bool constrainWpositive      = false,
                                        bool isotropicNoise          = true,
                                        bool meanFunctions           = false,
                                        double[] initLoglengthscales = null,
                                        StreamWriter sw              = null)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0)) + new WhiteNoise(-3);
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            SummationKernel kf_weights = new SummationKernel(new SquaredExponential(1)) + new WhiteNoise(-3);
            var             K_weights  = Utils.GramMatrix(kf_weights, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // defaults to all false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);
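            // GPRN generative structure: Y(x) = W(x) f(x) + noise, where the Q node
            // functions f and the D x Q weight functions W all carry GP priors,
            // implemented below via scaled-precision VectorGaussian factors.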

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues  = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValues2 = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            if (constrainWpositive)
            {
                var weightFunctionValuesCopy = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesCopy");
                weightFunctionValuesCopy[d, q][n] = Variable.GaussianFromMeanAndPrecision(weightFunctionValues[d, q][n], 100);
                Variable.ConstrainPositive(weightFunctionValuesCopy[d, q][n]);
            }
            weightFunctionValues2[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            Variable <double>      noisePrecision      = null;
            VariableArray <double> noisePrecisionArray = null;

            if (isotropicNoise)
            {
                noisePrecision = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");
                toInfer.Add(noisePrecision);
            }
            else
            {
                noisePrecisionArray    = Variable.Array <double>(d).Named("noisePrecision");
                noisePrecisionArray[d] = Variable.Random <double, Gamma>(noisePrecisionPrior).ForEach(d);
                toInfer.Add(noisePrecisionArray);
            }

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            VariableArray <VariableArray <double>, double[][]> meanFunctionValues = null;

            if (meanFunctions)
            {
                GPFactor.settings = new Settings
                {
                    solverMethod = Settings.SolverMethod.GradientDescent,
                };

                VariableArray <KernelFunction> kf = Variable.Array <KernelFunction>(d);
                kf.ObservedValue = Enumerable.Range(0, D.ObservedValue).Select(
                    o => new SummationKernel(new SquaredExponential()) + new WhiteNoise(-3)).ToArray();

                var mf = Variable.Array <Vector>(d).Named("meanFunctions");
                mf[d] = Variable <Vector> .Factor <double, Vector[], int[], KernelFunction>(MyFactors.GP, 1.0 /*Variable.GammaFromShapeAndRate(1,1)*/, inputs, new int[] { 0 },
                                                                                            kf[d]);

                mf.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
                meanFunctionValues    = Variable.Array(Variable.Array <double>(n), d).Named("meanFunctionValues");
                meanFunctionValues[d] = Variable.ArrayFromVector(mf[d], n);
                toInfer.Add(meanFunctionValues);
            }

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q] = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    if (meanFunctions)
                    {
                        noiseLessY[d, n] = Variable.Sum(temp) + meanFunctionValues[d][n];
                    }
                    else
                    {
                        noiseLessY[d, n] = Variable.Sum(temp);
                    }
                    using (Variable.IfNot(isMissing[d, n]))
                        if (isotropicNoise)
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                        }
                        else
                        {
                            observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecisionArray[d]);
                        }
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValues2
            });

            var infer = new InferenceEngine(new VariationalMessagePassing());

            infer.ModelName = "MeanFunction";
            var ca = infer.GetCompiledInferenceAlgorithm(toInfer.ToArray());
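            // Note: despite the null default, initLoglengthscales must supply at least
            // two entries here (one for the node kernel, one for the weight kernel).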

            var kernel = new SummationKernel(new SquaredExponential(initLoglengthscales[0]));

            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            kernel  = new SummationKernel(new SquaredExponential(initLoglengthscales[1]));
            kernel += new WhiteNoise(-3);
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(kernel, inputs).Inverse());

            ca.Reset();
            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 100; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }
            Console.WriteLine("Finished after " + it);
        }
Example #14
        /// <summary>
        /// An implementation of GPRN specialised for one step look ahead multivariate volatility experiments
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <returns>Predicted covariance for the next time point</returns>
        public VectorGaussian GPRN_MultivariateVolatility(
            Vector[] inputs,
            double[,] data,
            double[] nodeSignalPrecs,
            double[] nodeNoisePrecs,
            double obsNoisePrec,
            ref VectorGaussian[] finit,
            ref VectorGaussian[,] winit,
            KernelFunction nodeKernel,
            KernelFunction weightKernel)
        {
            var missing = new bool[data.GetLength(0), data.GetLength(1)];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                missing[i, data.GetLength(1) - 1] = true; // last data point is missing
            }
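            // With the final time point marked missing, its posterior becomes the
            // one-step-ahead predictive distribution that this method returns.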
            int Q = nodeSignalPrecs.Length;

            var toInfer   = new List <IVariable>();
            var K_node    = Utils.GramMatrix(nodeKernel, inputs);
            var K_weights = Utils.GramMatrix(weightKernel, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");

            nodeSignalPrecisions.ObservedValue = nodeSignalPrecs;

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");

            nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            var nodeNoisePrecisions = Variable.Array <double>(q).Named("nodeNoisePrecisions");

            nodeNoisePrecisions.ObservedValue = nodeNoisePrecs;
            nodeFunctionValues[q][n]          = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

            var weightFunctions   = Variable.Array <Vector>(d, q).Named("weightFunctions");
            var K_weights_inverse = Variable.Observed(K_weights.Inverse()).Named("K_weights_inverse");

            weightFunctions[d, q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, Variable.Constant <double>(1), K_weights_inverse).ForEach(d, q);

            weightFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var weightFunctionValues           = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValues");
            var weightFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), d, q).Named("weightFunctionValuesPredictive");

            weightFunctionValues[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);

            weightFunctionValuesPredictive[d, q] = Variable.ArrayFromVector(weightFunctions[d, q], n);
            var observedData   = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecision = Variable.Observed(obsNoisePrec).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weightFunctionValues[d, q][n] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            var finitNew = finit.Select(i => Utils.extendByOneDimension(i, Gaussian.FromMeanAndVariance(0, 1))).ToArray();

            nodeFunctions.InitialiseTo(Distribution <Vector> .Array(finitNew));

            var winitNew = new VectorGaussian[data.GetLength(0), Q];

            for (int i = 0; i < data.GetLength(0); i++)
            {
                for (int j = 0; j < Q; j++)
                {
                    winitNew[i, j] = Utils.extendByOneDimension(winit[i, j], Gaussian.FromMeanAndVariance(0, 1));
                }
            }

            weightFunctions.InitialiseTo(Distribution <Vector> .Array(winitNew));

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, nodeFunctions, weightFunctions, nodeFunctionValuesPredictive, weightFunctionValues, weightFunctionValuesPredictive                                      /* is this redundant? */
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.SetObservedValue(K_node_inverse.NameInGeneratedCode, Utils.GramMatrix(nodeKernel, inputs).Inverse());
            ca.SetObservedValue(K_weights_inverse.NameInGeneratedCode, Utils.GramMatrix(weightKernel, inputs).Inverse());
            ca.Reset();

            double oldML = double.NegativeInfinity;
            double ml    = 0;
            int    it    = 0;

            for (; it < 30; it++)
            {
                ca.Update(1);
                ml = ca.Marginal <Bernoulli>(ev.NameInGeneratedCode).LogOdds;
                Console.WriteLine(ml);
                if (Math.Abs(oldML - ml) < .1)
                {
                    break;
                }
                oldML = ml;
            }

            var f = ca.Marginal <Gaussian[][]>("nodeFunctionValuesPredictive");
            var W = ca.Marginal <Gaussian[, ][]>("weightFunctionValuesPredictive");

            finit = ca.Marginal <VectorGaussian[]>(nodeFunctions.NameInGeneratedCode);
            winit = ca.Marginal <VectorGaussian[, ]>(weightFunctions.NameInGeneratedCode);
            return(Utils.CorrelatedPredictionsHelper(f, W, Gamma.PointMass(obsNoisePrec), Q, data.GetLength(0), data.GetLength(1) - 1));
        }
Example #15
        /// <summary>
        /// Infer.NET definition of the Semi Parametric Latent Factor Model of
        /// Teh, Y., Seeger, M., and Jordan, M. (AISTATS 2005).
        /// </summary>
        /// <param name="inputs">Covariates X</param>
        /// <param name="data">Outputs Y</param>
        /// <param name="Q">Number of latent functions</param>
        /// <param name="missing">Which elements of Y are missing</param>
        /// <param name="nodeFunctionNoise">Whether to include node noise</param>
        public void SPLFM(
            Vector[] inputs,
            double[,] data,
            int Q,
            bool[,] missing        = null,
            bool nodeFunctionNoise = false)
        {
            var             toInfer = new List <IVariable>();
            SummationKernel kf_node = new SummationKernel(new SquaredExponential(0));
            var             K_node  = Utils.GramMatrix(kf_node, inputs);

            var D    = Variable.Observed <int>(data.GetLength(0)).Named("D");
            var d    = new Range(D).Named("d");
            var Qvar = Variable.Observed <int>(Q).Named("Q");
            var q    = new Range(Qvar).Named("q");
            var N    = Variable.Observed <int>(data.GetLength(1)).Named("N");
            var n    = new Range(N).Named("n");

            if (missing == null)
            {
                missing = new bool[D.ObservedValue, N.ObservedValue]; // defaults to all false
            }
            var ev         = Variable.Bernoulli(.5).Named("ev");
            var modelBlock = Variable.If(ev);

            var nodeSignalPrecisions = Variable.Array <double>(q).Named("nodeSignalPrecisions");
            // set this to 1 if not learning signal variance
            var nodeSignalPrecisionsPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .1)).ToArray(), q).Named("nodeSignalPrecisionsPrior");

            nodeSignalPrecisions[q] = Variable.Random <double, Gamma>(nodeSignalPrecisionsPrior[q]);

            var nodeFunctions  = Variable.Array <Vector>(q).Named("nodeFunctions");
            var K_node_inverse = Variable.Observed(K_node.Inverse()).Named("K_node_inverse");

            nodeFunctions[q] = Variable <Vector> .Factor(MyFactors.VectorGaussianScaled, nodeSignalPrecisions[q], K_node_inverse);

            nodeFunctions.AddAttribute(new MarginalPrototype(new VectorGaussian(N.ObservedValue)));
            var nodeFunctionValues           = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValues");
            var nodeFunctionValuesPredictive = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesPredictive");

            VariableArray <double> nodeNoisePrecisions = null;

            if (nodeFunctionNoise)
            {
                var nodeFunctionValuesClean = Variable.Array(Variable.Array <double>(n), q).Named("nodeFunctionValuesClean");
                nodeFunctionValuesClean[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeNoisePrecisions        = Variable.Array <double>(q).Named("nodeNoisePrecisions");
                var nodeNoisePrecisionPrior = Variable.Observed(Enumerable.Range(0, Q).Select(_ => Gamma.FromShapeAndRate(.1, .01)).ToArray(), q).Named("nodeNoisePrecisionPrior");
                nodeNoisePrecisions[q] = Variable.Random <double, Gamma>(nodeNoisePrecisionPrior[q]);
                toInfer.Add(nodeNoisePrecisions);
                nodeFunctionValues[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);

                nodeFunctionValuesPredictive[q][n] = Variable.GaussianFromMeanAndPrecision(nodeFunctionValuesClean[q][n], nodeNoisePrecisions[q]);
            }
            else
            {
                nodeFunctionValues[q]           = Variable.ArrayFromVector(nodeFunctions[q], n);
                nodeFunctionValuesPredictive[q] = Variable.ArrayFromVector(nodeFunctions[q], n);
            }

            var weights = Variable.Array <double>(d, q).Named("weights");

            weights[d, q] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(d, q);
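            // Unlike GPRN, the SPLFM mixes the latent functions with scalar Gaussian
            // weights rather than input-dependent weight functions.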
            var observedData        = Variable.Array <double>(d, n).Named("observedData");
            var noisePrecisionPrior = Variable.Observed(Gamma.FromShapeAndRate(1, .1)).Named("noisePrecisionPrior");
            var noisePrecision      = Variable.Random <double, Gamma>(noisePrecisionPrior).Named("noisePrecision");

            var isMissing = Variable.Array <bool>(d, n).Named("isMissing");

            isMissing.ObservedValue = missing;

            var noiseLessY = Variable.Array <double>(d, n).Named("noiseLessY");

            using (Variable.ForEach(n))
                using (Variable.ForEach(d))
                {
                    var temp = Variable.Array <double>(q).Named("temp");
                    temp[q]          = weights[d, q] * nodeFunctionValues[q][n];
                    noiseLessY[d, n] = Variable.Sum(temp);
                    using (Variable.IfNot(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(noiseLessY[d, n], noisePrecision);
                    using (Variable.If(isMissing[d, n]))
                        observedData[d, n] = Variable.GaussianFromMeanAndPrecision(0, 1);
                }
            observedData.ObservedValue = data;
            var nodeFunctionsInit = Enumerable.Range(0, Q).Select(i =>
                                                                  VectorGaussian.FromMeanAndVariance(
                                                                      VectorGaussian.Sample(Vector.Zero(N.ObservedValue), PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100)),
                                                                      PositiveDefiniteMatrix.IdentityScaledBy(N.ObservedValue, 100))).ToArray(); // should put this manually in generated code

            var distArray = Distribution <Vector> .Array(nodeFunctionsInit);

            var nodeFunctionsInitVar = Variable.Observed(distArray).Named("nodeFunctionsInitVar");

            nodeFunctions.InitialiseTo(nodeFunctionsInitVar);

            modelBlock.CloseBlock();

            toInfer.AddRange(new List <IVariable>()
            {
                ev, noiseLessY, noisePrecision, nodeFunctionValues, nodeSignalPrecisions, nodeFunctionValuesPredictive, weights
            });

            var ie = new InferenceEngine(new VariationalMessagePassing());

            ie.ModelName = "SPLFM";
            var ca = ie.GetCompiledInferenceAlgorithm(toInfer.ToArray());

            ca.Execute(100);
            var fvals      = ca.Marginal <Gaussian[][]>(nodeFunctionValues.NameInGeneratedCode)[0]; // [q][n]
            var x          = inputs.Select(i => i[0]).ToArray();
            var mplWrapper = new MatplotlibWrapper();

            mplWrapper.AddArray("x", x);
            mplWrapper.AddArray("y", fvals.Select(i => i.GetMean()).ToArray());
            mplWrapper.AddArray("s", fvals.Select(i => Math.Sqrt(i.GetVariance())).ToArray());

            mplWrapper.Plot(new string[] {
                "fill_between(x,y-s,y+s,color=\"gray\")",
                "ylabel(\"node (fitted)\")"
            });
        }