Example #1
        public void GPClassificationTest2()
        {
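            // GP classification with three 1-D inputs, a single basis point at the origin, and a
            // squared-exponential kernel. Each label y[i] is true when the latent function value
            // h[i] = f(x[i]) is positive; the evidence block lets the marginal likelihood be checked at the end.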
            bool[]   yData = new bool[] { false, true, false };
            double[] xData = new double[]
            {
                -1.555555555555556, -0.2222222222222223, 1.555555555555555
            };
            Vector[]             xVec     = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[]             basis    = new Vector[] { Vector.Zero(1) };
            IKernelFunction      kf       = new SquaredExponential(0.0);
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <bool> y = Variable.Array <bool>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = (h[item] > 0);
            block.CloseBlock();

            InferenceEngine engine        = new InferenceEngine();
            SparseGP        sgp           = engine.Infer <SparseGP>(f);
            Vector          alphaExpected = Vector.FromArray(new double[] { 0.573337393823702 });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
            double[] xTest = new double[]
            {
                -2, -1, 0.0
            };
            Vector[] xTestVec  = Array.ConvertAll(xTest, v => Vector.Constant(1, v));
            double[] yMeanTest = new double[]
            {
                0.077592778583272, 0.347746707713812, 0.573337393823702
            };
            double[] yVarTest = new double[]
            {
                0.986784459962251, 0.734558782611933, 0.278455962249970
            };
            for (int i = 0; i < xTestVec.Length; i++)
            {
                Gaussian pred         = sgp.Marginal(xTestVec[i]);
                Gaussian predExpected = new Gaussian(yMeanTest[i], yVarTest[i]);
                Console.WriteLine("f({0}) = {1} should be {2}", xTest[i], pred, predExpected);
                Assert.True(predExpected.MaxDiff(pred) < 1e-4);
            }
            double evExpected = -2.463679892165236;
            double evActual   = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-6) < 1e-4);
        }
Example #2
        public void GPClassificationTest1()
        {
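            // Same model as GPClassificationTest2, but with a single observation. Note that the
            // expected evidence below is log(0.5): with a zero-mean prior, a single thresholded
            // observation is true with probability 1/2.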
            bool[]               yData    = new bool[] { true };
            double[]             xData    = new double[] { -0.2222222222222223 };
            Vector[]             xVec     = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[]             basis    = new Vector[] { Vector.Zero(1) };
            IKernelFunction      kf       = new SquaredExponential(0.0);
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <bool> y = Variable.Array <bool>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = (h[item] > 0);
            block.CloseBlock();

            InferenceEngine engine        = new InferenceEngine();
            SparseGP        sgp           = engine.Infer <SparseGP>(f);
            Vector          alphaExpected = Vector.FromArray(new double[] { 0.778424938343491 });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
            double[] xTest = new double[]
            {
                -2, -1, 0.0
            };
            Vector[] xTestVec  = Array.ConvertAll(xTest, v => Vector.Constant(1, v));
            double[] yMeanTest = new double[]
            {
                0.105348359509159, 0.472138591390244, 0.778424938343491
            };
            double[] yVarTest = new double[]
            {
                0.988901723148729, 0.777085150520037, 0.394054615364932
            };
            for (int i = 0; i < xTestVec.Length; i++)
            {
                Gaussian pred         = sgp.Marginal(xTestVec[i]);
                Gaussian predExpected = new Gaussian(yMeanTest[i], yVarTest[i]);
                Console.WriteLine("f({0}) = {1} should be {2}", xTest[i], pred, predExpected);
                Assert.True(predExpected.MaxDiff(pred) < 1e-4);
            }
            double evExpected = -0.693147180559945;
            double evActual   = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-6) < 1e-4);
        }
Example #3
 /// <summary>EP message to <c>y</c>.</summary>
 /// <param name="func">Incoming message from <c>func</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
 /// <param name="x">Constant value for <c>x</c>.</param>
 /// <returns>The outgoing EP message to the <c>y</c> argument.</returns>
 /// <remarks>
 ///   <para>The outgoing message is a distribution matching the moments of <c>y</c> as the random arguments are varied. The formula is <c>proj[p(y) sum_(func) p(func) factor(y,func,x)]/p(y)</c>.</para>
 /// </remarks>
 /// <exception cref="ImproperMessageException">
 ///   <paramref name="func" /> is not a proper distribution.</exception>
 public static Gaussian YAverageConditional([SkipIfUniform] SparseGP func, Vector x)
 {
      return func.Marginal(x);
 }
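In other words: because x is a fixed point and y = func(x) is deterministic given func, the outgoing EP message to y is simply the sparse GP's Gaussian marginal at x.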
Example #4
        internal void HeteroscedasticGPR()
        {
            // This model is based on the paper "Most Likely Heteroscedastic Gaussian Process Regression" by Kersting et al, ICML 2007
            // Silverman's motorcycle benchmark dataset
            double[] inputs = new double[]
            {
                2.4, 2.6, 3.2, 3.6, 4, 6.2, 6.6, 6.8, 7.8, 8.199999999999999, 8.800000000000001, 8.800000000000001,
                9.6, 10, 10.2, 10.6, 11, 11.4, 13.2, 13.6, 13.8, 14.6, 14.6, 14.6, 14.6, 14.6, 14.6, 14.8, 15.4, 15.4,
                15.4, 15.4, 15.6, 15.6, 15.8, 15.8, 16, 16, 16.2, 16.2, 16.2, 16.4, 16.4, 16.6, 16.8, 16.8, 16.8, 17.6,
                17.6, 17.6, 17.6, 17.8, 17.8, 18.6, 18.6, 19.2, 19.4, 19.4, 19.6, 20.2, 20.4, 21.2, 21.4, 21.8, 22, 23.2,
                23.4, 24, 24.2, 24.2, 24.6, 25, 25, 25.4, 25.4, 25.6, 26, 26.2, 26.2, 26.4, 27, 27.2, 27.2, 27.2, 27.6,
                28.2, 28.4, 28.4, 28.6, 29.4, 30.2, 31, 31.2, 32, 32, 32.8, 33.4, 33.8, 34.4, 34.8, 35.2, 35.2, 35.4, 35.6,
                35.6, 36.2, 36.2, 38, 38, 39.2, 39.4, 40, 40.4, 41.6, 41.6, 42.4, 42.8, 42.8, 43, 44, 44.4, 45, 46.6, 47.8,
                47.8, 48.8, 50.6, 52, 53.2, 55, 55, 55.4, 57.6
            };
            double[] outputs = new double[]
            {
                0, -1.3, -2.7, 0, -2.7, -2.7, -2.7, -1.3, -2.7, -2.7, -1.3, -2.7, -2.7, -2.7, -5.4,
                -2.7, -5.4, 0, -2.7, -2.7, 0, -13.3, -5.4, -5.4, -9.300000000000001, -16, -22.8, -2.7, -22.8, -32.1, -53.5,
                -54.9, -40.2, -21.5, -21.5, -50.8, -42.9, -26.8, -21.5, -50.8, -61.7, -5.4, -80.40000000000001, -59, -71,
                -91.09999999999999, -77.7, -37.5, -85.59999999999999, -123.1, -101.9, -99.09999999999999, -104.4, -112.5,
                -50.8, -123.1, -85.59999999999999, -72.3, -127.2, -123.1, -117.9, -134, -101.9, -108.4, -123.1, -123.1, -128.5,
                -112.5, -95.09999999999999, -81.8, -53.5, -64.40000000000001, -57.6, -72.3, -44.3, -26.8, -5.4, -107.1, -21.5,
                -65.59999999999999, -16, -45.6, -24.2, 9.5, 4, 12, -21.5, 37.5, 46.9, -17.4, 36.2, 75, 8.1, 54.9, 48.2, 46.9,
                16, 45.6, 1.3, 75, -16, -54.9, 69.59999999999999, 34.8, 32.1, -37.5, 22.8, 46.9, 10.7, 5.4, -1.3, -21.5, -13.3,
                30.8, -10.7, 29.4, 0, -10.7, 14.7, -1.3, 0, 10.7, 10.7, -26.8, -14.7, -13.3, 0, 10.7, -14.7, -2.7, 10.7, -2.7, 10.7
            };
            Range j = new Range(inputs.Length);

            Vector[] inputsVec       = Util.ArrayInit(inputs.Length, i => Vector.FromArray(inputs[i]));
            VariableArray <Vector> x = Variable.Observed(inputsVec, j).Named("x");
            VariableArray <double> y = Variable.Observed(outputs, j).Named("y");
            // Set up the GP prior, which will be filled in later
            Variable <SparseGP> prior  = Variable.New <SparseGP>().Named("prior");
            Variable <SparseGP> prior2 = Variable.New <SparseGP>().Named("prior2");

            // The sparse GP variable - a distribution over functions
            Variable <IFunction> f = Variable <IFunction> .Random(prior).Named("f");

            Variable <IFunction> r = Variable <IFunction> .Random(prior2).Named("r");

            Variable <double> mean        = Variable.FunctionEvaluate(f, x[j]).Named("mean");
            Variable <double> logVariance = Variable.FunctionEvaluate(r, x[j]).Named("logVariance");
            Variable <double> variance    = Variable.Exp(logVariance);

            y[j] = Variable.GaussianFromMeanAndVariance(mean, variance);
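            // i.e. y[j] ~ N(f(x[j]), exp(r(x[j]))): the second GP, r, models the log of the observation-noise variance.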


            InferenceEngine engine = new InferenceEngine();
            GaussianProcess gp     = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));
            GaussianProcess gp2    = new GaussianProcess(new ConstantFunction(0), new SquaredExponential(0));

            // Fill in the sparse GP prior
            //Vector[] basis = Util.ArrayInit(120, i => Vector.FromArray(0.5*i));
            Vector[] basis = Util.ArrayInit(60, i => Vector.FromArray(1.0 * i));
            prior.ObservedValue  = new SparseGP(new SparseGPFixed(gp, basis));
            prior2.ObservedValue = new SparseGP(new SparseGPFixed(gp2, basis));
            // Infer the posterior Sparse GP
            SparseGP sgp = engine.Infer <SparseGP>(f);

            // Print the predictive distribution at each training input
            Console.WriteLine();
            Console.WriteLine("Predictions on training set:");
            for (int i = 0; i < outputs.Length; i++)
            {
                Gaussian post = sgp.Marginal(inputsVec[i]);
                //double postMean = post.GetMean();
                Console.WriteLine("f({0}) = {1}", inputs[i], post);
            }
            // TODO: change this path so it works cross-platform
            using (MatlabWriter writer = new MatlabWriter(@"..\..\HGPR.mat"))
            {
                int      n = outputs.Length;
                double[] m = new double[n];
                double[] s = new double[n];
                for (int i = 0; i < n; i++)
                {
                    Gaussian post = sgp.Marginal(inputsVec[i]);
                    double   mi, vi;
                    post.GetMeanAndVariance(out mi, out vi);
                    m[i] = mi;
                    s[i] = System.Math.Sqrt(vi);
                }
                writer.Write("mean", m);
                writer.Write("std", s);
            }
        }
Example #5
        public void GPClassificationTest()
        {
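            // GP classification on ten 1-D inputs. The basis contains only three of the training
            // inputs (the commented-out line below would use all of them), so this exercises the
            // sparse approximation rather than the full GP.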
            bool[]   yData = new bool[] { false, false, false, true, true, true, true, false, false, false };
            double[] xData = new double[]
            {
                -2, -1.555555555555556, -1.111111111111111, -0.6666666666666667, -0.2222222222222223, 0.2222222222222223, 0.6666666666666665, 1.111111111111111,
                1.555555555555555, 2
            };
            Vector[] xVec  = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[] basis = new Vector[] { xVec[1], xVec[4], xVec[8] };
            //basis = xVec;
            IKernelFunction      kf       = new SquaredExponential(0.0);
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <bool> y = Variable.Array <bool>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = (h[item] > 0);
            block.CloseBlock();

            InferenceEngine engine        = new InferenceEngine();
            SparseGP        sgp           = engine.Infer <SparseGP>(f);
            Vector          alphaExpected = Vector.FromArray(new double[] { -1.410457563120709, 1.521306076273262, -1.008600221619413 });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
            double[] xTest = new double[]
            {
                -2, -1, 0.0
            };
            Vector[] xTestVec = Array.ConvertAll(xTest, v => Vector.Constant(1, v));
            // computed by matlab/MNT/GP/test_gpc.m
            double[] yMeanTest = new double[]
            {
                -0.966351175090184, -0.123034591744284, 0.762757400008960
            };
            double[] yVarTest = new double[]
            {
                0.323871157983366, 0.164009511251333, 0.162068482365962
            };
            for (int i = 0; i < xTestVec.Length; i++)
            {
                Gaussian pred         = sgp.Marginal(xTestVec[i]);
                Gaussian predExpected = new Gaussian(yMeanTest[i], yVarTest[i]);
                Console.WriteLine("f({0}) = {1} should be {2}", xTest[i], pred, predExpected);
                Assert.True(predExpected.MaxDiff(pred) < 1e-4);
            }
            double evExpected = -4.907121241357144;
            double evActual   = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-6) < 1e-4);
        }
Example #6
        public void GPClassificationExample()
        {
            // The data
            bool[]   ydata = { true, true, false, true, false, false };
            Vector[] xdata = new Vector[]
            {
                Vector.FromArray(new double[2] {
                    0, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 1
                }),
                Vector.FromArray(new double[2] {
                    1, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 0.5
                }),
                Vector.FromArray(new double[2] {
                    1.5, 0
                }),
                Vector.FromArray(new double[2] {
                    0.5, 1.0
                })
            };

            // Open an evidence block to allow model scoring
            Variable <bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock         block    = Variable.If(evidence);

            // Set up the GP prior, which will be filled in later
            Variable <SparseGP> prior = Variable.New <SparseGP>().Named("prior");

            // The sparse GP variable - a distribution over functions
            Variable <IFunction> f = Variable <IFunction> .Random(prior).Named("f");

            // The locations to evaluate the function
            VariableArray <Vector> x = Variable.Constant(xdata).Named("x");
            Range j = x.Range.Named("j");

            // The observation model
            VariableArray <bool> y = Variable.Array <bool>(j).Named("y");

            y[j] = (Variable.GaussianFromMeanAndVariance(Variable.FunctionEvaluate(f, x[j]), 0.1) > 0);
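            // Thresholding the noisy function value gives a probit-style likelihood:
            // P(y[j] = true | f) = Phi(f(x[j]) / sqrt(0.1)).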

            // Attach the observations
            y.ObservedValue = ydata;

            // Close the evidence block
            block.CloseBlock();

            InferenceEngine engine = new InferenceEngine();

            // The basis
            Vector[] basis = new Vector[]
            {
                Vector.FromArray(new double[2] {
                    0.2, 0.2
                }),
                Vector.FromArray(new double[2] {
                    0.2, 0.8
                }),
                Vector.FromArray(new double[2] {
                    0.8, 0.2
                }),
                Vector.FromArray(new double[2] {
                    0.8, 0.8
                })
            };

            for (int trial = 0; trial < 3; trial++)
            {
                // The kernel
                IKernelFunction kf;
                if (trial == 0)
                {
                    kf = new SquaredExponential(-0.0);
                    //kf = new LinearKernel(new double[] { 0.0, 0.0 });
                }
                else if (trial == 1)
                {
                    kf = new SquaredExponential(-0.5);
                }
                else
                {
                    kf = new NNKernel(new double[] { 0.0, 0.0 }, -1.0);
                }

                // Fill in the sparse GP prior
                prior.ObservedValue = new SparseGP(new SparseGPFixed(kf, basis));

                // Model score
                double NNscore = engine.Infer <Bernoulli>(evidence).LogOdds;
                Console.WriteLine("{0} evidence = {1}", kf, NNscore.ToString("g4"));
            }

            // Infer the posterior Sparse GP
            SparseGP sgp = engine.Infer <SparseGP>(f);

            // Check that training set is classified correctly
            Console.WriteLine();
            Console.WriteLine("Predictions on training set:");
            for (int i = 0; i < ydata.Length; i++)
            {
                Gaussian post     = sgp.Marginal(xdata[i]);
                double   postMean = post.GetMean();
                string   comment  = (ydata[i] == (postMean > 0.0)) ? "correct" : "incorrect";
                Console.WriteLine("f({0}) = {1} ({2})", xdata[i], post, comment);
                Assert.True(ydata[i] == (postMean > 0.0));
            }
        }
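The example above only evaluates the posterior at the training points. To get a class probability at a new location, the inferred sparse GP can be queried directly. A minimal sketch, assuming the same 0.1 observation-noise variance as in the model (the test point xNew is made up for illustration):

            // Hypothetical test point; any 2-D vector in the input space would do.
            Vector xNew = Vector.FromArray(new double[2] { 0.4, 0.6 });
            Gaussian fNew = sgp.Marginal(xNew);   // posterior over f(xNew)
            double mNew, vNew;
            fNew.GetMeanAndVariance(out mNew, out vNew);
            // Under the thresholded-Gaussian likelihood: P(y = true) = Phi(mean / sqrt(variance + 0.1)).
            double pTrue = MMath.NormalCdf(mNew / System.Math.Sqrt(vNew + 0.1));
            Console.WriteLine("P(y=true | x = {0}) = {1:g4}", xNew, pTrue);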
Example #7
        public void GPRegressionTest()
        {
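            // GP regression: y[i] = f(x[i]) + Gaussian noise with variance 0.1, on the same ten
            // 1-D inputs and three-point basis as GPClassificationTest above.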
            double[] yData = new double[]
            {
                -0.06416828853982412, -0.6799959810206935, -0.4541652863622044, 0.155770359928991, 1.036659040456137, 0.7353821980830825, 0.8996680933259047,
                -0.05368704705684217, -0.7905775695015919, -0.1436284683992815
            };
            double[] xData = new double[]
            {
                -2, -1.555555555555556, -1.111111111111111, -0.6666666666666667, -0.2222222222222223, 0.2222222222222223, 0.6666666666666665, 1.111111111111111,
                1.555555555555555, 2
            };
            Vector[]             xVec     = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[]             basis    = new Vector[] { xVec[1], xVec[4], xVec[8] };
            IKernelFunction      kf       = new SquaredExponential(System.Math.Log(2.0));
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <double> y = Variable.Array <double>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = Variable.GaussianFromMeanAndVariance(h[item], 0.1);
            block.CloseBlock();

            InferenceEngine        engine        = new InferenceEngine();
            SparseGP               sgp           = engine.Infer <SparseGP>(f);
            Vector                 alphaExpected = Vector.FromArray(new double[] { -3.250044160725389, 4.579296091435270, -2.227005562666341 });
            PositiveDefiniteMatrix betaExpected  = new PositiveDefiniteMatrix(new double[, ]
            {
                { 3.187555652658986, -3.301824438047169, 1.227566907279797 },
                { -3.30182443804717, 5.115027119603418, -2.373085083966294 },
                { 1.227566907279797, -2.373085083966294, 2.156308696222915 }
            });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
            Console.WriteLine(StringUtil.JoinColumns("beta = ", sgp.Beta, " should be ", betaExpected));
            double[] xTest = new double[]
            {
                -2, -1, 0.0
            };
            Vector[] xTestVec = Array.ConvertAll(xTest, v => Vector.Constant(1, v));
            // computed by matlab/MNT/GP/test_gpr.m
            double[] yMeanTest = new double[]
            {
                -0.544583265595561, 0.134323399801302, 0.503623822120711
            };
            double[] yVarTest = new double[]
            {
                0.058569682375201, 0.022695532903985, 0.024439582002951
            };
            for (int i = 0; i < xTestVec.Length; i++)
            {
                Gaussian pred         = sgp.Marginal(xTestVec[i]);
                Gaussian predExpected = new Gaussian(yMeanTest[i], yVarTest[i]);
                Console.WriteLine("f({0}) = {1} should be {2}", xTest[i], pred, predExpected);
                Assert.True(predExpected.MaxDiff(pred) < 1e-4);
            }
            double evExpected = -13.201173794945003;
            double evActual   = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-6) < 1e-4);
        }
        public void Run()
        {
            InferenceEngine engine = new InferenceEngine();

            if (!(engine.Algorithm is Algorithms.ExpectationPropagation))
            {
                Console.WriteLine("This example only runs with Expectation Propagation");
                return;
            }

            // The data
            Vector[] inputs = new Vector[]
            {
                Vector.FromArray(new double[2] {
                    0, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 1
                }),
                Vector.FromArray(new double[2] {
                    1, 0
                }),
                Vector.FromArray(new double[2] {
                    0, 0.5
                }),
                Vector.FromArray(new double[2] {
                    1.5, 0
                }),
                Vector.FromArray(new double[2] {
                    0.5, 1.0
                })
            };

            bool[] outputs = { true, true, false, true, false, false };

            // Open an evidence block to allow model scoring
            Variable <bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock         block    = Variable.If(evidence);

            // Set up the GP prior, a distribution over functions, which will be filled in later
            Variable <SparseGP> prior = Variable.New <SparseGP>().Named("prior");

            // The sparse GP variable - a random function
            Variable <IFunction> f = Variable <IFunction> .Random(prior).Named("f");

            // The locations to evaluate the function
            VariableArray <Vector> x = Variable.Observed(inputs).Named("x");
            Range j = x.Range.Named("j");

            // The observation model
            VariableArray <bool> y     = Variable.Observed(outputs, j).Named("y");
            Variable <double>    score = Variable.FunctionEvaluate(f, x[j]).Named("score");

            y[j] = (Variable.GaussianFromMeanAndVariance(score, 0.1) > 0);

            // Close the evidence block
            block.CloseBlock();

            // The basis
            Vector[] basis = new Vector[]
            {
                Vector.FromArray(new double[2] {
                    0.2, 0.2
                }),
                Vector.FromArray(new double[2] {
                    0.2, 0.8
                }),
                Vector.FromArray(new double[2] {
                    0.8, 0.2
                }),
                Vector.FromArray(new double[2] {
                    0.8, 0.8
                })
            };

            for (int trial = 0; trial < 3; trial++)
            {
                // The kernel
                IKernelFunction kf;
                if (trial == 0)
                {
                    kf = new SquaredExponential(-0.0);
                }
                else if (trial == 1)
                {
                    kf = new SquaredExponential(-0.5);
                }
                else
                {
                    kf = new NNKernel(new double[] { 0.0, 0.0 }, -1.0);
                }

                // Fill in the sparse GP prior
                GaussianProcess gp = new GaussianProcess(new ConstantFunction(0), kf);
                prior.ObservedValue = new SparseGP(new SparseGPFixed(gp, basis));

                // Model score
                double NNscore = engine.Infer <Bernoulli>(evidence).LogOdds;
                Console.WriteLine("{0} evidence = {1}", kf, NNscore.ToString("g4"));
            }

            // Infer the posterior Sparse GP
            SparseGP sgp = engine.Infer <SparseGP>(f);

            // Check that training set is classified correctly
            Console.WriteLine("");
            Console.WriteLine("Predictions on training set:");
            for (int i = 0; i < outputs.Length; i++)
            {
                Gaussian post     = sgp.Marginal(inputs[i]);
                double   postMean = post.GetMean();
                string   comment  = (outputs[i] == (postMean > 0.0)) ? "correct" : "incorrect";
                Console.WriteLine("f({0}) = {1} ({2})", inputs[i], post, comment);
            }
        }