/// <summary>
/// Creates an untrained Bayes point machine; feature configuration is delegated
/// to the base class and a default <c>InferenceEngine</c> is allocated.
/// </summary>
/// <param name="numOfFeatures">Number of features per input vector.</param>
/// <param name="featureSelection">Indices of the features to use (passed to base).</param>
/// <param name="noise">Observation-noise parameter (passed to base).</param>
public SimpleBayesPointMachine(int numOfFeatures, int[] featureSelection, double noise)
            : base(numOfFeatures, featureSelection, noise)
        {
            // Training must happen before prediction; start in the untrained state.
            this.isTrained = false;

            Engine = new InferenceEngine();
        }
Exemple #2
0
 /// <summary>
 /// Creates an expansion context rooted at <paramref name="rootWildcard"/>.
 /// The root level is created immediately and queued for both expansion and generation.
 /// </summary>
 /// <param name="mind">Engine that owns this context.</param>
 /// <param name="rootWildcard">Wildcard triplet the context is rooted at.</param>
 internal Context(InferenceEngine mind, WildcardTriplet rootWildcard)
 {
     _mind = mind;
     // null parent marks this as the root level.
     _rootLevel = getState(null, rootWildcard);
     _levelsToExpand.Enqueue(_rootLevel);
     _levelsToGenerate.Enqueue(_rootLevel);
 }
        //public int[] Classification(Vector[] features, int[] labels)
        //{
        //}
        /// <summary>
        /// Bayesian linear regression: learns a weight vector from (features, values)
        /// pairs, then returns the model's mean prediction for each training input.
        /// </summary>
        /// <param name="features">Input vectors; all must share the same dimensionality.</param>
        /// <param name="values">Observed target value for each input vector.</param>
        /// <returns>Predicted mean for each input, using the posterior mean of the weights.</returns>
        public double[] Regression(Vector[] features, double[] values)
        {
            // Prior over the weights: zero mean with a Wishart-distributed precision.
            var wMeans = Variable.Vector(Vector.Zero(features[0].Count).ToArray());
            var wPrecision = Variable.WishartFromShapeAndRate(100, PositiveDefiniteMatrix.IdentityScaledBy(features[0].Count, 0.01));
            var w = Variable.VectorGaussianFromMeanAndPrecision(wMeans, wPrecision).Named("w");
            var numItems = Variable.New<int>().Named("numItems");
            var i = new Range(numItems).Named("i");
            i.AddAttribute(new Sequential());

            var noisePrecision = Variable.New<double>().Named("noisePrecision");

            var x = Variable.Array<Vector>(i).Named("x");
            var y = Variable.Array<double>(i).Named("y");

            // Likelihood: y[i] ~ N(w . x[i], 1/noisePrecision).
            using (Variable.ForEach(i))
            {
                y[i] = Variable.GaussianFromMeanAndPrecision(Variable.InnerProduct(w, x[i]), noisePrecision);
            }

            numItems.ObservedValue = features.Length;
            x.ObservedValue = features;
            y.ObservedValue = values;
            // BUG FIX: noisePrecision was created with Variable.New<double>() but never
            // observed, so inference would fail at run time with an "observed value not
            // set" error. Observe a unit noise precision before running inference.
            noisePrecision.ObservedValue = 1.0;

            var engine = new InferenceEngine();
            engine.Compiler.UseSerialSchedules = true;
            engine.ShowProgress = false;
            var wPosterior = engine.Infer<VectorGaussian>(w);

            // Clamp w to its posterior mean, free y, and infer y's marginals to predict.
            y.ClearObservedValue();
            w.ObservedValue = wPosterior.GetMean();
            var inferredValues = engine.Infer<IList<Gaussian>>(y);
            return inferredValues.Select(v => v.GetMean()).ToArray();
        }
Exemple #4
0
        /// <summary>
        /// Test: recover the angle/scale parameterisation of a 2-D Gaussian precision
        /// matrix from 5000 sampled points, using a point estimate for the precision.
        /// </summary>
        private static void AngleScaleTest()
        {
            // Priors over the rotation angle and the two axis scales.
            Variable<double> angle = Variable.GaussianFromMeanAndVariance(Math.PI / 6, 0.4 * 0.4);
            Variable<double> scaleX = Variable.GaussianFromMeanAndVariance(1.5, 2 * 2);
            Variable<double> scaleY = Variable.GaussianFromMeanAndVariance(1.5, 2 * 2);
            // Deterministic factor building the precision matrix from (scaleX, scaleY, angle).
            Variable<PositiveDefiniteMatrix> prec = Variable<PositiveDefiniteMatrix>.Factor(ShapeFactors.MatrixFromAngleScale, scaleX, scaleY, angle);

            Vector trueMean = Vector.Zero(2);

            Range pointRange = new Range(5000);
            VariableArray<Vector> points = Variable.Array<Vector>(pointRange);
            points[pointRange] = Variable.VectorGaussianFromMeanAndPrecision(trueMean, prec).ForEach(pointRange);

            // Ground truth (2, 3, pi/5) deliberately differs from the prior means,
            // so recovery is genuinely tested rather than reproduced from the prior.
            PositiveDefiniteMatrix truePrec = ShapeFactors.MatrixFromAngleScale(2.0, 3.0, Math.PI / 5);
            Vector[] observedPoints = Util.ArrayInit(pointRange.SizeAsInt, i => VectorGaussian.Sample(trueMean, truePrec));

            points.ObservedValue = observedPoints;
            // Infer prec as a point estimate; the Wishart prototype fixes its marginal form.
            prec.AddAttribute(new PointEstimate());
            prec.AddAttribute(new MarginalPrototype(new Wishart(2)));

            InferenceEngine engine = new InferenceEngine();
            // NOTE(review): quality bar lowered to Unknown — presumably because the
            // custom ShapeFactors factor has no quality band; confirm.
            engine.Compiler.RequiredQuality = engine.Compiler.RecommendedQuality = QualityBand.Unknown;
            Console.WriteLine(engine.Infer(angle));
            Console.WriteLine(engine.Infer(scaleX));
            Console.WriteLine(engine.Infer(scaleY));
        }
        /// <summary>
        /// Builds a Bayes point machine for sentiment classification: one graph for
        /// training (learning the weight posterior) and a separate graph for testing
        /// (predicting with weights drawn from that posterior).
        /// </summary>
        public SentimentIndex()
            : base("Sentiment Index")
        {
            // --- Training model ---
            numberOfTrainingItems = Variable.New<int>();
            var rangeOfTrainingItems = new Range(numberOfTrainingItems);
            trainingInputs = Variable.Array<Vector>(rangeOfTrainingItems);
            trainingOutputs = Variable.Array<bool>(rangeOfTrainingItems);

            // Standard-normal prior over the weight vector.
            weights = Variable.Random(new VectorGaussian(Vector.Zero(numberOfFeatures), PositiveDefiniteMatrix.Identity(numberOfFeatures)));

            // Label = sign of a noisy inner product of weights and features.
            using (Variable.ForEach(rangeOfTrainingItems))
            {
                trainingOutputs[rangeOfTrainingItems] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(weights, trainingInputs[rangeOfTrainingItems]), noise));
            }

            trainingEngine = new InferenceEngine();
            trainingEngine.ShowProgress = false;

            // --- Testing model: same likelihood, weights drawn from the learned posterior ---
            numberOfTestingItems = Variable.New<int>();
            var rangeOfTestingItems = new Range(numberOfTestingItems);
            testingInputs = Variable.Array<Vector>(rangeOfTestingItems);
            testingOutputs = Variable.Array<bool>(rangeOfTestingItems);

            // The weight posterior is observed here after training completes.
            weightsPosteriorDistribution = Variable.New<VectorGaussian>();
            var testWeights = Variable<Vector>.Random(weightsPosteriorDistribution);

            using (Variable.ForEach(rangeOfTestingItems))
            {
                testingOutputs[rangeOfTestingItems] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(testWeights, testingInputs[rangeOfTestingItems]), noise));
            }

            testingEngine = new InferenceEngine();
            testingEngine.ShowProgress = false;
        }
		/// <summary>
		/// Trains a Bayes point machine on six customers, then predicts purchase
		/// probabilities for three unseen customers using the learned weights.
		/// </summary>
		public void Run()
		{
			// Training data: income, age, and whether the customer bought.
			double[] trainIncomes = { 63, 16, 28, 55, 22, 20 };
			double[] trainAges = { 38, 23, 40, 27, 18, 40 };
			bool[] purchased = { true, false, true, true, false, false };

			// Observed labels and a standard-normal prior over the 3-D weight vector.
			VariableArray<bool> y = Variable.Observed(purchased).Named("y");
			Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(3),
				PositiveDefiniteMatrix.Identity(3))).Named("w");
			BayesPointMachine(trainIncomes, trainAges, w, y);

			InferenceEngine engine = new InferenceEngine();
			if (engine.Algorithm is GibbsSampling)
			{
				Console.WriteLine("This model has a non-conjugate factor, and therefore cannot use Gibbs sampling");
				return;
			}

			VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
			Console.WriteLine("Dist over w=\n"+wPosterior);

			// Score three new customers by rebuilding the model with the learned weights.
			double[] testIncomes = { 58, 18, 22 };
			double[] testAges = { 36, 24, 37 };
			VariableArray<bool> predictions = Variable.Array<bool>(new Range(testAges.Length)).Named("ytest");
			BayesPointMachine(testIncomes, testAges, Variable.Random(wPosterior).Named("w"), predictions);
			Console.WriteLine("output=\n" + engine.Infer(predictions));
		}
Exemple #7
0
        /// <summary>
        /// Trains a Bayes point machine on data loaded from disk and prints
        /// predictions for a held-out test file.
        /// NOTE(review): file paths are hard-coded to one developer's machine.
        /// </summary>
        static void Main(string[] args)
        {
            // data
            // Assumes readData returns an ArrayList of double[] rows with the label
            // row last — TODO confirm against readData.
            ArrayList trainingData = readData(@"C:\Users\Jeremy\Documents\Visual Studio 2010\Projects\InferSandbox\train.txt");

            // Create target y
            VariableArray<double> y = Variable.Observed((double[])(trainingData[trainingData.Count-1])).Named("y");
            // NOTE(review): w is sized from trainingData.Count BEFORE the label row is
            // removed (feature rows + 1); presumably the extra dimension is a bias
            // term — confirm against BayesPointMachine.
            Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(trainingData.Count),
                PositiveDefiniteMatrix.Identity(trainingData.Count))).Named("w");
            trainingData.RemoveAt(trainingData.Count - 1);
            BayesPointMachine(trainingData, w, y);

            InferenceEngine engine = new InferenceEngine();
            if (!(engine.Algorithm is GibbsSampling))
            {
                VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
                Console.WriteLine("Dist over w=\n" + wPosterior);

                ArrayList testData = readData(@"C:\Users\Jeremy\Documents\Visual Studio 2010\Projects\InferSandbox\test.txt");

                // Rebuild the model over the test data with weights drawn from the posterior.
                VariableArray<double> ytest = Variable.Array<double>(new Range(((double[])(testData[0])).Length)).Named("ytest");
                BayesPointMachine(testData, Variable.Random(wPosterior).Named("w"), ytest);
                Console.WriteLine("output=\n" + engine.Infer(ytest));
            }
            else Console.WriteLine("This model has a non-conjugate factor, and therefore cannot use Gibbs sampling");
        }
        /// <summary>
        /// Fits a mixture of full-covariance Gaussians to the data with variational
        /// message passing and returns per-cluster mean/covariance plus model evidence.
        /// </summary>
        /// <param name="observedData">Data points; all must share one dimensionality.</param>
        /// <param name="clusters">Number of mixture components.</param>
        public InferenceResult<Cluster[]> Infer(Vector[] observedData, int clusters)
        {
            var dimensions = observedData.First().Count;
            // Evidence gate: the model inside the If-block contributes to the marginal likelihood.
            var evidence = Variable.Bernoulli(0.5).Named("evidence");
            var evidenceBlock = Variable.If(evidence);
            var clustersRange = new Range(clusters).Named("clustersRange");
            // Broad priors over component means and precisions.
            var meansPrior = Variable.Array<Vector>(clustersRange).Named("meansPrior");
            meansPrior[clustersRange] = Variable
                .VectorGaussianFromMeanAndPrecision(
                    Vector.Zero(dimensions),
                    PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
                .ForEach(clustersRange);

            var precisionsPrior = Variable.Array<PositiveDefiniteMatrix>(clustersRange).Named("precisionsPrior");
            precisionsPrior[clustersRange] = Variable.WishartFromShapeAndRate(100, PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
                .ForEach(clustersRange);

            // Symmetric Dirichlet(1) prior over the mixing weights.
            var initialWeights = Enumerable.Range(0, clusters).Select(_ => 1.0).ToArray();
            var mixtureWeightsPrior = Variable.Dirichlet(clustersRange, initialWeights).Named("mixtureWeightsPrior");

            var dataRange = new Range(observedData.Length).Named("dataRange");
            var data = Variable.Array<Vector>(dataRange).Named("data");

            // Latent cluster assignment for each data point.
            var latentIndex = Variable.Array<int>(dataRange).Named("latentIndex");

            using (Variable.ForEach(dataRange))
            {
                latentIndex[dataRange] = Variable.Discrete(mixtureWeightsPrior);
                using (Variable.Switch(latentIndex[dataRange]))
                {
                    data[dataRange] = Variable.VectorGaussianFromMeanAndPrecision(meansPrior[latentIndex[dataRange]], precisionsPrior[latentIndex[dataRange]]);
                }
            }

            // Random initialisation of the assignments breaks label symmetry.
            var zinit = new Discrete[dataRange.SizeAsInt];
            for (int i = 0; i < zinit.Length; i++)
                zinit[i] = Discrete.PointMass(Rand.Int(clustersRange.SizeAsInt), clustersRange.SizeAsInt);
            latentIndex.InitialiseTo(Distribution<int>.Array(zinit));

            evidenceBlock.CloseBlock();

            data.ObservedValue = observedData;

            var ie = new InferenceEngine(new VariationalMessagePassing());
            ie.ShowProgress = false;

            // NOTE(review): mixtureWeightsPosterior is computed but never consumed.
            var mixtureWeightsPosterior = ie.Infer(mixtureWeightsPrior);
            var meansPosterior = ie.Infer<VectorGaussian[]>(meansPrior);
            var precisionsPosterior = ie.Infer<Wishart[]>(precisionsPrior);
            var bEvidence = ie.Infer<Bernoulli>(evidence);

            // Covariance reported as the inverse of the expected precision.
            var result = new List<Cluster>();
            for (var i = 0; i < clusters; i++)
            {
                result.Add(new Cluster(meansPosterior[i].GetMean(), precisionsPosterior[i].GetMean().Inverse()));
            }

            return new InferenceResult<Cluster[]>(bEvidence, result.ToArray());
        }
Exemple #9
0
		// Sample data from a DINA/NIDA model and then use Infer.NET to recover the parameters.
		/// <summary>
		/// Samples synthetic responses from a DINA (or NIDA) cognitive-diagnosis model
		/// and checks that EP inference recovers the per-student skills and the
		/// slip/guess/skill-probability parameters.
		/// </summary>
		public void Run()
		{
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation))
			{
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			}

			// Toggle between the DINA and NIDA variants of the model.
			bool useDina = true;
			Beta slipPrior = new Beta(1, 10);
			Beta guessPrior = new Beta(1, 10);
			Rand.Restart(0);
			int nStudents = 100;
			int nQuestions = 20;
			int nSkills = 3;
			int[][] skillsRequired = new int[nQuestions][];
			for (int q = 0; q < nQuestions; q++) {
				// each question requires a random set of skills
				int[] skills = Rand.Perm(nSkills);
				int n = Rand.Int(nSkills)+1;
				skillsRequired[q] = Util.ArrayInit(n, i => skills[i]);
				Console.WriteLine("skillsRequired[{0}] = {1}", q, Util.CollectionToString(skillsRequired[q]));
			}
			double[] pSkill, slip, guess;
			bool[][] hasSkill;
			VariableArray<double> slipVar, guessVar, pSkillVar;
			VariableArray<VariableArray<bool>,bool[][]> hasSkillVar;
			// Sample ground truth, then build the matching inference model.
			if (useDina) {
				bool[][] responses = DinaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				DinaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			} else {
				bool[][] responses = NidaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				NidaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			}

			engine.NumberOfIterations = 10;
			// Compare the MAP skill assignment (LogOdds > 0) to the sampled ground truth.
			Bernoulli[][] hasSkillPost = engine.Infer<Bernoulli[][]>(hasSkillVar);
			int numErrors = 0;
			for (int i = 0; i < nStudents; i++) {
				for (int s = 0; s < nSkills; s++) {
					if (hasSkill[i][s] != (hasSkillPost[i][s].LogOdds > 0)) numErrors++;
				}
			}
			Console.WriteLine("{0:0}% of skills recovered correctly", 100.0 - 100.0*numErrors/(nStudents*nSkills));
			Beta[] pSkillPost = engine.Infer<Beta[]>(pSkillVar);
			Beta[] slipPost = engine.Infer<Beta[]>(slipVar);
			Beta[] guessPost = engine.Infer<Beta[]>(guessVar);
			for (int s = 0; s < nSkills; s++) {
				Console.WriteLine("pSkill[{0}] = {1} (sampled from {2})", s, pSkillPost[s], pSkill[s].ToString("g4"));
			}
			for (int i = 0; i < Math.Min(3,slipPost.Length); i++)	{
				Console.WriteLine("slip[{0}] = {1} (sampled from {2})", i, slipPost[i], slip[i].ToString("g4"));
			}
			for (int i = 0; i < Math.Min(3,guessPost.Length); i++) {
				Console.WriteLine("guess[{0}] = {1} (sampled from {2})", i, guessPost[i], guess[i].ToString("g4"));
			}
		}
Exemple #10
0
		/// <summary>
		/// Builds the cyclist model: average time and traffic noise drawn from
		/// priors that are filled in (observed) before each inference run.
		/// </summary>
		public virtual void CreateModel()
		{
			// Each prior is a placeholder; its observed value is supplied later.
			AverageTimePrior = Variable.New<Gaussian>();
			AverageTime = Variable.Random<double, Gaussian>(AverageTimePrior);

			TrafficNoisePrior = Variable.New<Gamma>();
			TrafficNoise = Variable.Random<double, Gamma>(TrafficNoisePrior);

			// Reuse an engine supplied by the caller when one is already set.
			if (InferenceEngine == null)
			{
				InferenceEngine = new InferenceEngine();
			}
		}
        /// <summary>
        /// Fits a sparse Gaussian-process regression to SPY prices (predicting each
        /// price from the previous one) and publishes input, inferred mean and
        /// inferred standard-deviation series.
        /// FIXES: removed an unused Random local and dead commented-out basis code;
        /// corrected the misspelled "infered" series labels.
        /// </summary>
        public override void Run()
        {
            var prices = GetPrices("SPY").Select(t => t.Item2).ToArray();
            var inputs = prices.Take(prices.Length - 1).ToArray();
            var inputVectors = inputs.Select(i => Vector.FromArray(new[] { i })).ToArray();
            var outputs = prices.Skip(1).ToArray();

            // Set up the GP prior, which will be filled in later
            var prior = Variable.New<SparseGP>().Named("prior");

            // The sparse GP variable - a distribution over functions
            var f = Variable<IFunction>.Random(prior).Named("f");

            // The locations to evaluate the function
            var x = Variable.Observed(inputVectors).Named("x");
            var j = x.Range.Named("j");
            var y = Variable.Observed(outputs, j).Named("y");
            // Likelihood: observed output = f(x) plus Gaussian noise of variance 0.1.
            y[j] = Variable.GaussianFromMeanAndVariance(Variable.FunctionEvaluate(f, x[j]), 0.1);
            var kf = new SquaredExponential(-0.5);

            // The basis: ten evenly spaced pseudo-input locations 10, 20, ..., 100.
            var basis = Enumerable.Range(1, 10).Select(i => Vector.FromArray(new double[1] { i * 10 })).ToArray();

            var gp = new GaussianProcess(new ConstantFunction(0), kf);

            prior.ObservedValue = new SparseGP(new SparseGPFixed(gp, basis));
            var engine = new InferenceEngine(new ExpectationPropagation());
            var sgp = engine.Infer<SparseGP>(f);

            // Posterior predictive mean and standard deviation at each input location.
            var means = sgp.Mean(inputVectors).ToArray();
            var stdDevs = inputVectors.Select(iv => Math.Sqrt(sgp.Variance(iv))).ToArray();

            this.Series = new[]
            {
                new LabelledSeries<Tuple<double,double>>(
                    "input",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, inputs[i]))),
                new LabelledSeries<Tuple<double,double>>(
                    "inferred mean",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, means[i]))),
                new LabelledSeries<Tuple<double,double>>(
                    "inferred stddev",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, stdDevs[i]))),
            };
        }
Exemple #12
0
		/// <summary>
		/// Builds two cyclist prediction models on one shared engine and defines
		/// derived variables comparing their predicted times.
		/// </summary>
		public void CreateModel()
		{
			CommonEngine = new InferenceEngine();

			// Both cyclists share the same engine so the derived comparisons can be inferred.
			cyclist1 = new CyclistPrediction() { InferenceEngine = CommonEngine };
			cyclist1.CreateModel();
			cyclist2 = new CyclistPrediction() { InferenceEngine = CommonEngine };
			cyclist2.CreateModel();

			// Derived random variables over the two predictions.
			TimeDifference = cyclist1.TomorrowsTime - cyclist2.TomorrowsTime;
			Cyclist1IsFaster = cyclist1.TomorrowsTime < cyclist2.TomorrowsTime;
		}
Exemple #13
0
		/// <summary>
		/// Builds a cascade-style click model over numRanks ranked documents: users
		/// examine results top-down, click based on per-rank appeal, judge relevance
		/// after clicking, and continue with probabilities that depend on the
		/// click/relevance outcome at the previous rank.
		/// </summary>
		/// <param name="numRanks">Number of ranked positions modelled.</param>
		public Inference(int numRanks)
		{
			nRanks = numRanks;
			// Continuation probabilities are observed later, before inference runs.
			probNextIfNotClick = Variable.New<double>();
			probNextIfClickNotRel = Variable.New<double>();
			probNextIfClickRel = Variable.New<double>();
			nUsers = Variable.New<int>();
			Range u = new Range(nUsers);

			appeal = new Variable<double>[nRanks];
			relevance = new Variable<double>[nRanks];
			examine = new VariableArray<bool>[nRanks];
			click = new VariableArray<bool>[nRanks];
			isRel = new VariableArray<bool>[nRanks];

			// user independent variables
			for (int d = 0; d < nRanks; d++)
			{
				appeal[d] = Variable.Beta(1, 1);
				relevance[d] = Variable.Beta(1, 1);
			}

			// Main model code
			for (int d = 0; d < nRanks; d++)
			{
				examine[d] = Variable.Array<bool>(u);
				click[d] = Variable.Array<bool>(u);
				isRel[d] = Variable.Array<bool>(u);
				if (d == 0)
					// The top-ranked result is always examined.
					examine[d][u] = Variable.Bernoulli(1).ForEach(u);
				else
					using (Variable.ForEach(u))
					{
						// Continuation after a click depends on perceived relevance
						// of the previous result.
						var nextIfClick = Variable.New<bool>();
						using (Variable.If(isRel[d-1][u]))
							nextIfClick.SetTo(Variable.Bernoulli(probNextIfClickRel));
						using (Variable.IfNot(isRel[d-1][u]))
							nextIfClick.SetTo(Variable.Bernoulli(probNextIfClickNotRel));
						var nextIfNotClick = Variable.Bernoulli(probNextIfNotClick);
						var next = 
							(((!click[d - 1][u]) & nextIfNotClick) | (click[d - 1][u]  & nextIfClick));
						// Examination requires having examined the previous rank AND continuing.
						examine[d][u] = examine[d - 1][u] & next;
					}

				using (Variable.ForEach(u))
				{
					// A click requires examination; a relevance judgement requires a click.
					click[d][u] = examine[d][u] & Variable.Bernoulli(appeal[d]);
					isRel[d][u] = click[d][u] & Variable.Bernoulli(relevance[d]);
				}
			}
			ie = new InferenceEngine();
		}
		/// <summary>
		/// Two-component 2-D mixture-of-Gaussians tutorial: learns component means,
		/// precisions and mixing weights from 300 generated points.
		/// </summary>
		public void Run()
		{
			// Define a range for the number of mixture components
			Range k = new Range(2).Named("k");

			// Mixture component means
			VariableArray<Vector> means = Variable.Array<Vector>(k).Named("means");
			means[k] = Variable.VectorGaussianFromMeanAndPrecision(
				Vector.FromArray(0.0,0.0),
				PositiveDefiniteMatrix.IdentityScaledBy(2,0.01)).ForEach(k);

			// Mixture component precisions
			VariableArray<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(k).Named("precs");
			precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2,0.01)).ForEach(k);

			// Mixture weights: symmetric Dirichlet(1, 1) prior.
			Variable<Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");

			// Create a variable array which will hold the data
			Range n = new Range(300).Named("n");
			VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
			// Create latent indicator variable for each data point
			VariableArray<int> z = Variable.Array<int>(n).Named("z");

			// The mixture of Gaussians model
			using (Variable.ForEach(n)) {
				z[n] = Variable.Discrete(weights);
				using (Variable.Switch(z[n])) {
					data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
				}
			}

			// Attach some generated data
			data.ObservedValue = GenerateData(n.SizeAsInt);

			// Initialise messages randomly so as to break symmetry
			Discrete[] zinit = new Discrete[n.SizeAsInt];
			for (int i = 0; i < zinit.Length; i++) 
			  zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
			z.InitialiseTo(Distribution<int>.Array(zinit)); 

			// The inference
			// NOTE(review): results are printed for any algorithm EXCEPT EP —
			// presumably this gated mixture is unsupported by EP; confirm intent.
			InferenceEngine ie = new InferenceEngine();
			if (!(ie.Algorithm is ExpectationPropagation))
			{
				Console.WriteLine("Dist over pi=" + ie.Infer(weights));
				Console.WriteLine("Dist over means=\n" + ie.Infer(means));
				Console.WriteLine("Dist over precs=\n" + ie.Infer(precs));
			}
			else
				Console.WriteLine("This example is not supported by Expectation Propagation");
		}
		/// <summary>
		/// Infers a standard Gaussian constrained to exceed a threshold, sweeping the
		/// threshold over 0.0, 0.1, ..., 1.0.
		/// </summary>
		public void Run()
		{
			double threshold = 0;
			while (threshold <= 1)
			{
				// x ~ N(0, 1), truncated below at the current threshold.
				Variable<double> x = Variable.GaussianFromMeanAndVariance(0, 1).Named("x");
				Variable.ConstrainTrue(x > threshold);

				InferenceEngine engine = new InferenceEngine();
				if (engine.Algorithm is ExpectationPropagation)
				{
					Console.WriteLine("Dist over x given thresh of " + threshold + "=" + engine.Infer(x));
				}
				else
				{
					Console.WriteLine("This example only runs with Expectation Propagation");
				}

				threshold += 0.1;
			}
		}
Exemple #16
0
 /// <summary>
 /// Conditions a Gaussian (with the given mean/precision priors) on one observed
 /// coordinate and returns the posterior over the full vector, inferred with VMP.
 /// </summary>
 /// <param name="priors">Prior over the mean (Item1) and precision (Item2).</param>
 /// <param name="observedIndex">Index of the coordinate that is observed.</param>
 /// <param name="observedValue">Value observed at that coordinate.</param>
 public static VectorGaussian Conditional(Tuple<VectorGaussian, Wishart> priors, int observedIndex, double observedValue)
 {
     // Draw the joint mean and precision from their priors.
     Variable<Vector> meanVariable = Variable.Random(priors.Item1);
     Variable<PositiveDefiniteMatrix> precisionVariable = Variable.Random(priors.Item2);
     Variable<Vector> joint = Variable.VectorGaussianFromMeanAndPrecision(meanVariable, precisionVariable);

     // Start from a proper distribution (to avoid improper messages).
     joint.InitialiseTo(new VectorGaussian(priors.Item1.GetMean(), priors.Item2.GetMean()));

     // Clamp the chosen coordinate to the observation.
     Variable<double> pinned = Variable.GetItem(joint, observedIndex);
     pinned.ObservedValue = observedValue;

     var engine = new InferenceEngine(new VariationalMessagePassing());
     return engine.Infer<VectorGaussian>(joint);
 }
Exemple #17
0
 //Addition by Guy Templeton, get log evidence from learned mixing coeff.
 /// <summary>
 /// Computes model log-evidence via an Infer.NET evidence (Bernoulli) gate.
 /// NOTE(review): the If-block below contains no factors — wObserved, empty and
 /// classes are computed but never used, so this returns the evidence of an empty
 /// model. Presumably a likelihood term was meant to go inside the gate; confirm.
 /// </summary>
 public double GetLogEvidence()
 {
     Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
     Range classes = new Range(numOfClasses);
     IfBlock block = Variable.If(evidence);
     // NOTE(review): dead code — neither local below is consumed; the property
     // read may still throw if wPrior has no observed value, so it is kept as-is.
     VectorGaussian[] wObserved = trainModel.wPrior.ObservedValue;
     VectorGaussian[] empty = Util.ArrayInit(numOfClasses, c => (c == 0) ?
         VectorGaussian.PointMass(Vector.Zero(numOfFeatures)) :
         VectorGaussian.FromMeanAndPrecision(Vector.Zero(numOfFeatures), PositiveDefiniteMatrix.Identity(numOfFeatures)));
     block.CloseBlock();
     InferenceEngine engine = new InferenceEngine();
     return engine.Infer<Bernoulli>(evidence).LogOdds;
 }
        /// <summary>
        /// Bayesian fit of y = a + b*x to noisy synthetic data, then publishes the
        /// actual line, the inferred line and the noisy samples as chart series.
        /// FIX: the "Data" series label was built with string.Format("Data", a, b) —
        /// the format string has no placeholders, so the arguments were silently
        /// ignored (CA2241); replaced with the plain literal (same output).
        /// </summary>
        public override void Run()
        {
            var rangeMin = -10;
            var interval = 0.1;
            var observationSize = 100;
            var aActual = 0.2;
            var bActual = 2.3;
            var rand = new System.Random();
            // Noise-free line sampled at x = rangeMin*interval upward in steps of interval.
            var actuals = Enumerable.Range(rangeMin, observationSize)
                    .Select(i => i * interval)
                    .Select(i => Tuple.Create((double) i, bActual * i + aActual))
                    .ToArray();
            // Samples: the line plus uniform noise in [-5, 5).
            var samples = actuals.Select(tuple => Tuple.Create(tuple.Item1, tuple.Item2 + ((rand.NextDouble() - 0.5) * 10))).ToArray();

            var series = new List<LabelledSeries<Tuple<double, double>>>();
            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Actual a+bx a={0} b={1}", aActual, bActual), actuals));

            // Broad priors over intercept, slope and observation-noise precision.
            var aPrior = Variable.GaussianFromMeanAndPrecision(0, 0.01).Named("aPrior");
            var bPrior = Variable.GaussianFromMeanAndPrecision(0, 0.01).Named("bPrior");
            var noisePrior = Variable.GammaFromShapeAndScale(1, 5).Named("noisePrior");
            var obsRange = new Range(samples.Length);
            var xArray = Variable.Array<double>(obsRange);
            var exprArray = Variable.Array<double>(obsRange);
            // Likelihood: y_i ~ N(a + b*x_i, 1/noise).
            using (Variable.ForEach(obsRange))
            {
                exprArray[obsRange] = Variable.GaussianFromMeanAndPrecision(aPrior + xArray[obsRange] * bPrior, noisePrior);
            }

            xArray.ObservedValue = samples.Select(t => (double)t.Item1).ToArray();
            exprArray.ObservedValue = samples.Select(t => t.Item2).ToArray();

            var engine = new InferenceEngine();
            var aPosterior = engine.Infer<Gaussian>(aPrior);
            var bPosterior = engine.Infer<Gaussian>(bPrior);
            var noisePosterior = engine.Infer<Gamma>(noisePrior);

            // Plot the line implied by the posterior means of a and b.
            var aInferred = aPosterior.GetMean();
            var bInferred = bPosterior.GetMean();
            var inferred = Enumerable.Range(rangeMin, observationSize)
                            .Select(i => i * interval)
                            .Select(i => Tuple.Create((double)i, bInferred * i + aInferred))
                            .ToArray();

            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Inferred a+bx a={0} b={1}", Math.Round(aInferred, 4), Math.Round(bInferred, 4)), inferred));

            series.Add(new LabelledSeries<Tuple<double, double>>("Data", samples) { IsScatter = true });

            this.Series = series.ToArray();
        }
Exemple #19
0
		/// <summary>
		/// BUGS "Rats" example: hierarchical linear growth curves for N rats measured
		/// at T time points, with per-rat intercepts and slopes drawn from
		/// population-level distributions.
		/// </summary>
		public void Run()
		{
			Rand.Restart(12347);

			// The model
			int N = RatsHeightData.GetLength(0);
			int T = RatsHeightData.GetLength(1);
			Range r = new Range(N).Named("N");
			Range w = new Range(T).Named("T");

			// Population-level distribution of per-rat intercepts.
			Variable<double> alphaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("alphaC");
			Variable<double> alphaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("alphaTau");
			VariableArray<double> alpha = Variable.Array<double>(r).Named("alpha");
			alpha[r] = Variable.GaussianFromMeanAndPrecision(alphaC, alphaTau).ForEach(r);

			// Population-level distribution of per-rat slopes.
			Variable<double> betaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("betaC");
			Variable<double> betaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("betaTau");
			VariableArray<double> beta = Variable.Array<double>(r).Named("beta");
			beta[r] = Variable.GaussianFromMeanAndPrecision(betaC, betaTau).ForEach(r);

			// Observation noise and the centred time covariate (x - xbar).
			Variable<double> tauC = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("tauC");
			VariableArray<double> x = Variable.Observed<double>(RatsXData, w).Named("x");
			Variable<double> xbar = Variable.Sum(x)/T;
			VariableArray2D<double> y = Variable.Observed<double>(RatsHeightData, r, w).Named("y");
			y[r, w] = Variable.GaussianFromMeanAndPrecision(alpha[r] + (beta[r] * (x[w]-xbar)), tauC);
			// Population intercept at x = 0 (the uncentred intercept).
			Variable<double> alpha0 = (alphaC - betaC * xbar).Named("alpha0");

			// Initialise with the mean of the prior (needed for Gibbs to converge quickly)
			alphaC.InitialiseTo(Gaussian.PointMass(0.0));
			tauC.InitialiseTo(Gamma.PointMass(1.0));
			alphaTau.InitialiseTo(Gamma.PointMass(1.0));
			betaTau.InitialiseTo(Gamma.PointMass(1.0));

			// Inference engine
			InferenceEngine ie = new InferenceEngine();
			if (!(ie.Algorithm is ExpectationPropagation))
			{
				Gaussian betaCMarg = ie.Infer<Gaussian>(betaC);
				Gaussian alpha0Marg = ie.Infer<Gaussian>(alpha0);
				Gamma tauCMarg = ie.Infer<Gamma>(tauC);

				// Inference
				Console.WriteLine("alpha0 = {0}[sd={1}]", alpha0Marg, Math.Sqrt(alpha0Marg.GetVariance()).ToString("g4"));
				Console.WriteLine("betaC = {0}[sd={1}]", betaCMarg, Math.Sqrt(betaCMarg.GetVariance()).ToString("g4"));
				Console.WriteLine("tauC = {0}", tauCMarg);
			}
			else
				Console.WriteLine("This example does not run with Expectation Propagation");
		}
Exemple #20
0
		/// <summary>
		/// Two fair coins: infers P(both heads), then the posterior over the first
		/// coin after observing that they were not both heads.
		/// </summary>
		public void Run()
		{
			Variable<bool> firstCoin = Variable.Bernoulli(0.5).Named("firstCoin");
			Variable<bool> secondCoin = Variable.Bernoulli(0.5).Named("secondCoin");
			Variable<bool> bothHeads = (firstCoin & secondCoin).Named("bothHeads");

			InferenceEngine engine = new InferenceEngine();
			if (engine.Algorithm is VariationalMessagePassing)
			{
				Console.WriteLine("This example does not run with Variational Message Passing");
				return;
			}

			Console.WriteLine("Probability both coins are heads: " + engine.Infer(bothHeads));
			// Condition on the observation and query the first coin.
			bothHeads.ObservedValue = false;
			Console.WriteLine("Probability distribution over firstCoin: " + engine.Infer(firstCoin));
		}
Exemple #21
0
		/// <summary>
		/// Model constructor
		/// </summary>
		public BayesianPCAModel()
		{
			// The various dimensions will be set externally...
			vN = Variable.New<int>().Named("NumObs");
			vD = Variable.New<int>().Named("NumFeats");
			vM = Variable.New<int>().Named("MaxComponents");
			rN = new Range(vN).Named("N");
			rD = new Range(vD).Named("D");
			rM = new Range(vM).Named("M");
			// ... as will the data
			vData = Variable.Array<double>(rN, rD).Named("data");
			// ... and the priors
			priorAlpha = Variable.New<Gamma>().Named("PriorAlpha");
			priorMu = Variable.New<Gaussian>().Named("PriorMu");
			priorPi = Variable.New<Gamma>().Named("PriorPi");
			// Mixing matrix. Each row is drawn from a Gaussian with zero mean and
			// a precision which will be learnt. This is a form of Automatic
			// Relevance Determination (ARD). The larger the precisions become, the
			// less important that row in the mixing matrix is in explaining the data.
			vAlpha = Variable.Array<double>(rM).Named("Alpha");
			vW = Variable.Array<double>(rM, rD).Named("W");
			vAlpha[rM] = Variable.Random<double, Gamma>(priorAlpha).ForEach(rM);
			vW[rM, rD] = Variable.GaussianFromMeanAndPrecision(0, vAlpha[rM]).ForEach(rD);
			// Latent variables are drawn from a standard Gaussian
			vZ = Variable.Array<double>(rN, rM).Named("Z");
			vZ[rN, rM] = Variable.GaussianFromMeanAndPrecision(0.0, 1.0).ForEach(rN, rM);
			// Multiply the latent variables with the mixing matrix...
			vT = Variable.MatrixMultiply(vZ, vW).Named("T");
			// ... add in a bias ...
			vMu = Variable.Array<double>(rD).Named("mu");
			vMu[rD] = Variable.Random<double, Gaussian>(priorMu).ForEach(rD);
			vU = Variable.Array<double>(rN, rD).Named("U");
			vU[rN, rD] = vT[rN, rD] + vMu[rD];
			// ... and add in some observation noise ...
			vPi = Variable.Array<double>(rD).Named("pi");
			vPi[rD] = Variable.Random<double, Gamma>(priorPi).ForEach(rD);
			// ... to give the likelihood of observing the data
			vData[rN, rD] = Variable.GaussianFromMeanAndPrecision(vU[rN, rD], vPi[rD]);
			// Inference engine
			engine = new InferenceEngine();
			// FIX: removed a redundant trailing "return;" — unnecessary in a constructor.
		}
Exemple #22
0
        /// <summary>
        /// Learns the mean and precision of a single multivariate Gaussian from data
        /// using variational message passing.
        /// FIX: removed the unused local <c>numData</c>.
        /// </summary>
        /// <param name="obs">Observed vectors; all must share one dimensionality.</param>
        /// <returns>Posterior over the mean (Item1) and precision (Item2).</returns>
        public static Tuple<VectorGaussian, Wishart> LearnGaussian(Vector[] obs)
        {
            int dim = obs[0].Count;
            // Broad priors over the mean and precision.
            Variable<Vector> mean = Variable.VectorGaussianFromMeanAndPrecision(
                Vector.Zero(dim),
                PositiveDefiniteMatrix.IdentityScaledBy(dim, 10.0)).Named("mean");
            Variable<PositiveDefiniteMatrix> prec = Variable.WishartFromShapeAndScale(
                100.0, PositiveDefiniteMatrix.IdentityScaledBy(dim, 0.01));
            // Each observation is an i.i.d. draw from the Gaussian.
            Range n = new Range(obs.Length).Named("n");
            VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
            data[n] = Variable.VectorGaussianFromMeanAndPrecision(mean, prec).ForEach(n);
            data.ObservedValue = obs;

            var engine = new InferenceEngine(new VariationalMessagePassing());
            var meanPosterior = engine.Infer<VectorGaussian>(mean);
            var precPosterior = engine.Infer<Wishart>(prec);

            return new Tuple<VectorGaussian, Wishart>(meanPosterior, precPosterior);
        }
		/// <summary>
		/// Cycling-time tutorial: learns average travel time and traffic noise from
		/// three observed commutes, then predicts tomorrow's travel time.
		/// FIX: renamed the local probTripTakesLongerThan18Minutes to
		/// probTripTakesLessThan18Minutes — it holds P(tomorrowsTime &lt; 18),
		/// matching both the expression and the message printed below.
		/// </summary>
		public static void RunCyclingTime1()
		{
			//[1] The model
			Variable<double> averageTime = Variable.GaussianFromMeanAndPrecision(15, 0.01);
			Variable<double> trafficNoise = Variable.GammaFromShapeAndScale(2.0, 0.5);

			Variable<double> travelTimeMonday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);
			Variable<double> travelTimeTuesday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);
			Variable<double> travelTimeWednesday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);

			//[2] Train the model
			travelTimeMonday.ObservedValue = 13;
			travelTimeTuesday.ObservedValue = 17;
			travelTimeWednesday.ObservedValue = 16;

			InferenceEngine engine = new InferenceEngine();

			Gaussian averageTimePosterior = engine.Infer<Gaussian>(averageTime);
			Gamma trafficNoisePosterior = engine.Infer<Gamma>(trafficNoise);

			Console.WriteLine("averageTimePosterior: " + averageTimePosterior);
			Console.WriteLine("trafficNoisePosterior: " + trafficNoisePosterior);

			//[3] Add a prediction variable and retrain the model
			Variable<double> tomorrowsTime = Variable.GaussianFromMeanAndPrecision(
				averageTime,
				trafficNoise);

			Gaussian tomorrowsTimeDist = engine.Infer<Gaussian>(tomorrowsTime);
			double tomorrowsMean = tomorrowsTimeDist.GetMean();
			double tomorrowsStdDev = Math.Sqrt(tomorrowsTimeDist.GetVariance());

			// Write out the results.
			Console.WriteLine("Tomorrows predicted time: {0:f2} plus or minus {1:f2}", tomorrowsMean, tomorrowsStdDev);

			// You can also ask other questions of the model 
			double probTripTakesLessThan18Minutes = engine.Infer<Bernoulli>(tomorrowsTime < 18.0).GetProbTrue();
			Console.WriteLine(
				"Probability that the trip takes less than 18 min: {0:f2}",
				probTripTakesLessThan18Minutes);
		}
		/// <summary>
		/// Learns the mean and precision of a Gaussian from 100 sampled data points.
		/// IMPROVED: models the data with a single observed VariableArray over a Range
		/// instead of creating 100 separate observed variables in a C# loop, so the
		/// compiled model size is independent of the number of data points.
		/// Posteriors are identical to the per-point version.
		/// </summary>
		public void Run()
		{
			// Sample data from standard Gaussian
			double[] data = new double[100];
			for (int i = 0; i < data.Length; i++) data[i] = Rand.Normal(0, 1);

			// Create mean and precision random variables
			Variable<double> mean = Variable.GaussianFromMeanAndVariance(0, 100).Named("mean");
			Variable<double> precision = Variable.GammaFromShapeAndScale(1, 1).Named("precision");

			// One array of observations indexed by a range replaces the per-point loop.
			Range dataRange = new Range(data.Length).Named("n");
			VariableArray<double> x = Variable.Array<double>(dataRange).Named("x");
			x[dataRange] = Variable.GaussianFromMeanAndPrecision(mean, precision).ForEach(dataRange);
			x.ObservedValue = data;

			InferenceEngine engine = new InferenceEngine();
			// Retrieve the posterior distributions
			Console.WriteLine("mean=" + engine.Infer(mean));
			Console.WriteLine("prec=" + engine.Infer(precision));
		}
        /// <summary>
        /// Trains a linear Bayes-point-machine-style classifier on feature vectors built
        /// from colour, shape and size attributes, then scores a single test case.
        /// The first 8 inputs are labelled true and the last 8 false (see outputs).
        /// </summary>
        public override void Run()
        {
            // Each input is a concatenation of a colour one-hot block, a shape one-hot
            // block, and a single size value — presumably; JoinArrays/Get*AttributeArray
            // are defined elsewhere in this class, so confirm the encoding there.
            var inputs = new[]
            {
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Star), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Ring), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 15 }),

                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Star), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Arrow), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Trapezium), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Diamond), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Triangle), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Ring), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Ellipse), new double[] { 15 }),
            };

            // Labels aligned with the two groups of inputs above.
            var outputs = new bool[] { true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false };

            // Probit-style model: label = (w.x + Gaussian noise) > 0.
            var j = new Range(inputs.Length);
            var noise = Variable.GammaFromMeanAndVariance(1, 1);
            var X = Variable.Observed(inputs.Select(i => Vector.FromArray(i)).ToArray(), j).Named("X");
            var Y = Variable.Observed(outputs, j).Named("Y");
            var weights = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(inputs.First().Length), PositiveDefiniteMatrix.Identity(inputs.First().Length))
                .Named("weights");
            Y[j] = Variable.GaussianFromMeanAndPrecision(Variable.InnerProduct(X[j], weights), noise) > 0;
            var engine = new InferenceEngine();
            var posteriorWeightsDist = engine.Infer<VectorGaussian>(weights);
            // NOTE(review): posteriorNoiseDist is inferred but never used or printed.
            var posteriorNoiseDist = engine.Infer<Gamma>(noise);
            // Rebind the local to a fresh variable drawn from the learned posterior, so
            // the prediction query below uses the trained weights rather than the prior.
            weights = Variable.Random(posteriorWeightsDist);
            var testCase = JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Trapezium), new double[] { 15 });
            // NOTE(review): testClassification is computed but not used or displayed here.
            var testClassification = engine.Infer<Bernoulli>(Variable.InnerProduct(Vector.FromArray(testCase), weights) > 0);
        }
Exemple #26
0
		/// <summary>
		/// Clinical-trial example: compares a model where treatment changes the outcome
		/// probability against one where control and treated share a single probability,
		/// and infers the posterior probability that the treatment is effective.
		/// </summary>
		public void Run()
		{
			// Data from clinical trial
			VariableArray<bool> controlGroup =
				Variable.Observed(new bool[] { false, false, true, false, false }).Named("controlGroup");
			VariableArray<bool> treatedGroup =
				Variable.Observed(new bool[] { true, false, true, true, true }).Named("treatedGroup");
			Range cIdx = controlGroup.Range.Named("i");
			Range tIdx = treatedGroup.Range.Named("j");

			// 50/50 prior on the treatment being effective.
			Variable<bool> isEffective = Variable.Bernoulli(0.5).Named("isEffective");
			Variable<double> treatedProb, controlProb;

			// Branch 1: treatment works, so each group has its own outcome probability.
			using (Variable.If(isEffective))
			{
				controlProb = Variable.Beta(1, 1).Named("probIfControl");
				controlGroup[cIdx] = Variable.Bernoulli(controlProb).ForEach(cIdx);
				treatedProb = Variable.Beta(1, 1).Named("probIfTreated");
				treatedGroup[tIdx] = Variable.Bernoulli(treatedProb).ForEach(tIdx);
			}

			// Branch 2: no effect, so both groups share one outcome probability.
			using (Variable.IfNot(isEffective))
			{
				Variable<double> sharedProb = Variable.Beta(1, 1).Named("probAll");
				controlGroup[cIdx] = Variable.Bernoulli(sharedProb).ForEach(cIdx);
				treatedGroup[tIdx] = Variable.Bernoulli(sharedProb).ForEach(tIdx);
			}

			InferenceEngine engine = new InferenceEngine();
			if (engine.Algorithm is GibbsSampling)
			{
				Console.WriteLine("This model is not supported by Gibbs sampling.");
			}
			else
			{
				Console.WriteLine("Probability treatment has an effect = " + engine.Infer(isEffective));
				Console.WriteLine("Probability of good outcome if given treatment = "
										+ (float)engine.Infer<Beta>(treatedProb).GetMean());
				Console.WriteLine("Probability of good outcome if control = "
										+ (float)engine.Infer<Beta>(controlProb).GetMean());
			}
		}
Exemple #27
0
		/// <summary>
		/// Builds a two-component mixture model: per-component average time and traffic
		/// noise drawn from supplied priors, plus Dirichlet-distributed mixing weights.
		/// Priors are left unobserved here; callers set them before inference.
		/// </summary>
		public virtual void CreateModel()
		{
			NumComponents = 2;
			Range component = new Range(NumComponents);

			// Per-component prior arrays and the parameters drawn from them.
			AverageTimePriors = Variable.Array<Gaussian>(component);
			TrafficNoisePriors = Variable.Array<Gamma>(component);
			AverageTime = Variable.Array<double>(component);
			TrafficNoise = Variable.Array<double>(component);

			// Indexing both sides by the range gives an implicit ForEach over components.
			AverageTime[component] = Variable.Random<double, Gaussian>(AverageTimePriors[component]);
			TrafficNoise[component] = Variable.Random<double, Gamma>(TrafficNoisePriors[component]);

			// Mixing coefficients drawn from a (caller-supplied) Dirichlet prior.
			MixingPrior = Variable.New<Dirichlet>();
			MixingCoefficients = Variable<Vector>.Random(MixingPrior);
			MixingCoefficients.SetValueRange(component);

			InferenceEngine = new InferenceEngine(new VariationalMessagePassing());
			InferenceEngine.ShowProgress = false;
		}
        /// <summary>
        /// Builds the training and testing factor graphs for a Bayes point machine.
        /// Labels are modelled as the sign of a noisy inner product of a weight vector
        /// with the feature vector.
        /// </summary>
        /// <param name="nFeatures">Dimension of the feature/weight vectors.</param>
        /// <param name="noise">Variance of the Gaussian noise added to the score.</param>
        public BayesPointMachine(int nFeatures, double noise)
        {
            // --- Training model ---
            nTrain = Variable.New<int>().Named("nTrain");
            Range trainRange = new Range(nTrain).Named("trainItem");
            trainingLabels = Variable.Array<bool>(trainRange).Named("trainingLabels");
            trainingItems = Variable.Array<Vector>(trainRange).Named("trainingItems");
            weights = Variable.Random(new VectorGaussian(Vector.Zero(nFeatures),
                PositiveDefiniteMatrix.Identity(nFeatures))).Named("weights");
            trainingLabels[trainRange] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(weights, trainingItems[trainRange]), noise));

            // --- Testing model ---
            nTest = Variable.New<int>().Named("nTest");
            Range testRange = new Range(nTest).Named("testItem");
            testItems = Variable.Array<Vector>(testRange).Named("testItems");
            testLabels = Variable.Array<bool>(testRange).Named("testLabels");

            if (singleModel)
            {
                // Shared model: the test labels reuse the training weight variable.
                testLabels[testRange] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(weights, testItems[testRange]), noise));

                testEngine = new InferenceEngine(new ExpectationPropagation()) { NumberOfIterations = 2 };
            }
            else
            {
                // Two-stage model: the training posterior over the weights is fed in as
                // the prior of a separate test-time weight variable.
                weightPosterior = Variable.New<VectorGaussian>().Named("weightPosterior");
                Variable<Vector> testWeights = Variable<Vector>.Random(weightPosterior);
                testLabels[testRange] = Variable.IsPositive(Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(testWeights, testItems[testRange]), noise));

                trainEngine = new InferenceEngine(new ExpectationPropagation())
                {
                    ShowProgress = false,
                    NumberOfIterations = 5
                };
                testEngine = new InferenceEngine(new ExpectationPropagation())
                {
                    ShowProgress = false,
                    NumberOfIterations = 1
                };
            }
        }
        /// <summary>
        /// Demonstrates posterior convergence: samples data from a known Gaussian and,
        /// for increasing sample sizes (50, 100, ..., 250), infers mean/precision and
        /// records the implied density curve alongside the true one in this.Series.
        /// </summary>
        public override void Run()
        {
            // Ground-truth source distribution the samples are drawn from.
            var sourceMean = 11.4;
            var sourcePrecision = 0.01;
            var source = Gaussian.FromMeanAndPrecision(sourceMean, sourcePrecision);
            var series = new List<LabelledSeries<Tuple<double, double>>>();
            // First series: the actual density evaluated on x = -30 .. 49.
            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Actual mean {0} precision {1}", source.GetMean(), source.Precision), Enumerable.Range(-30, 80).Select(x => Tuple.Create((double)x, Math.Exp(source.GetLogProb(x))))));

            // Prior distributions
            var meanPriorDistr = Gaussian.FromMeanAndPrecision(0, 0.01);
            var precisionPriorDistr = Gamma.FromMeanAndVariance(2, 5);

            var meanPrior = Variable.Random(meanPriorDistr).Named("mean");
            var precPrior = Variable.Random(precisionPriorDistr).Named("precision");
            // tv = observed sample count; changing it resizes the data range without
            // rebuilding the model.
            var tv = Variable.New<int>();
            var tr = new Range(tv).Named("tr");
            var engine = new InferenceEngine();
            var xv = Variable.GaussianFromMeanAndPrecision(meanPrior, precPrior).Named("xv");
            var xs = Variable.Array<double>(tr).Named("xs");
            xs[tr] = xv.ForEach(tr);

            // Draw all samples up front so each iteration sees a prefix of the same data.
            var maxSampleSize = 250;
            var sampleData = Enumerable.Range(0, maxSampleSize + 1).Select(_ => source.Sample()).ToArray();

            for (var i = 50; i <= maxSampleSize; i += 50)
            {
                // Re-run inference with the first i samples observed.
                tv.ObservedValue = i;
                xs.ObservedValue = sampleData.Take(i).ToArray();
                var meanPost = engine.Infer<Gaussian>(meanPrior);
                var precPost = engine.Infer<Gamma>(precPrior);
                // Collapse the posteriors to their means to get a single "implied" Gaussian.
                var estimateDist = Gaussian.FromMeanAndPrecision(meanPost.GetMean(), precPost.GetMean());
                series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Implied mean {0} precision {1} with {2} samples", Math.Round(estimateDist.GetMean(), 4), Math.Round(estimateDist.Precision, 4), i), Enumerable.Range(-30, 80).Select(x => Tuple.Create((double)x, Math.Exp(estimateDist.GetLogProb(x))))));
            }

            this.Series = series.ToArray();
        }
		/// <summary>
		/// Matchbox-style recommender example: learns user/item traits, biases and
		/// per-user rating thresholds from synthetic observations, then feeds the
		/// posteriors back in as priors and predicts a single rating.
		/// Only runs under Expectation Propagation (the engine default).
		/// </summary>
		public void Run()
		{
			// This example requires EP
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation)) {
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			}

			// Define counts
			int numUsers = 50;
			int numItems = 10;
			int numTraits = 2;
			Variable<int> numObservations = Variable.Observed(100).Named("numObservations");
			int numLevels = 2;

			// Define ranges
			Range user = new Range(numUsers).Named("user");
			Range item = new Range(numItems).Named("item");
			Range trait = new Range(numTraits).Named("trait");
			Range observation = new Range(numObservations).Named("observation");
			Range level = new Range(numLevels).Named("level");

			// Define latent variables
			var userTraits = Variable.Array(Variable.Array<double>(trait), user).Named("userTraits");
			var itemTraits = Variable.Array(Variable.Array<double>(trait), item).Named("itemTraits");
			var userBias = Variable.Array<double>(user).Named("userBias");
			var itemBias = Variable.Array<double>(item).Named("itemBias");
			var userThresholds = Variable.Array(Variable.Array<double>(level), user).Named("userThresholds");

			// Define priors (as observed distribution arrays so they can be replaced
			// with posteriors later for online learning / prediction).
			var userTraitsPrior = Variable.Array(Variable.Array<Gaussian>(trait), user).Named("userTraitsPrior");
			var itemTraitsPrior = Variable.Array(Variable.Array<Gaussian>(trait), item).Named("itemTraitsPrior");
			var userBiasPrior = Variable.Array<Gaussian>(user).Named("userBiasPrior");
			var itemBiasPrior = Variable.Array<Gaussian>(item).Named("itemBiasPrior");
			var userThresholdsPrior = Variable.Array(Variable.Array<Gaussian>(level), user).Named("userThresholdsPrior");

			// Define latent variables statistically
			userTraits[user][trait] = Variable<double>.Random(userTraitsPrior[user][trait]);
			itemTraits[item][trait] = Variable<double>.Random(itemTraitsPrior[item][trait]);
			userBias[user] = Variable<double>.Random(userBiasPrior[user]);
			itemBias[item] = Variable<double>.Random(itemBiasPrior[item]);
			userThresholds[user][level] = Variable<double>.Random(userThresholdsPrior[user][level]);

			// Initialise priors
			Gaussian traitPrior = Gaussian.FromMeanAndVariance(0.0, 1.0);
			Gaussian biasPrior = Gaussian.FromMeanAndVariance(0.0, 1.0);

			userTraitsPrior.ObservedValue = Util.ArrayInit(numUsers, u => Util.ArrayInit(numTraits, t => traitPrior));
			itemTraitsPrior.ObservedValue = Util.ArrayInit(numItems, i => Util.ArrayInit(numTraits, t => traitPrior));
			userBiasPrior.ObservedValue = Util.ArrayInit(numUsers, u => biasPrior);
			itemBiasPrior.ObservedValue = Util.ArrayInit(numItems, i => biasPrior);
			// Thresholds centred around the middle rating level.
			userThresholdsPrior.ObservedValue = Util.ArrayInit(numUsers, u =>
					Util.ArrayInit(numLevels, l => Gaussian.FromMeanAndVariance(l - numLevels / 2.0 + 0.5, 1.0)));

			// Break symmetry and remove ambiguity in the traits:
			// pin the first numTraits items' trait vectors to the identity basis.
			for (int i = 0; i < numTraits; i++) {
				// Assume that numTraits < numItems
				for (int j = 0; j < numTraits; j++) {
					itemTraitsPrior.ObservedValue[i][j] = Gaussian.PointMass(0);
				}
				itemTraitsPrior.ObservedValue[i][i] = Gaussian.PointMass(1);
			}

			// Declare training data variables
			var userData = Variable.Array<int>(observation).Named("userData");
			var itemData = Variable.Array<int>(observation).Named("itemData");
			var ratingData = Variable.Array(Variable.Array<bool>(level), observation).Named("ratingData");

			// Set model noises explicitly
			Variable<double> affinityNoiseVariance = Variable.Observed(0.1).Named("affinityNoiseVariance");
			Variable<double> thresholdsNoiseVariance = Variable.Observed(0.1).Named("thresholdsNoiseVariance");

			// Model: rating indicator for each level is whether the noisy user-item
			// affinity (bias + trait inner product) exceeds that noisy threshold.
			using (Variable.ForEach(observation)) {
				VariableArray<double> products = Variable.Array<double>(trait).Named("products");
				products[trait] = userTraits[userData[observation]][trait] * itemTraits[itemData[observation]][trait];

				Variable<double> bias = (userBias[userData[observation]] + itemBias[itemData[observation]]).Named("bias");
				Variable<double> affinity = (bias + Variable.Sum(products).Named("productSum")).Named("affinity");
				Variable<double> noisyAffinity = Variable.GaussianFromMeanAndVariance(affinity, affinityNoiseVariance).Named("noisyAffinity");

				VariableArray<double> noisyThresholds = Variable.Array<double>(level).Named("noisyThresholds");
				noisyThresholds[level] = Variable.GaussianFromMeanAndVariance(userThresholds[userData[observation]][level], thresholdsNoiseVariance);
				ratingData[observation][level] = noisyAffinity > noisyThresholds[level];
			}

			// Observe training data.
			// GenerateData is defined elsewhere in this class — presumably it samples
			// synthetic ratings from the priors and fills the observed arrays; confirm there.
			GenerateData(numUsers, numItems, numTraits, numObservations.ObservedValue, numLevels,
									 userData, itemData, ratingData,
									 userTraitsPrior.ObservedValue, itemTraitsPrior.ObservedValue,
									 userBiasPrior.ObservedValue, itemBiasPrior.ObservedValue, userThresholdsPrior.ObservedValue,
									 affinityNoiseVariance.ObservedValue, thresholdsNoiseVariance.ObservedValue);

			// Allow EP to process the product factor as if running VMP
			// as in Stern, Herbrich, Graepel paper.
			engine.Compiler.GivePriorityTo(typeof(GaussianProductOp_SHG09));
			engine.Compiler.ShowWarnings = true;

			// Run inference
			var userTraitsPosterior = engine.Infer<Gaussian[][]>(userTraits);
			var itemTraitsPosterior = engine.Infer<Gaussian[][]>(itemTraits);
			var userBiasPosterior = engine.Infer<Gaussian[]>(userBias);
			var itemBiasPosterior = engine.Infer<Gaussian[]>(itemBias);
			var userThresholdsPosterior = engine.Infer<Gaussian[][]>(userThresholds);

			// Feed in the inferred posteriors as the new priors (online learning step).
			userTraitsPrior.ObservedValue = userTraitsPosterior;
			itemTraitsPrior.ObservedValue = itemTraitsPosterior;
			userBiasPrior.ObservedValue = userBiasPosterior;
			itemBiasPrior.ObservedValue = itemBiasPosterior;
			userThresholdsPrior.ObservedValue = userThresholdsPosterior;

			// Make a prediction: one unobserved rating for user 5 / item 6.
			numObservations.ObservedValue = 1;
			userData.ObservedValue = new int[] { 5 };
			itemData.ObservedValue = new int[] { 6 };
			ratingData.ClearObservedValue();

			Bernoulli[] predictedRating = engine.Infer<Bernoulli[][]>(ratingData)[0];
			Console.WriteLine("Predicted rating:");
			foreach (var rating in predictedRating) Console.WriteLine(rating);
		}