예제 #1
0
        private static void AngleScaleTest()
        {
            // Latent shape parameters: rotation angle and the two axis scales.
            Variable<double> rotation = Variable.GaussianFromMeanAndVariance(Math.PI / 6, 0.4 * 0.4);
            Variable<double> sx = Variable.GaussianFromMeanAndVariance(1.5, 2 * 2);
            Variable<double> sy = Variable.GaussianFromMeanAndVariance(1.5, 2 * 2);

            // Precision matrix built deterministically from (scaleX, scaleY, angle).
            Variable<PositiveDefiniteMatrix> precision =
                Variable<PositiveDefiniteMatrix>.Factor(ShapeFactors.MatrixFromAngleScale, sx, sy, rotation);

            Vector zeroMean = Vector.Zero(2);

            // 5000 iid points from the zero-mean Gaussian with the latent precision.
            Range point = new Range(5000);
            VariableArray<Vector> points = Variable.Array<Vector>(point);
            points[point] = Variable.VectorGaussianFromMeanAndPrecision(zeroMean, precision).ForEach(point);

            // Synthetic observations drawn from a known ground-truth precision.
            PositiveDefiniteMatrix samplingPrecision = ShapeFactors.MatrixFromAngleScale(2.0, 3.0, Math.PI / 5);
            Vector[] sampledPoints = Util.ArrayInit(point.SizeAsInt, i => VectorGaussian.Sample(zeroMean, samplingPrecision));
            points.ObservedValue = sampledPoints;

            // Ask for a point estimate of the precision with a Wishart marginal prototype.
            precision.AddAttribute(new PointEstimate());
            precision.AddAttribute(new MarginalPrototype(new Wishart(2)));

            InferenceEngine engine = new InferenceEngine();
            engine.Compiler.RequiredQuality = engine.Compiler.RecommendedQuality = QualityBand.Unknown;
            Console.WriteLine(engine.Infer(rotation));
            Console.WriteLine(engine.Infer(sx));
            Console.WriteLine(engine.Infer(sy));
        }
예제 #2
0
        static void Main(string[] args)
        {
            // Load training data; each ArrayList entry appears to be one double[] row.
            // NOTE(review): hard-coded absolute path — breaks on any other machine;
            // consider a relative path or a command-line argument.
            ArrayList trainingData = readData(@"C:\Users\Jeremy\Documents\Visual Studio 2010\Projects\InferSandbox\train.txt");

            // The last row of the file is the target y.
            VariableArray<double> y = Variable.Observed((double[])(trainingData[trainingData.Count-1])).Named("y");
            // Weight-vector prior: zero mean, identity covariance.
            // NOTE(review): the dimension is trainingData.Count BEFORE the target row is
            // removed, i.e. (number of feature rows + 1) — presumably the extra slot is a
            // bias term; confirm against BayesPointMachine, which is defined elsewhere.
            Variable<Vector> w = Variable.Random(new VectorGaussian(Vector.Zero(trainingData.Count),
                PositiveDefiniteMatrix.Identity(trainingData.Count))).Named("w");
            trainingData.RemoveAt(trainingData.Count - 1); // drop the target row, leaving only features
            BayesPointMachine(trainingData, w, y);

            InferenceEngine engine = new InferenceEngine();
            if (!(engine.Algorithm is GibbsSampling))
            {
                // Training: infer the posterior over the weights.
                VectorGaussian wPosterior = engine.Infer<VectorGaussian>(w);
                Console.WriteLine("Dist over w=\n" + wPosterior);

                ArrayList testData = readData(@"C:\Users\Jeremy\Documents\Visual Studio 2010\Projects\InferSandbox\test.txt");

                // Prediction: rebuild the model over the test rows with w drawn from the
                // learned posterior, then infer the outputs.
                VariableArray<double> ytest = Variable.Array<double>(new Range(((double[])(testData[0])).Length)).Named("ytest");
                BayesPointMachine(testData, Variable.Random(wPosterior).Named("w"), ytest);
                Console.WriteLine("output=\n" + engine.Infer(ytest));
            }
            else Console.WriteLine("This model has a non-conjugate factor, and therefore cannot use Gibbs sampling");
        }
        //public int[] Classification(Vector[] features, int[] labels)
        //{
        //}
        /// <summary>
        /// Bayesian linear regression: learns a weight vector from (features, values)
        /// and returns the posterior predictive mean for each input item.
        /// </summary>
        /// <param name="features">Feature vectors, one per item; all must share the same dimension.</param>
        /// <param name="values">Observed target value for each feature vector.</param>
        /// <param name="observationNoisePrecision">
        /// Precision of the Gaussian observation noise. Defaults to 1.0, preserving the
        /// existing two-argument call signature.
        /// </param>
        /// <returns>Predictive mean of y for every input item.</returns>
        public double[] Regression(Vector[] features, double[] values, double observationNoisePrecision = 1.0)
        {
            var wMeans = Variable.Vector(Vector.Zero(features[0].Count).ToArray());
            var wPrecision = Variable.WishartFromShapeAndRate(100, PositiveDefiniteMatrix.IdentityScaledBy(features[0].Count, 0.01));
            var w = Variable.VectorGaussianFromMeanAndPrecision(wMeans, wPrecision).Named("w");
            var numItems = Variable.New<int>().Named("numItems");
            var i = new Range(numItems).Named("i");
            i.AddAttribute(new Sequential()); // process items in order (serial schedule)

            var noisePrecision = Variable.New<double>().Named("noisePrecision");

            var x = Variable.Array<Vector>(i).Named("x");
            var y = Variable.Array<double>(i).Named("y");

            // Linear model: y_i ~ N(w . x_i, 1 / noisePrecision)
            using (Variable.ForEach(i))
            {
                y[i] = Variable.GaussianFromMeanAndPrecision(Variable.InnerProduct(w, x[i]), noisePrecision);
            }

            numItems.ObservedValue = features.Length;
            x.ObservedValue = features;
            y.ObservedValue = values;
            // BUG FIX: noisePrecision was created with Variable.New<double>() but never
            // observed, leaving the model incomplete at inference time. Clamp it here.
            noisePrecision.ObservedValue = observationNoisePrecision;

            var engine = new InferenceEngine();
            engine.Compiler.UseSerialSchedules = true;
            engine.ShowProgress = false;
            var wPosterior = engine.Infer<VectorGaussian>(w);

            // Prediction pass: clamp w to its posterior mean and infer the y marginals.
            y.ClearObservedValue();
            w.ObservedValue = wPosterior.GetMean();
            var inferredValues = engine.Infer<IList<Gaussian>>(y);
            return inferredValues.Select(v => v.GetMean()).ToArray();
        }
예제 #4
0
		public void Run()
		{
			// Training data: two features per person plus the purchase outcome.
			double[] incomes = { 63, 16, 28, 55, 22, 20 };
			double[] ages = { 38, 23, 40, 27, 18, 40 };
			bool[] willBuy = { true, false, true, true, false, false };

			// Observed labels and a standard-normal prior over the 3 weights.
			VariableArray<bool> labels = Variable.Observed(willBuy).Named("y");
			Variable<Vector> weights = Variable.Random(new VectorGaussian(Vector.Zero(3),
				PositiveDefiniteMatrix.Identity(3))).Named("w");
			BayesPointMachine(incomes, ages, weights, labels);

			InferenceEngine engine = new InferenceEngine();
			if (engine.Algorithm is GibbsSampling)
			{
				// Guard clause: this model contains a non-conjugate factor.
				Console.WriteLine("This model has a non-conjugate factor, and therefore cannot use Gibbs sampling");
				return;
			}

			// Training: posterior over the weight vector.
			VectorGaussian wPosterior = engine.Infer<VectorGaussian>(weights);
			Console.WriteLine("Dist over w=\n" + wPosterior);

			// Prediction on three unseen people, with w drawn from the learned posterior.
			double[] incomesTest = { 58, 18, 22 };
			double[] agesTest = { 36, 24, 37 };
			VariableArray<bool> ytest = Variable.Array<bool>(new Range(agesTest.Length)).Named("ytest");
			BayesPointMachine(incomesTest, agesTest, Variable.Random(wPosterior).Named("w"), ytest);
			Console.WriteLine("output=\n" + engine.Infer(ytest));
		}
        /// <summary>
        /// Fits a Bayesian mixture of Gaussians to <paramref name="observedData"/> with
        /// variational message passing and returns the recovered clusters together with
        /// the model-evidence Bernoulli.
        /// </summary>
        /// <param name="observedData">Data points; all vectors must share the same dimension.</param>
        /// <param name="clusters">Number of mixture components to fit.</param>
        /// <returns>Per-cluster (mean, covariance) estimates plus the evidence.</returns>
        public InferenceResult<Cluster[]> Infer(Vector[] observedData, int clusters)
        {
            var dimensions = observedData.First().Count;

            // Evidence trick: everything inside If(evidence) contributes to the model
            // evidence, read off the posterior of this Bernoulli(0.5) variable.
            var evidence = Variable.Bernoulli(0.5).Named("evidence");
            var evidenceBlock = Variable.If(evidence);
            var clustersRange = new Range(clusters).Named("clustersRange");

            // Broad priors over the component means and precisions.
            var meansPrior = Variable.Array<Vector>(clustersRange).Named("meansPrior");
            meansPrior[clustersRange] = Variable
                .VectorGaussianFromMeanAndPrecision(
                    Vector.Zero(dimensions),
                    PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
                .ForEach(clustersRange);

            var precisionsPrior = Variable.Array<PositiveDefiniteMatrix>(clustersRange).Named("precisionsPrior");
            precisionsPrior[clustersRange] = Variable.WishartFromShapeAndRate(100, PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
                .ForEach(clustersRange);

            // Symmetric Dirichlet prior over the mixing weights.
            var initialWeights = Enumerable.Range(0, clusters).Select(_ => 1.0).ToArray();
            var mixtureWeightsPrior = Variable.Dirichlet(clustersRange, initialWeights).Named("mixtureWeightsPrior");

            var dataRange = new Range(observedData.Length).Named("dataRange");
            var data = Variable.Array<Vector>(dataRange).Named("data");

            // Latent component assignment for each data point.
            var latentIndex = Variable.Array<int>(dataRange).Named("latentIndex");

            using (Variable.ForEach(dataRange))
            {
                latentIndex[dataRange] = Variable.Discrete(mixtureWeightsPrior);
                using (Variable.Switch(latentIndex[dataRange]))
                {
                    data[dataRange] = Variable.VectorGaussianFromMeanAndPrecision(meansPrior[latentIndex[dataRange]], precisionsPrior[latentIndex[dataRange]]);
                }
            }

            // Random point-mass initialisation of the assignments breaks label symmetry.
            var zinit = new Discrete[dataRange.SizeAsInt];
            for (int i = 0; i < zinit.Length; i++)
                zinit[i] = Discrete.PointMass(Rand.Int(clustersRange.SizeAsInt), clustersRange.SizeAsInt);
            latentIndex.InitialiseTo(Distribution<int>.Array(zinit));

            evidenceBlock.CloseBlock();

            data.ObservedValue = observedData;

            var ie = new InferenceEngine(new VariationalMessagePassing());
            ie.ShowProgress = false;

            // FIX: the mixture-weight posterior used to be inferred and then discarded;
            // that dead computation has been removed.
            var meansPosterior = ie.Infer<VectorGaussian[]>(meansPrior);
            var precisionsPosterior = ie.Infer<Wishart[]>(precisionsPrior);
            var bEvidence = ie.Infer<Bernoulli>(evidence);

            // Report each cluster as (mean, covariance) = (E[mean], E[precision]^-1).
            var result = new List<Cluster>();
            for (var i = 0; i < clusters; i++)
            {
                result.Add(new Cluster(meansPosterior[i].GetMean(), precisionsPosterior[i].GetMean().Inverse()));
            }

            return new InferenceResult<Cluster[]>(bEvidence, result.ToArray());
        }
예제 #6
0
		// Sample data from a DINA/NIDA model and then use Infer.NET to recover the parameters.
		public void Run()
		{
			// This example requires EP; bail out under any other algorithm.
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation))
			{
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			}

			// Choose which variant to sample/fit (DINA when true, NIDA otherwise);
			// both sampler and model builders are defined elsewhere in this class.
			bool useDina = true;
			Beta slipPrior = new Beta(1, 10);
			Beta guessPrior = new Beta(1, 10);
			Rand.Restart(0); // fixed seed so the generated problem is reproducible
			int nStudents = 100;
			int nQuestions = 20;
			int nSkills = 3;
			int[][] skillsRequired = new int[nQuestions][];
			for (int q = 0; q < nQuestions; q++) {
				// each question requires a random set of skills
				int[] skills = Rand.Perm(nSkills);
				int n = Rand.Int(nSkills)+1;
				skillsRequired[q] = Util.ArrayInit(n, i => skills[i]);
				Console.WriteLine("skillsRequired[{0}] = {1}", q, Util.CollectionToString(skillsRequired[q]));
			}
			// Ground-truth parameters (out-params of the sampler) and the matching
			// model variables (out-params of the model builder).
			double[] pSkill, slip, guess;
			bool[][] hasSkill;
			VariableArray<double> slipVar, guessVar, pSkillVar;
			VariableArray<VariableArray<bool>,bool[][]> hasSkillVar;
			if (useDina) {
				bool[][] responses = DinaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				DinaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			} else {
				bool[][] responses = NidaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				NidaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			}

			engine.NumberOfIterations = 10;
			// Score skill recovery: a skill counts as recovered when the posterior
			// LogOdds sign matches the sampled ground truth.
			Bernoulli[][] hasSkillPost = engine.Infer<Bernoulli[][]>(hasSkillVar);
			int numErrors = 0;
			for (int i = 0; i < nStudents; i++) {
				for (int s = 0; s < nSkills; s++) {
					if (hasSkill[i][s] != (hasSkillPost[i][s].LogOdds > 0)) numErrors++;
				}
			}
			Console.WriteLine("{0:0}% of skills recovered correctly", 100.0 - 100.0*numErrors/(nStudents*nSkills));
			// Compare inferred parameter posteriors with the sampled ground truth
			// (only the first few slip/guess entries are printed).
			Beta[] pSkillPost = engine.Infer<Beta[]>(pSkillVar);
			Beta[] slipPost = engine.Infer<Beta[]>(slipVar);
			Beta[] guessPost = engine.Infer<Beta[]>(guessVar);
			for (int s = 0; s < nSkills; s++) {
				Console.WriteLine("pSkill[{0}] = {1} (sampled from {2})", s, pSkillPost[s], pSkill[s].ToString("g4"));
			}
			for (int i = 0; i < Math.Min(3,slipPost.Length); i++)	{
				Console.WriteLine("slip[{0}] = {1} (sampled from {2})", i, slipPost[i], slip[i].ToString("g4"));
			}
			for (int i = 0; i < Math.Min(3,guessPost.Length); i++) {
				Console.WriteLine("guess[{0}] = {1} (sampled from {2})", i, guessPost[i], guess[i].ToString("g4"));
			}
		}
예제 #7
0
		public void Run()
		{
			// Two mixture components.
			Range k = new Range(2).Named("k");

			// Per-component mean vectors with a broad Gaussian prior.
			VariableArray<Vector> means = Variable.Array<Vector>(k).Named("means");
			means[k] = Variable.VectorGaussianFromMeanAndPrecision(
				Vector.FromArray(0.0, 0.0),
				PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

			// Per-component precision matrices with a Wishart prior.
			VariableArray<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(k).Named("precs");
			precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);

			// Mixing proportions with a symmetric Dirichlet prior.
			Variable<Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");

			// Data array plus the latent component indicator for each point.
			Range n = new Range(300).Named("n");
			VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
			VariableArray<int> z = Variable.Array<int>(n).Named("z");

			// Mixture likelihood: pick a component, then emit a Gaussian point from it.
			using (Variable.ForEach(n))
			{
				z[n] = Variable.Discrete(weights);
				using (Variable.Switch(z[n]))
				{
					data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
				}
			}

			data.ObservedValue = GenerateData(n.SizeAsInt);

			// Random point-mass initialisation of z breaks the label symmetry.
			Discrete[] zinit = new Discrete[n.SizeAsInt];
			for (int i = 0; i < zinit.Length; i++)
			{
				zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
			}
			z.InitialiseTo(Distribution<int>.Array(zinit));

			InferenceEngine ie = new InferenceEngine();
			if (ie.Algorithm is ExpectationPropagation)
			{
				Console.WriteLine("This example is not supported by Expectation Propagation");
				return;
			}

			Console.WriteLine("Dist over pi=" + ie.Infer(weights));
			Console.WriteLine("Dist over means=\n" + ie.Infer(means));
			Console.WriteLine("Dist over precs=\n" + ie.Infer(precs));
		}
예제 #8
0
        /// <summary>
        /// Fits a Bayesian linear model y = a + b*x to noisy synthetic data and records
        /// three series: the true line, the inferred line, and the noisy samples.
        /// </summary>
        public override void Run()
        {
            var rangeMin = -10;
            var interval = 0.1;
            var observationSize = 100;
            var aActual = 0.2;
            var bActual = 2.3;
            var rand = new System.Random();

            // Ground-truth points on the line, then samples with uniform noise in [-5, 5).
            var actuals = Enumerable.Range(rangeMin, observationSize)
                    .Select(i => i * interval)
                    .Select(i => Tuple.Create((double) i, bActual * i + aActual))
                    .ToArray();
            var samples = actuals.Select(tuple => Tuple.Create(tuple.Item1, tuple.Item2 + ((rand.NextDouble() - 0.5) * 10))).ToArray();

            var series = new List<LabelledSeries<Tuple<double, double>>>();
            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Actual a+bx a={0} b={1}", aActual, bActual), actuals));

            // Broad priors over the intercept, slope and noise precision.
            var aPrior = Variable.GaussianFromMeanAndPrecision(0, 0.01).Named("aPrior");
            var bPrior = Variable.GaussianFromMeanAndPrecision(0, 0.01).Named("bPrior");
            var noisePrior = Variable.GammaFromShapeAndScale(1, 5).Named("noisePrior");
            var obsRange = new Range(samples.Length);
            var xArray = Variable.Array<double>(obsRange);
            var exprArray = Variable.Array<double>(obsRange);
            using (Variable.ForEach(obsRange))
            {
                exprArray[obsRange] = Variable.GaussianFromMeanAndPrecision(aPrior + xArray[obsRange] * bPrior, noisePrior);
            }

            xArray.ObservedValue = samples.Select(t => (double)t.Item1).ToArray();
            exprArray.ObservedValue = samples.Select(t => t.Item2).ToArray();

            var engine = new InferenceEngine();
            var aPosterior = engine.Infer<Gaussian>(aPrior);
            var bPosterior = engine.Infer<Gaussian>(bPrior);
            var noisePosterior = engine.Infer<Gamma>(noisePrior);

            // Reconstruct the inferred line from the posterior means.
            var aInferred = aPosterior.GetMean();
            var bInferred = bPosterior.GetMean();
            var inferred = Enumerable.Range(rangeMin, observationSize)
                            .Select(i => i * interval)
                            .Select(i => Tuple.Create((double)i, bInferred * i + aInferred))
                            .ToArray();

            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Inferred a+bx a={0} b={1}", Math.Round(aInferred, 4), Math.Round(bInferred, 4)), inferred));

            // BUG FIX: this label used string.Format("Data", aActual, bActual) — a format
            // string with no placeholders, so the arguments were silently ignored. The
            // resulting label ("Data") is unchanged.
            series.Add(new LabelledSeries<Tuple<double, double>>("Data", samples) { IsScatter = true });

            this.Series = series.ToArray();
        }
예제 #9
0
		public void Run()
		{
			// Fixed seed so the example is reproducible.
			Rand.Restart(12347);

			// The model: hierarchical linear growth — each rat r gets its own intercept
			// alpha[r] and slope beta[r], drawn from shared population-level Gaussians.
			int N = RatsHeightData.GetLength(0); // number of rats (rows of the height table)
			int T = RatsHeightData.GetLength(1); // number of measurement times (columns)
			Range r = new Range(N).Named("N");
			Range w = new Range(T).Named("T");

			// Population-level intercept distribution: mean alphaC, precision alphaTau.
			Variable<double> alphaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("alphaC");
			Variable<double> alphaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("alphaTau");
			VariableArray<double> alpha = Variable.Array<double>(r).Named("alpha");
			alpha[r] = Variable.GaussianFromMeanAndPrecision(alphaC, alphaTau).ForEach(r);

			// Population-level slope distribution: mean betaC, precision betaTau.
			Variable<double> betaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("betaC");
			Variable<double> betaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("betaTau");
			VariableArray<double> beta = Variable.Array<double>(r).Named("beta");
			beta[r] = Variable.GaussianFromMeanAndPrecision(betaC, betaTau).ForEach(r);

			// Observation noise precision, and the regressor centred at its mean xbar.
			Variable<double> tauC = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("tauC");
			VariableArray<double> x = Variable.Observed<double>(RatsXData, w).Named("x");
			Variable<double> xbar = Variable.Sum(x)/T;
			VariableArray2D<double> y = Variable.Observed<double>(RatsHeightData, r, w).Named("y");
			y[r, w] = Variable.GaussianFromMeanAndPrecision(alpha[r] + (beta[r] * (x[w]-xbar)), tauC);
			// Intercept at x = 0 in the uncentred parameterisation.
			Variable<double> alpha0 = (alphaC - betaC * xbar).Named("alpha0");

			// Initialise with the mean of the prior (needed for Gibbs to converge quickly)
			alphaC.InitialiseTo(Gaussian.PointMass(0.0));
			tauC.InitialiseTo(Gamma.PointMass(1.0));
			alphaTau.InitialiseTo(Gamma.PointMass(1.0));
			betaTau.InitialiseTo(Gamma.PointMass(1.0));

			// Inference engine
			InferenceEngine ie = new InferenceEngine();
			if (!(ie.Algorithm is ExpectationPropagation))
			{
				Gaussian betaCMarg = ie.Infer<Gaussian>(betaC);
				Gaussian alpha0Marg = ie.Infer<Gaussian>(alpha0);
				Gamma tauCMarg = ie.Infer<Gamma>(tauC);

				// Inference: print each marginal with its standard deviation.
				Console.WriteLine("alpha0 = {0}[sd={1}]", alpha0Marg, Math.Sqrt(alpha0Marg.GetVariance()).ToString("g4"));
				Console.WriteLine("betaC = {0}[sd={1}]", betaCMarg, Math.Sqrt(betaCMarg.GetVariance()).ToString("g4"));
				Console.WriteLine("tauC = {0}", tauCMarg);
			}
			else
				Console.WriteLine("This example does not run with Expectation Propagation");
		}
예제 #10
0
		public void Run()
		{
			// Two independent fair coins; bothHeads is their conjunction.
			Variable<bool> firstCoin = Variable.Bernoulli(0.5).Named("firstCoin");
			Variable<bool> secondCoin = Variable.Bernoulli(0.5).Named("secondCoin");
			Variable<bool> bothHeads = (firstCoin & secondCoin).Named("bothHeads");

			InferenceEngine ie = new InferenceEngine();
			if (ie.Algorithm is VariationalMessagePassing)
			{
				Console.WriteLine("This example does not run with Variational Message Passing");
				return;
			}

			// Prior query, then condition on bothHeads == false and re-query firstCoin.
			Console.WriteLine("Probability both coins are heads: " + ie.Infer(bothHeads));
			bothHeads.ObservedValue = false;
			Console.WriteLine("Probability distribution over firstCoin: " + ie.Infer(firstCoin));
		}
        /// <summary>
        /// Sparse Gaussian-process regression on SPY prices: predicts price[i+1] from
        /// price[i] and records the input, posterior-mean and posterior-stddev series.
        /// </summary>
        public override void Run()
        {
            var prices = GetPrices("SPY").Select(t => t.Item2).ToArray();
            // Inputs are each price; outputs are the following day's price.
            var inputs = prices.Take(prices.Length - 1).ToArray();
            var inputVectors = inputs.Select(i => Vector.FromArray(new[] { i })).ToArray();
            var outputs = prices.Skip(1).ToArray();

            // Set up the GP prior, which will be filled in later
            var prior = Variable.New<SparseGP>().Named("prior");

            // The sparse GP variable - a distribution over functions
            var f = Variable<IFunction>.Random(prior).Named("f");

            // The locations to evaluate the function
            var x = Variable.Observed(inputVectors).Named("x");
            var j = x.Range.Named("j");
            var y = Variable.Observed(outputs, j).Named("y");
            // Observation model: y = f(x) plus Gaussian noise with variance 0.1.
            y[j] = Variable.GaussianFromMeanAndVariance(Variable.FunctionEvaluate(f, x[j]), 0.1);
            var kf = new SquaredExponential(-0.5);

            // Pseudo-input basis at 10, 20, ..., 100.
            // CLEANUP: removed an unused 'rand' local (new Random(Environment.TickCount))
            // and a commented-out hand-written basis; neither was referenced.
            var basis = Enumerable.Range(1, 10).Select(i => Vector.FromArray(new double[1] { i * 10 })).ToArray();

            var gp = new GaussianProcess(new ConstantFunction(0), kf);

            prior.ObservedValue = new SparseGP(new SparseGPFixed(gp, basis));
            var engine = new InferenceEngine(new ExpectationPropagation());
            var sgp = engine.Infer<SparseGP>(f);

            // Posterior mean and standard deviation at every training input.
            var means = sgp.Mean(inputVectors).ToArray();
            var stdDevs = inputVectors.Select(iv => Math.Sqrt(sgp.Variance(iv))).ToArray();

            this.Series = new[]
            {
                new LabelledSeries<Tuple<double,double>>(
                    "input",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, inputs[i]))),
                new LabelledSeries<Tuple<double,double>>(
                    "infered mean",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, means[i]))),
                new LabelledSeries<Tuple<double,double>>(
                    "infered stddev",
                    Enumerable.Range(0,inputs.Length)
                    .Select(i=> Tuple.Create((double)i, stdDevs[i]))),
            };
        }
예제 #12
0
        /// <summary>
        /// Learns the mean and precision of a multivariate Gaussian from data using
        /// variational message passing, returning their posterior distributions.
        /// </summary>
        public static Tuple<VectorGaussian, Wishart> LearnGaussian(Vector[] obs)
        {
            int dim = obs[0].Count;

            // Priors: broad Gaussian on the mean, Wishart on the precision.
            Variable<Vector> mean = Variable.VectorGaussianFromMeanAndPrecision(
                Vector.Zero(dim),
                PositiveDefiniteMatrix.IdentityScaledBy(dim, 10.0)).Named("mean");
            Variable<PositiveDefiniteMatrix> prec = Variable.WishartFromShapeAndScale(
                100.0, PositiveDefiniteMatrix.IdentityScaledBy(dim, 0.01));

            // iid Gaussian likelihood over the observations.
            Range n = new Range(obs.Length).Named("n");
            VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
            data[n] = Variable.VectorGaussianFromMeanAndPrecision(mean, prec).ForEach(n);
            data.ObservedValue = obs;

            var engine = new InferenceEngine(new VariationalMessagePassing());
            VectorGaussian meanPosterior = engine.Infer<VectorGaussian>(mean);
            Wishart precPosterior = engine.Infer<Wishart>(prec);
            return Tuple.Create(meanPosterior, precPosterior);
        }
예제 #13
0
		/// <summary>
		/// Cyclist travel-time model: learns the average time and traffic noise from
		/// three observed days, then predicts tomorrow's travel time and the chance it
		/// is under 18 minutes.
		/// </summary>
		public static void RunCyclingTime1()
		{
			//[1] The model
			Variable<double> averageTime = Variable.GaussianFromMeanAndPrecision(15, 0.01);
			Variable<double> trafficNoise = Variable.GammaFromShapeAndScale(2.0, 0.5);

			Variable<double> travelTimeMonday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);
			Variable<double> travelTimeTuesday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);
			Variable<double> travelTimeWednesday = Variable.GaussianFromMeanAndPrecision(averageTime, trafficNoise);

			//[2] Train the model
			travelTimeMonday.ObservedValue = 13;
			travelTimeTuesday.ObservedValue = 17;
			travelTimeWednesday.ObservedValue = 16;

			InferenceEngine engine = new InferenceEngine();

			Gaussian averageTimePosterior = engine.Infer<Gaussian>(averageTime);
			Gamma trafficNoisePosterior = engine.Infer<Gamma>(trafficNoise);

			Console.WriteLine("averageTimePosterior: " + averageTimePosterior);
			Console.WriteLine("trafficNoisePosterior: " + trafficNoisePosterior);

			//[3] Add a prediction variable and retrain the model
			Variable<double> tomorrowsTime = Variable.GaussianFromMeanAndPrecision(
				averageTime,
				trafficNoise);

			Gaussian tomorrowsTimeDist = engine.Infer<Gaussian>(tomorrowsTime);
			double tomorrowsMean = tomorrowsTimeDist.GetMean();
			double tomorrowsStdDev = Math.Sqrt(tomorrowsTimeDist.GetVariance());

			// Write out the results.
			Console.WriteLine("Tomorrows predicted time: {0:f2} plus or minus {1:f2}", tomorrowsMean, tomorrowsStdDev);

			// You can also ask other questions of the model.
			// FIX: the local was named probTripTakesLongerThan18Minutes, contradicting
			// both the computed event (tomorrowsTime < 18.0) and the printed message.
			double probTripTakesLessThan18Minutes = engine.Infer<Bernoulli>(tomorrowsTime < 18.0).GetProbTrue();
			Console.WriteLine(
				"Probability that the trip takes less than 18 min: {0:f2}",
				probTripTakesLessThan18Minutes);
		}
예제 #14
0
		public void Run()
		{
			// Draw 100 samples from a standard Gaussian.
			double[] samples = new double[100];
			for (int n = 0; n < samples.Length; n++)
			{
				samples[n] = Rand.Normal(0, 1);
			}

			// Unknown mean and precision with broad priors.
			Variable<double> meanVar = Variable.GaussianFromMeanAndVariance(0, 100).Named("mean");
			Variable<double> precVar = Variable.GammaFromShapeAndScale(1, 1).Named("precision");

			// Unrolled model: one observed Gaussian variable per data point.
			for (int n = 0; n < samples.Length; n++)
			{
				Variable<double> x = Variable.GaussianFromMeanAndPrecision(meanVar, precVar).Named("x" + n);
				x.ObservedValue = samples[n];
			}

			// Infer and report the posteriors over mean and precision.
			InferenceEngine engine = new InferenceEngine();
			Console.WriteLine("mean=" + engine.Infer(meanVar));
			Console.WriteLine("prec=" + engine.Infer(precVar));
		}
예제 #15
0
		/// <summary>
		/// Infers the posterior of a standard Gaussian constrained to exceed a threshold,
		/// for thresholds 0.0, 0.1, ..., 1.0.
		/// </summary>
		public void Run()
		{
			// FIX: use an integer loop counter. Accumulating "thresh += 0.1" in floating
			// point drifts (0.7999999..., 0.9999999...), which makes both the printed
			// thresholds and the "<= 1" loop bound fragile.
			for (int step = 0; step <= 10; step++)
			{
				double thresh = step * 0.1;
				Variable<double> x = Variable.GaussianFromMeanAndVariance(0, 1).Named("x");
				Variable.ConstrainTrue(x > thresh);
				InferenceEngine engine = new InferenceEngine();
				if (engine.Algorithm is ExpectationPropagation)
					Console.WriteLine("Dist over x given thresh of " + thresh + "=" + engine.Infer(x));
				else
					Console.WriteLine("This example only runs with Expectation Propagation");
			}
		}
예제 #16
0
		/// <summary>
		/// Clinical-trial model comparison: a Bernoulli indicator selects between a model
		/// with separate outcome probabilities for treated/control (treatment effective)
		/// and a model with one shared probability (no effect).
		/// </summary>
		public void Run()
		{
			// Data from clinical trial
			VariableArray<bool> controlGroup = 
				Variable.Observed(new bool[] { false, false, true, false, false }).Named("controlGroup");
			VariableArray<bool> treatedGroup = 
				Variable.Observed(new bool[] { true, false, true, true, true }).Named("treatedGroup");
			Range i = controlGroup.Range.Named("i"); Range j = treatedGroup.Range.Named("j");

			// Prior on being effective treatment
			Variable<bool> isEffective = Variable.Bernoulli(0.5).Named("isEffective");
			Variable<double> probIfTreated, probIfControl;
			using (Variable.If(isEffective))
			{
				// Model if treatment is effective: independent outcome probabilities
				// for the control and treated arms.
				probIfControl = Variable.Beta(1, 1).Named("probIfControl");
				controlGroup[i] = Variable.Bernoulli(probIfControl).ForEach(i);
				probIfTreated = Variable.Beta(1, 1).Named("probIfTreated");
				treatedGroup[j] = Variable.Bernoulli(probIfTreated).ForEach(j);
			}
			using (Variable.IfNot(isEffective))
			{
				// Model if treatment is not effective: one shared outcome probability.
				Variable<double> probAll = Variable.Beta(1, 1).Named("probAll");
				controlGroup[i] = Variable.Bernoulli(probAll).ForEach(i);
				treatedGroup[j] = Variable.Bernoulli(probAll).ForEach(j);
			}
			InferenceEngine ie = new InferenceEngine();
			if (!(ie.Algorithm is GibbsSampling))
			{
				// Posterior over the model indicator and the per-arm probabilities
				// (probIfTreated/probIfControl were declared outside the If block so
				// they remain queryable here).
				Console.WriteLine("Probability treatment has an effect = " + ie.Infer(isEffective));
				Console.WriteLine("Probability of good outcome if given treatment = " 
										+ (float)ie.Infer<Beta>(probIfTreated).GetMean());
				Console.WriteLine("Probability of good outcome if control = " 
										+ (float)ie.Infer<Beta>(probIfControl).GetMean());
			}
			else
				Console.WriteLine("This model is not supported by Gibbs sampling.");
		}
예제 #17
0
        /// <summary>
        /// Bayesian linear (probit-style) classifier over (color, shape, size) attribute
        /// vectors: learns a weight vector from 16 labelled examples, then scores one
        /// unseen test case.
        /// </summary>
        /// <remarks>
        /// NOTE(review): posteriorNoiseDist and testClassification are computed but never
        /// printed or returned — presumably intended for debugger inspection; confirm.
        /// </remarks>
        public override void Run()
        {
            // Each input is the concatenation of a color one-hot, a shape one-hot and a size.
            var inputs = new[]
            {
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Star), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Ring), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Blue), GetShapeAttributeArray(Shapes.Rectangle), new double[] { 15 }),

                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Star), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Arrow), new double[] { 10 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Trapezium), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Green), GetShapeAttributeArray(Shapes.Diamond), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Triangle), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Ring), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Yellow), GetShapeAttributeArray(Shapes.Circle), new double[] { 15 }),
                JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Ellipse), new double[] { 15 }),
            };

            // First 8 items are positive, last 8 negative.
            var outputs = new bool[] { true, true, true, true, true, true, true, true, false, false, false, false, false, false, false, false };

            var j = new Range(inputs.Length);
            var noise = Variable.GammaFromMeanAndVariance(1, 1);
            var X = Variable.Observed(inputs.Select(i => Vector.FromArray(i)).ToArray(), j).Named("X");
            var Y = Variable.Observed(outputs, j).Named("Y");
            // Standard-normal prior over the weights.
            var weights = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(inputs.First().Length), PositiveDefiniteMatrix.Identity(inputs.First().Length))
                .Named("weights");
            // Likelihood: label is true when the noisy inner product exceeds 0.
            Y[j] = Variable.GaussianFromMeanAndPrecision(Variable.InnerProduct(X[j], weights), noise) > 0;
            var engine = new InferenceEngine();
            var posteriorWeightsDist = engine.Infer<VectorGaussian>(weights);
            var posteriorNoiseDist = engine.Infer<Gamma>(noise);
            // Replace the weights variable with a draw from the learned posterior
            // before scoring the held-out case.
            weights = Variable.Random(posteriorWeightsDist);
            var testCase = JoinArrays(GetColorAttributeArray(Color.Red), GetShapeAttributeArray(Shapes.Trapezium), new double[] { 15 });
            var testClassification = engine.Infer<Bernoulli>(Variable.InnerProduct(Vector.FromArray(testCase), weights) > 0);
        }
예제 #18
0
 //Addition by Guy Templeton, get log evidence from learned mixing coeff.
 /// <summary>
 /// Returns the model log-evidence via the evidence-variable trick: the posterior
 /// LogOdds of a Bernoulli(0.5) gating an If-block equals the log evidence of the
 /// model defined inside that block.
 /// </summary>
 /// <remarks>
 /// NOTE(review): no factors are attached between Variable.If(evidence) and
 /// block.CloseBlock() — wObserved and empty are constructed but never used in the
 /// model, so the inferred evidence appears to reduce to the prior. Verify this is
 /// the intended behaviour.
 /// </remarks>
 public double GetLogEvidence()
 {
     Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
     Range classes = new Range(numOfClasses);
     IfBlock block = Variable.If(evidence);
     // Trained per-class weight posteriors (throws if wPrior was never observed).
     VectorGaussian[] wObserved = trainModel.wPrior.ObservedValue;
     // Point mass for class 0, standard Gaussian for the remaining classes.
     VectorGaussian[] empty = Util.ArrayInit(numOfClasses, c => (c == 0) ?
         VectorGaussian.PointMass(Vector.Zero(numOfFeatures)) :
         VectorGaussian.FromMeanAndPrecision(Vector.Zero(numOfFeatures), PositiveDefiniteMatrix.Identity(numOfFeatures)));
     block.CloseBlock();
     InferenceEngine engine = new InferenceEngine();
     return engine.Infer<Bernoulli>(evidence).LogOdds;
 }
예제 #19
0
 /// <summary>
 /// Conditions a Gaussian vector (with mean/precision drawn from the given priors)
 /// on one observed coordinate, and returns the conditional over the full vector.
 /// </summary>
 public static VectorGaussian Conditional(Tuple<VectorGaussian, Wishart> priors, int observedIndex, double observedValue)
 {
     // Joint model: v ~ N(mean, prec^-1), with (mean, prec) drawn from the priors.
     Variable<Vector> meanVar = Variable.Random(priors.Item1);
     Variable<PositiveDefiniteMatrix> precVar = Variable.Random(priors.Item2);
     Variable<Vector> v = Variable.VectorGaussianFromMeanAndPrecision(meanVar, precVar);

     // Start v at a proper distribution so VMP never sends improper messages.
     v.InitialiseTo(new VectorGaussian(priors.Item1.GetMean(), priors.Item2.GetMean()));

     // Clamp the chosen coordinate of v to the observed value.
     Variable<double> pinnedCoordinate = Variable.GetItem(v, observedIndex);
     pinnedCoordinate.ObservedValue = observedValue;

     var engine = new InferenceEngine(new VariationalMessagePassing());
     return engine.Infer<VectorGaussian>(v);
 }
예제 #20
0
        /// <summary>
        /// Infers the mean and precision of a Gaussian source from increasing numbers
        /// of samples (50, 100, ..., 250) and records the actual and implied density
        /// curves in <c>this.Series</c>.
        /// </summary>
        public override void Run()
        {
            // Ground-truth source distribution the samples are drawn from.
            var sourceMean = 11.4;
            var sourcePrecision = 0.01;
            var source = Gaussian.FromMeanAndPrecision(sourceMean, sourcePrecision);
            var series = new List<LabelledSeries<Tuple<double, double>>>();
            // First series: the actual source density evaluated over x in [-30, 50).
            series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Actual mean {0} precision {1}", source.GetMean(), source.Precision), Enumerable.Range(-30, 80).Select(x => Tuple.Create((double)x, Math.Exp(source.GetLogProb(x))))));

            // Prior distributions
            var meanPriorDistr = Gaussian.FromMeanAndPrecision(0, 0.01);
            var precisionPriorDistr = Gamma.FromMeanAndVariance(2, 5);

            var meanPrior = Variable.Random(meanPriorDistr).Named("mean");
            var precPrior = Variable.Random(precisionPriorDistr).Named("precision");
            // The sample count is a variable observed later, so the compiled model
            // can be reused for each sample-size setting.
            var tv = Variable.New<int>();
            var tr = new Range(tv).Named("tr");
            var engine = new InferenceEngine();
            var xv = Variable.GaussianFromMeanAndPrecision(meanPrior, precPrior).Named("xv");
            var xs = Variable.Array<double>(tr).Named("xs");
            xs[tr] = xv.ForEach(tr);

            var maxSampleSize = 250;
            var sampleData = Enumerable.Range(0, maxSampleSize + 1).Select(_ => source.Sample()).ToArray();

            // Re-run inference with progressively larger prefixes of the sample set.
            for (var i = 50; i <= maxSampleSize; i += 50)
            {
                tv.ObservedValue = i;
                xs.ObservedValue = sampleData.Take(i).ToArray();
                var meanPost = engine.Infer<Gaussian>(meanPrior);
                var precPost = engine.Infer<Gamma>(precPrior);
                // Point-estimate Gaussian built from the posterior means of mean and precision.
                var estimateDist = Gaussian.FromMeanAndPrecision(meanPost.GetMean(), precPost.GetMean());
                series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Implied mean {0} precision {1} with {2} samples", Math.Round(estimateDist.GetMean(), 4), Math.Round(estimateDist.Precision, 4), i), Enumerable.Range(-30, 80).Select(x => Tuple.Create((double)x, Math.Exp(estimateDist.GetLogProb(x))))));
            }

            this.Series = series.ToArray();
        }
예제 #21
0
        /// <summary>
        /// Bayesian linear regression on a full batch of data: infers the posterior
        /// over the weight vector given inputs <paramref name="xdata"/> and targets
        /// <paramref name="ydata"/>, assuming unit-variance observation noise.
        /// </summary>
        /// <param name="xdata">Input feature vectors, one per observation.</param>
        /// <param name="ydata">Observed targets, aligned with <paramref name="xdata"/>.</param>
        /// <returns>The Gaussian posterior over the regression weights.</returns>
        public VectorGaussian BatchRegression(Vector[] xdata, double[] ydata)
        {
            int dimension = xdata[0].Count;
            Range observation = new Range(xdata.Length);

            // Observed input vectors.
            VariableArray<Vector> inputs = Variable.Observed(xdata, observation).Named("x");

            // Standard-normal prior over the weight vector.
            Variable<Vector> weights =
                Variable.VectorGaussianFromMeanAndPrecision(
                    Vector.Constant(dimension, 0.0),
                    PositiveDefiniteMatrix.Identity(dimension)).Named("w");

            // Each target is the inner product of input and weights plus unit-variance noise.
            VariableArray<double> targets = Variable.Array<double>(observation);
            targets[observation] = Variable.GaussianFromMeanAndVariance(Variable.InnerProduct(inputs[observation], weights), 1.0);
            targets.ObservedValue = ydata;

            return new InferenceEngine().Infer<VectorGaussian>(weights);
        }
예제 #22
0
        /// <summary>
        /// Constrains both elements of a Bernoulli array equal to a single variable c
        /// and observes both elements as true; the posterior of c must then be a point
        /// mass at true.
        /// </summary>
        public void ConstrainEqualManyToOneTest()
        {
            double bPrior = 0.1;
            double cPrior = 0.2;
            var    c      = Variable.Bernoulli(cPrior).Named("c");
            Range  i      = new Range(2).Named("i");
            var    bools  = Variable.Array <bool>(i).Named("bools");

            using (Variable.ForEach(i))
            {
                // Each element has its own prior but is tied to c.
                bools[i] = Variable.Bernoulli(bPrior);
                Variable.ConstrainEqual(bools[i], c);
            }
            bools.ObservedValue = new bool[] { true, true };
            InferenceEngine engine    = new InferenceEngine();
            Bernoulli       cActual   = engine.Infer <Bernoulli>(c);
            Bernoulli       cExpected = new Bernoulli(1);

            Console.WriteLine("c = {0} should be {1}", cActual, cExpected);
            Assert.True(cExpected.MaxDiff(cActual) < 1e-10);
        }
예제 #23
0
        /// <summary>
        /// Tests that a variable defined as DiscreteUniform in one gate branch and
        /// Binomial in the other receives a compatible marginal prototype.
        /// </summary>
        public void BinomialMarginalPrototype2()
        {
            Variable <int> n = Variable.New <int>().Named("n");

            //Variable<int> x = Variable.Binomial(n, 0.1);
            n.ObservedValue = 4;
            var x = Variable.New <int>();
            var b = Variable.Observed(false);

            using (Variable.If(b))
            {
                // x uniform over {0..n} when b is true.
                x.SetTo(Variable.DiscreteUniform(n + 1));
            }
            using (Variable.IfNot(b))
            {
                // x ~ Binomial(n, 0.1) when b is false (the observed branch).
                x.SetTo(Variable.Binomial(n, 0.1));
            }
            InferenceEngine ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(x));
        }
예제 #24
0
        /// <summary>
        /// Two-component Gaussian mixture selected by an enum-valued coin: x is
        /// N(1,1) or N(2,1) with equal probability; the marginal should match the
        /// moment-matched Gaussian(1.5, 1.25).
        /// </summary>
        public void CaseExampleEnum()
        {
            Variable <Coin>   c    = Variable.EnumDiscrete <Coin>(new double[] { 0.5, 0.5 });
            Variable <double> x    = Variable.New <double>();
            // Gates switch on the integer form of the enum variable.
            Variable <int>    cint = Variable.EnumToInt(c);

            using (Variable.Case(cint, 0))
            {
                x.SetTo(Variable.GaussianFromMeanAndVariance(1, 1));
            }
            using (Variable.Case(cint, 1))
            {
                x.SetTo(Variable.GaussianFromMeanAndVariance(2, 1));
            }
            InferenceEngine engine   = new InferenceEngine();
            Gaussian        expected = new Gaussian(1.5, 1.25);
            Gaussian        actual   = engine.Infer <Gaussian>(x);

            Console.WriteLine("x = {0} (should be {1})", actual, expected);
            Assert.True(expected.MaxDiff(actual) < 1e-10);
        }
예제 #25
0
        /// <summary>
        /// TrueSkill-style skill inference from six game results: each game's winner
        /// is constrained to have out-performed the loser, and the posterior skill of
        /// every player is printed in descending order of mean.
        /// </summary>
        static void Main(string[] args)
        {
            // the winner and loser in each of 6 games
            var winnerData = new[] { 0, 0, 0, 1, 3, 4 };
            var loserData  = new[] { 1, 3, 4, 2, 1, 2 };
            // Define the statistical model as a probabilistic program
            var game         = new Range(winnerData.Length);
            var player       = new Range(winnerData.Concat(loserData).Max() + 1);
            var playerSkills = Variable.Array <double>(player);

            // Prior: every player's skill ~ N(6, 9).
            playerSkills[player] = Variable.GaussianFromMeanAndVariance(6, 9).ForEach(player);

            var winners = Variable.Array <int>(game);
            var losers  = Variable.Array <int>(game);

            using (Variable.ForEach(game))
            {
                // the player performance is noisy version of their skill
                var winnerPerformance = Variable.GaussianFromMeanAndVariance(playerSkills[winners[game]], 1.0);
                var loserPerformance  = Variable.GaussianFromMeanAndVariance(playerSkills[losers[game]], 1.0);
                // The winner performed better in this game
                Variable.ConstrainTrue(winnerPerformance > loserPerformance);
            }
            // attach data to model
            winners.ObservedValue = winnerData;
            losers.ObservedValue  = loserData;
            // runing inference
            var inferenceEngine = new InferenceEngine();
            var inferredSkills  = inferenceEngine.Infer <Gaussian[]>(playerSkills);

            // the inferred skills are uncertain, which is captured in their variance
            var orderedPlayerSkills = inferredSkills
                                      .Select((s, i) => new { Player = i, Skill = s })
                                      .OrderByDescending(ps => ps.Skill.GetMean());

            foreach (var playerSkill in orderedPlayerSkills)
            {
                Console.WriteLine($"Player {playerSkill.Player} skill: {playerSkill.Skill}");
            }
        }
예제 #26
0
        /// <summary>
        /// Tests a TruncatedGaussian marginal prototype on a copied variable whose
        /// sign is observed (differently) inside both branches of a Bernoulli gate,
        /// then infers the original variable under VMP.
        /// </summary>
        public void TruncatedGaussianEntersGate2()
        {
            var x     = Variable.GaussianFromMeanAndPrecision(0, 1).Named("x");
            var xCopy = Variable.Copy(x);

            // Represent the copy with a TruncatedGaussian inside the gates.
            xCopy.AddAttribute(new MarginalPrototype(new TruncatedGaussian()));
            var l = Variable.Bernoulli(0.9).Named("l");

            using (Variable.If(l))
            {
                var True = Variable.IsPositive(xCopy).Named("VarTrue");
                True.ObservedValue = true;
            }
            using (Variable.IfNot(l))
            {
                var False = Variable.IsPositive(xCopy).Named("VarFalse");
                False.ObservedValue = false;
            }
            var ie = new InferenceEngine(new VariationalMessagePassing());

            Console.WriteLine(ie.Infer(x));
        }
예제 #27
0
파일: Task2_6.cs 프로젝트: Lanayx/InferNet (File: Task2_6.cs, Project: Lanayx/InferNet)
        /// <summary>
        /// Skill-assessment model: each of 48 questions requires a subset of 7 skills;
        /// an answer is correct with probability 0.9 when all required skills are
        /// present, otherwise with a Beta(3,7)-distributed guess probability. Prints
        /// the inferred skill posteriors for every user.
        /// </summary>
        public static void Run()
        {
            var questionVsSkills = LoadQuestionsVsSkills();
            var questiosVsUsers  = LoadQuestionsVsUsers();
            var indexes          = GetIndexes(questionVsSkills);

            var skills     = new Range(7);
            var skillsVars = Variable.Array <bool>(skills).Named("skillsVars");

            // Uniform prior: each skill is possessed with probability 0.5.
            skillsVars[skills] = Variable.Bernoulli(0.5).ForEach(skills);

            var questions      = new Range(48);
            var correctAnswers = Variable.Array <bool>(questions);

            var indices = ConvertIndexes(indexes, questions);

            using (Variable.ForEach(questions))
            {
                // The subset of skills relevant to this question.
                var results = Variable.Subarray <bool>(skillsVars, indices[questions]);
                var hasAllRelevantSkills = Variable.AllTrue(results);
                using (Variable.If(hasAllRelevantSkills))
                    correctAnswers[questions].SetTo(Variable.Bernoulli(0.9));
                using (Variable.IfNot(hasAllRelevantSkills))
                    correctAnswers[questions].SetTo(Variable.Bernoulli(Variable.Beta(3, 7)));
            }
            var engine = new InferenceEngine();

            engine.ShowProgress = false;
            foreach (var userAnswers in questiosVsUsers)
            {
                // Observe this user's answers and re-infer their skills.
                correctAnswers.ObservedValue = userAnswers;
                dynamic result = engine.Infer(skillsVars);
                foreach (var item in result)
                {
                    // Print each skill posterior, dropping the first 9 characters
                    // of its ToString output.
                    Console.Write(item.ToString().Substring(9));
                }
                Console.WriteLine();
            }
        }
예제 #28
0
        /// <summary>
        /// Re-runs inference with an increasing iteration count until the posterior
        /// over <c>difficulty</c> changes by less than <paramref name="tolerance"/>
        /// between successive iteration counts (capped at 999 iterations).
        /// </summary>
        /// <param name="tolerance">Maximum allowed MaxDiff between successive posteriors.</param>
        public void RunToConvergence(double tolerance = 1e-4)
        {
            // ReturnCopies is needed so posteriors from different runs can be compared.
            engine.Compiler.ReturnCopies = true;
            Diffable previous = null;

            for (int iteration = 1; iteration < 1000; iteration++)
            {
                engine.NumberOfIterations = iteration;
                Diffable current = engine.Infer<Diffable>(difficulty);
                if (previous != null && current.MaxDiff(previous) < tolerance)
                {
                    break;
                }
                previous = current;
            }
        }
예제 #29
0
        /// <summary>
        /// Tests marginal-prototype handling when copying an array element that is
        /// selected by a Switch over a uniformly random index.
        /// </summary>
        public void MP_Switch()
        {
            var   T_val_range = new Range(2);
            Range T_cat_range = new Range(2);
            var   T_cat_val_p = Variable.Array <Vector>(T_cat_range).Named("T_cat_val_p");

            using (Variable.ForEach(T_cat_range))
            {
                // Uniform Dirichlet prior over each category's probability vector.
                T_cat_val_p[T_cat_range] = Variable.DirichletUniform(T_val_range);
            }
            var v3 = Variable.DiscreteUniform(T_cat_range);
            var v1 = Variable.New <Vector>();

            // v1 is a copy of whichever probability vector v3 selects.
            using (Variable.Switch(v3))
            {
                v1.SetTo(Variable.Copy(T_cat_val_p[v3]));
            }

            InferenceEngine engine = new InferenceEngine();

            Console.WriteLine(engine.Infer(v1));
        }
예제 #30
0
        /// <summary>
        /// Experiment 1: the probability that two fair coin flips both come up heads.
        /// </summary>
        private static void Experiment_1()
        {
            // Build the probabilistic model: two fair coins, combined with AND.
            Variable <bool> ersteMünzeWurf  = Variable.Bernoulli(0.5);
            Variable <bool> zweiteMünzeWurf = Variable.Bernoulli(0.5);
            Variable <bool> beideMünzenWurf = ersteMünzeWurf & zweiteMünzeWurf;

            // Create the inference engine (IE)
            InferenceEngine engine = new InferenceEngine();

#if SHOW_MODEL
            engine.ShowFactorGraph = true; // visualize the model
#endif

            // Run inference
            Bernoulli ergebnis = engine.Infer <Bernoulli>(beideMünzenWurf);
            double    beideMünzenZeigenKöpfe = ergebnis.GetProbTrue();


            Console.WriteLine("Die Wahrscheinlichkeit - beide Münzen " +
                              "zeigen Köpfe: {0}", beideMünzenZeigenKöpfe);
        }
예제 #31
0
        /// <summary>
        /// Equality constraints forming a cycle (x == y, y == z, x == z, z == w) must
        /// propagate the observation w = true all the way to x, making its posterior a
        /// point mass at true.
        /// </summary>
        public void ConstrainEqualCycleTest()
        {
            double xPrior = 0.1;
            double yPrior = 0.2;
            double zPrior = 0.3;
            double wPrior = 0.4;
            var    x      = Variable.Bernoulli(xPrior).Named("x");
            var    y      = Variable.Bernoulli(yPrior).Named("y");
            var    z      = Variable.Bernoulli(zPrior).Named("z");
            var    w      = Variable.Bernoulli(wPrior).Named("w");

            Variable.ConstrainEqual(x, y);
            Variable.ConstrainEqual(y, z);
            Variable.ConstrainEqual(x, z);
            Variable.ConstrainEqual(z, w);
            w.ObservedValue = true;
            InferenceEngine engine    = new InferenceEngine();
            Bernoulli       xActual   = engine.Infer <Bernoulli>(x);
            Bernoulli       xExpected = new Bernoulli(1);

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            // BUG FIX: the expected value was computed and printed but never checked;
            // assert it, matching the sibling ConstrainEqual tests in this file.
            Assert.True(xExpected.MaxDiff(xActual) < 1e-10);
        }
예제 #32
0
        /// <summary>
        /// Item-response model with a per-question discrimination factor: the response
        /// log-odds are ProductExp(ability - difficulty, logDisc). Infers logDisc under
        /// VMP from synthetic responses.
        /// </summary>
        internal void LogisticIrtProductExpTest()
        {
            int   numStudents = 20;
            Range student     = new Range(numStudents).Named("students");
            var   ability     = Variable.Array <double>(student).Named("ability");

            // Very broad (precision 1e-6, near-flat) priors on ability and difficulty.
            ability[student] = Variable.GaussianFromMeanAndPrecision(0, 1e-6).ForEach(student);
            int   numQuestions = 4;
            Range question     = new Range(numQuestions).Named("questions");
            var   difficulty   = Variable.Array <double>(question).Named("difficulty");

            difficulty[question] = Variable.GaussianFromMeanAndPrecision(0, 1e-6).ForEach(question);
            var logDisc = Variable.Array <double>(question).Named("logDisc");

            logDisc[question] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(question);
            var response = Variable.Array <bool>(student, question).Named("response");
            var minus    = Variable.Array <double>(student, question).Named("minus");

            minus[student, question] = (ability[student] - difficulty[question]);
            var product = Variable.Array <double>(student, question).Named("product");

            product[student, question]  = Variable.ProductExp(minus[student, question], logDisc[question]);
            response[student, question] = Variable.BernoulliFromLogOdds(product[student, question]);
            // Synthetic responses: student i answers question j correctly iff i > j.
            bool[,] data = new bool[numStudents, numQuestions];
            for (int i = 0; i < numStudents; i++)
            {
                for (int j = 0; j < numQuestions; j++)
                {
                    data[i, j] = (i > j);
                }
            }
            response.ObservedValue = data;
            InferenceEngine engine = new InferenceEngine();

            // NOTE(review): ShowFactorGraph opens a graph viewer when run interactively.
            engine.ShowFactorGraph = true;
            engine.Algorithm       = new VariationalMessagePassing();
            Console.WriteLine(engine.Infer(logDisc));
        }
예제 #33
0
        /// <summary>
        /// Tests constant propagation through gates: with a constrained true, b tied to
        /// c, and d constrained true inside the If(c) gate, the posterior of d should
        /// be Bernoulli(2/3).
        /// </summary>
        public void ConstantPropagationTest2()
        {
            var a = Variable.Bernoulli(0.5).Named("a");
            var b = Variable.Bernoulli(0.5).Named("b");
            var c = Variable.Bernoulli(0.5).Named("c");
            var d = Variable.Bernoulli(0.5).Named("d");

            using (Variable.If(c))
            {
                Variable.ConstrainTrue(d);
            }
            Variable.ConstrainEqual(b, c);
            using (Variable.IfNot(a))
            {
                // This branch is ruled out by ConstrainTrue(a) below.
                Variable.ConstrainTrue(b);
            }
            Variable.ConstrainTrue(a);
            InferenceEngine engine    = new InferenceEngine();
            Bernoulli       dActual   = engine.Infer <Bernoulli>(d);
            Bernoulli       dExpected = new Bernoulli(2.0 / 3);

            Assert.Equal(dActual, dExpected);
        }
예제 #34
0
        /// <summary>
        /// Experiment 3: the probability that three dice all show six, using 0.17 as
        /// an approximation of 1/6.
        /// </summary>
        private static void Experiment_3()
        {
            // Probability of rolling a six is 1 in 6 = 1/6, approximately 0.17
            double erfolgWurfel = 0.17;
            // Build the probabilistic model: three dice, combined with AND.
            Variable <bool> ersterWürfelWurf  = Variable.Bernoulli(erfolgWurfel);
            Variable <bool> zweiterWürfelWurf = Variable.Bernoulli(erfolgWurfel);
            Variable <bool> dritterWürfelWurf = Variable.Bernoulli(erfolgWurfel);
            Variable <bool> alleWürfelWurf    = ersterWürfelWurf & zweiterWürfelWurf & dritterWürfelWurf;

            // Create the inference engine (IE)
            InferenceEngine engine = new InferenceEngine();

#if SHOW_MODEL
            engine.ShowFactorGraph = true; // visualize the model
#endif

            // Run inference - all dice show 6
            Bernoulli ergebnis1             = engine.Infer <Bernoulli>(alleWürfelWurf);
            double    alleWürfelZeigenSechs = ergebnis1.GetProbTrue();

            Console.WriteLine("Die Wahrscheinlichkeit, dass alle Würfel \"6\" zeigen, ist: {0}", alleWürfelZeigenSechs);
        }
 /// <summary>
 /// Regression test for BernoulliFromLogOdds under VMP: infers the posterior over x
 /// given s = true and compares mean/variance against reference values computed in
 /// MATLAB to a relative error of 1e-6.
 /// </summary>
 public void BernoulliFromLogOddsUnivariate()
 {
     Rand.Restart(12347);
     Variable<double> x = Variable.GaussianFromMeanAndVariance(0, 1).Named("x");
     Variable<bool> s = Variable.BernoulliFromLogOdds(x).Named("s");
     s.ObservedValue = true;
     InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());
     // Prefer these operator implementations over the defaults for this factor.
     ie.Compiler.GivePriorityTo(typeof (LogisticOp));
     ie.Compiler.GivePriorityTo(typeof (BernoulliFromLogOddsOp));
     Gaussian xActual = ie.Infer<Gaussian>(x);
     Console.WriteLine("x = {0}", xActual);
     double m, v;
     xActual.GetMeanAndVariance(out m, out v);
     // Reference values computed externally (MATLAB).
     double matlabM = 0.413126805979683;
     double matlabV = 0.828868291887001;
     Gaussian xExpected = new Gaussian(matlabM, matlabV);
     double relErr = System.Math.Abs((m - matlabM)/ matlabM);
     Console.WriteLine("Posterior mean is {0} should be {1} (relErr = {2})", m, matlabM, relErr);
     Assert.True(relErr < 1e-6);
     relErr = System.Math.Abs((v - matlabV)/ matlabV);
     Console.WriteLine("Posterior variance is {0} should be {1} (relErr = {2})", v, matlabV, relErr);
     Assert.True(relErr < 1e-6);
 }
        /// <summary>
        /// A gated model where a string variable is set to "a" or "b" depending on a
        /// Bernoulli selector; verifies the posterior string probabilities equal the
        /// gate probabilities.
        /// </summary>
        public void SimpleGatedModelTest1()
        {
            const double SelectorProbabilityTrue = 0.3;

            Variable<bool> gate = Variable.Bernoulli(SelectorProbabilityTrue).Named("selector");
            Variable<string> text = Variable.New<string>().Named("str");

            // "a" in the true branch, "b" in the false branch.
            using (Variable.If(gate))
            {
                text.SetTo("a");
            }
            using (Variable.IfNot(gate))
            {
                text.SetTo("b");
            }

            var ie = new InferenceEngine();
            var posterior = ie.Infer<StringDistribution>(text);

            StringInferenceTestUtilities.TestProbability(posterior, SelectorProbabilityTrue, "a");
            StringInferenceTestUtilities.TestProbability(posterior, 1 - SelectorProbabilityTrue, "b");
        }
예제 #37
0
        /// <summary>
        /// String-format model: an unconstrained random template combines two known
        /// properties (a name and a birth date); checks that several plausible
        /// sentences have non-zero probability under the inferred text distribution.
        /// </summary>
        public void SemanticWebTest2()
        {
            var prop0Dist = StringDistribution.OneOf("Anthony Blair", "Tony Blair");
            var prop1Dist = StringDistribution.OneOf("6 May 1953", "May 6, 1953");

            var prop0    = Variable.Random(prop0Dist);
            var prop1    = Variable.Random(prop1Dist);
            var template = Variable.Random(StringDistribution.Any());
            var text     = Variable.StringFormat(template, prop0, prop1);

            var engine = new InferenceEngine();

            // String inference requires the experimental quality band.
            engine.Compiler.RecommendedQuality = QualityBand.Experimental;
            engine.NumberOfIterations          = 1;

            var textDist = engine.Infer <StringDistribution>(text);

            Console.WriteLine("textDist={0}", textDist);

            Assert.False(double.IsNegativeInfinity(textDist.GetLogProb("6 May 1953 is the date of birth of Tony Blair.")));
            Assert.False(double.IsNegativeInfinity(textDist.GetLogProb("6 May 1953 is the date of birth of Anthony Blair.")));
            Assert.False(double.IsNegativeInfinity(textDist.GetLogProb("Mr. Tony Blair was born on May 6, 1953.")));
        }
예제 #38
0
    /// <summary>
    /// Demonstrates EP inference on a Gaussian variable constrained to exceed a
    /// threshold, printing the truncated posterior as the threshold sweeps 0..1.
    /// </summary>
    /// <param name="mean">Mean of the prior Gaussian over x.</param>
    /// <param name="variance">Variance of the prior Gaussian over x.</param>
    public static void TruncatedGaussian(double mean, double variance)
    {
        //Conditional Random Variable
        Variable <double> threshold = Variable.New <double>().Named("threshold");
        // BUG FIX: the mean/variance parameters were previously ignored
        // (the prior was hard-coded as GaussianFromMeanAndVariance(0, 1)).
        Variable <double> x         = Variable.GaussianFromMeanAndVariance(mean, variance).Named("x");

        Variable.ConstrainTrue(x > threshold);
        InferenceEngine engine = new InferenceEngine();

        if (engine.Algorithm is ExpectationPropagation)
        {
            for (double thresh = 0; thresh <= 1; thresh += 0.1)
            {
                threshold.ObservedValue = thresh;
                //moment-matched Gaussian distribution.
                Console.WriteLine($"Dist over x given thresh of {thresh}= {engine.Infer(x)}");
            }
        }
        else
        {
            Console.WriteLine("This example only runs with Expectation Propagation");
        }
    }
        /// <summary>
        /// A gated model where a uniform choice over {"a","b","c"} is constrained to
        /// "a" in the true branch and "b" in the false branch; checks the posterior
        /// string probabilities equal the gate probabilities.
        /// </summary>
        public void SimpleGatedModelTest2()
        {
            const double SelectorProbabilityTrue = 0.3;

            Variable<bool> gate = Variable.Bernoulli(SelectorProbabilityTrue).Named("selector");
            Variable<string> text = Variable.Random(StringDistribution.OneOf("a", "b", "c")).Named("str");

            using (Variable.If(gate))
            {
                Variable.ConstrainEqual(text, "a");
            }
            using (Variable.IfNot(gate))
            {
                Variable.ConstrainEqual(text, "b");
            }

            var ie = new InferenceEngine();
            var posterior = ie.Infer<StringDistribution>(text);

            StringInferenceTestUtilities.TestProbability(posterior, SelectorProbabilityTrue, "a");
            StringInferenceTestUtilities.TestProbability(posterior, 1 - SelectorProbabilityTrue, "b");
        }
예제 #40
0
        /// <summary>
        /// Tests gate exit with an equality constraint: x is drawn inside a Bernoulli
        /// gate and then constrained to be true, so its posterior must be a point mass.
        /// </summary>
        public void ConstrainEqualGateExitTest4()
        {
            double bPrior = 0.1;
            var    b      = Variable.Bernoulli(bPrior).Named("b");
            var    x      = Variable.New <bool>().Named("x");

            using (Variable.If(b))
            {
                x.SetTo(Variable.Bernoulli(0.2));
            }
            using (Variable.IfNot(b))
            {
                x.SetTo(Variable.Bernoulli(0.3));
            }
            // Constraining x == true makes its posterior deterministic.
            Variable.ConstrainEqual(x, true);
            InferenceEngine engine     = new InferenceEngine();
            Bernoulli       xActual    = engine.Infer <Bernoulli>(x);
            Bernoulli       xExpected  = new Bernoulli(1);
            // CLEANUP: removed the unused local 'evExpected' (bPrior * 0.2 + (1 - bPrior));
            // model evidence was computed but never asserted — add an evidence variable
            // to the model if that check is wanted.

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            Assert.True(xExpected.MaxDiff(xActual) < 1e-10);
        }
예제 #41
0
        /// <summary>
        /// Two-player comparison: X ~ N(12.5, 25) and Y ~ N(15, 25); infers the
        /// probability that Y exceeds X under expectation propagation.
        /// </summary>
        static void Main(string[] args)
        {
            Variable <double> X = Variable.GaussianFromMeanAndVariance(12.5, 5 * 5);
            Variable <double> Y = Variable.GaussianFromMeanAndVariance(15, 5 * 5);

            Variable <bool> Ywins = (Y > X);

            //Ywins.ObservedValue = true;

            var engine = new InferenceEngine();

            engine.Algorithm = new ExpectationPropagation();

            //Gaussian XperfPosterior = engine.Infer<Gaussian>(X);
            //Gaussian YperfPosterior = engine.Infer<Gaussian>(Y);

            var YwinsPosterior = engine.Infer <Bernoulli>(Ywins);

            //Console.WriteLine(Xperf);
            //Console.WriteLine(Yperf);

            Console.WriteLine(YwinsPosterior);
        }
예제 #42
0
        /// <summary>
        /// TrueSkill-style skill inference: each game's winner is constrained to have
        /// out-performed the loser, and the posterior skill of every player is printed
        /// in descending order of mean skill.
        /// </summary>
        static void Main(string[] args)
        {
            int[] gameWinners = { 0, 0, 0, 1, 3, 4 };
            int[] gameLosers  = { 1, 3, 4, 2, 1, 2 };

            var game   = new Range(gameWinners.Length);
            var player = new Range(gameWinners.Concat(gameLosers).Max() + 1);

            // Prior: every player's skill ~ N(6, 9).
            var playerSkills = Variable.Array<double>(player);
            playerSkills[player] = Variable.GaussianFromMeanAndVariance(6, 9).ForEach(player);

            var winners = Variable.Array<int>(game);
            var losers  = Variable.Array<int>(game);

            using (Variable.ForEach(game))
            {
                // Performance is a unit-variance noisy draw around skill.
                var winnerPerformance = Variable.GaussianFromMeanAndVariance(playerSkills[winners[game]], 1.0);
                var loserPerformance  = Variable.GaussianFromMeanAndVariance(playerSkills[losers[game]], 1.0);

                // The recorded winner performed better in this game.
                Variable.ConstrainTrue(winnerPerformance > loserPerformance);
            }

            winners.ObservedValue = gameWinners;
            losers.ObservedValue  = gameLosers;

            var engine = new InferenceEngine();
            var skillPosteriors = engine.Infer<Gaussian[]>(playerSkills);

            var ranked = skillPosteriors
                .Select((skill, index) => new { Player = index, Skill = skill })
                .OrderByDescending(entry => entry.Skill.GetMean());

            foreach (var entry in ranked)
            {
                Console.WriteLine($"Player {entry.Player} skill: {entry.Skill}");
            }
        }
예제 #43
0
        /// <summary>
        /// Fits a GP (squared-exponential plus white-noise kernel) to 50 samples of a
        /// cosine and plots the posterior mean with a one-standard-deviation band via
        /// the matplotlib wrapper.
        /// </summary>
        public static void Test()
        {
            // Inputs 0..49 (as 1-D vectors) with targets cos(2x/10).
            var inputs = Enumerable.Range(0, 50).Select(i => Vector.Constant(1, i)).ToArray();
            var data   = inputs.Select(j => Math.Cos(2 * j[0] / 10.0)).ToArray();
            var n      = new Range(data.Length);
            //var kf = new SummationKernel(new ARD(new double[]{ 0 }, 0))+new WhiteNoise();
            var kf = new SummationKernel(new SquaredExponential()) + new WhiteNoise();
            var y  = Variable <Vector> .Factor <double, Vector[], int[], KernelFunction>(MyFactors.GP, 1.0 /*Variable.GammaFromShapeAndRate(1,1)*/, inputs, new int[] { 0, 1 },
                                                                                         kf);

            GPFactor.settings = new Settings
            {
                solverMethod = Settings.SolverMethod.GradientDescent,
            };
            y.AddAttribute(new MarginalPrototype(new VectorGaussian(n.SizeAsInt)));
            var y2 = Variable.ArrayFromVector(y, n);

            y2.ObservedValue = data;
            // Predictive distribution at the same input locations.
            var ypredictive = Variable.ArrayFromVector(y, n);
            var ie          = new InferenceEngine(new VariationalMessagePassing());
            var post        = ie.Infer <Gaussian[]>(ypredictive);

            var mplWrapper = new MatplotlibWrapper();

            mplWrapper.AddArray("x", inputs.Select(j => j[0]).ToArray());
            mplWrapper.AddArray("y", data);
            // Posterior mean and one-standard-deviation band.
            var f = post.Select(i => i.GetMean()).ToArray();
            var e = post.Select(i => Math.Sqrt(i.GetVariance())).ToArray();

            mplWrapper.AddArray("f", f);
            mplWrapper.AddArray("e", e);

            mplWrapper.Plot(new string[] {
                "fill_between(x,f-e,f+e,color=\"gray\")",
                "scatter(x,y)"
            });
        }
예제 #44
0
        /// <summary>
        /// Two-component Gaussian mixture over four observed data points: infers the
        /// component assignments c under VMP and compares against reference posteriors.
        /// </summary>
        public void ConstrainEqualCaseArrayTest()
        {
            Range i = new Range(4).Named("i");
            VariableArray <double> x = Variable.Array <double>(i).Named("x");
            VariableArray <int>    c = Variable.Array <int>(i).Named("c");

            using (Variable.ForEach(i))
            {
                // Equal-weight component selector; component means 1 and 2, unit variance.
                c[i] = Variable.Discrete(new double[] { 0.5, 0.5 });
                using (Variable.Case(c[i], 0))
                {
                    x[i] = Variable.GaussianFromMeanAndVariance(1, 1);
                }
                using (Variable.Case(c[i], 1))
                {
                    x[i] = Variable.GaussianFromMeanAndVariance(2, 1);
                }
            }

            VariableArray <double> data = Variable.Observed(new double[] { 0.9, 1.1, 1.9, 2.1 }, i).Named("data");

            Variable.ConstrainEqual(x[i], data[i]);
            InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());

            // Reference posteriors for the four data points.
            Discrete[] cExpectedArray = new Discrete[]
            {
                new Discrete(0.6457, 0.3543),
                new Discrete(0.5987, 0.4013), new Discrete(0.4013, 0.5987),
                new Discrete(0.3543, 0.6457)
            };
            IDistribution <int[]> cExpected = Distribution <int> .Array(cExpectedArray);

            object cActual = engine.Infer(c);

            Console.WriteLine(StringUtil.JoinColumns("c = ", cActual, " should be ", cExpected));
            Assert.True(cExpected.MaxDiff(cActual) < 1e-4);
        }
예제 #45
0
        /// <summary>
        /// Gated substring model with an observed output "bc": checks that the
        /// selector posterior equals the ratio of the two branches' likelihoods of
        /// producing "bc".
        /// </summary>
        public void GatedModelObservedOutputTest2()
        {
            Variable <bool>   selector = Variable.Bernoulli(0.5);
            Variable <string> substr   = Variable.New <string>();

            using (Variable.If(selector))
            {
                Variable <string> str = Variable.Random(StringDistribution.OneOf("dbc", "abcd", "abcc"));
                substr.SetTo(Variable.Substring(str, 1, 2));
            }

            using (Variable.IfNot(selector))
            {
                // A uniformly random lowercase string.
                Variable <string> str = Variable.StringLower();
                substr.SetTo(Variable.Substring(str, 0, 2));
            }

            substr.ObservedValue = "bc";

            var engine            = new InferenceEngine();
            var selectorPosterior = engine.Infer <Bernoulli>(selector);

            Assert.Equal(1.0 / (1.0 + (1.0 / (26 * 26))), selectorPosterior.GetProbTrue(), LogProbEps);
        }
        /// <summary>
        /// The second branch produces a length-3 substring, which can never equal the
        /// observed two-character "ab", so the selector posterior must place all mass
        /// on case 0.
        /// </summary>
        public void ImpossibleBranchTest2()
        {
            Variable <string> subStr   = Variable.New <string>().Named("subStr");
            Variable <int>    selector = Variable.DiscreteUniform(2).Named("selector");

            using (Variable.Case(selector, 0))
            {
                var str = Variable.StringUniform();
                subStr.SetTo(Variable.Substring(str, 2, 2));
            }

            using (Variable.Case(selector, 1))
            {
                var str = Variable.StringUniform();
                subStr.SetTo(Variable.Substring(str, 2, 3));
            }

            subStr.ObservedValue = "ab";

            var engine            = new InferenceEngine();
            var selectorPosterior = engine.Infer <Discrete>(selector);

            Assert.Equal(1.0, selectorPosterior[0]);
        }
예제 #47
0
        /// <summary>
        /// Gated substring model where the output is constrained to a random (uncertain)
        /// string distribution rather than observed; checks the selector posterior is 2/3.
        /// </summary>
        public void GatedModelUncertainOutputTest()
        {
            Variable <bool>   selector = Variable.Bernoulli(0.5);
            Variable <string> substr   = Variable.New <string>();

            using (Variable.If(selector))
            {
                Variable <string> str = Variable.Random(StringDistribution.OneOf("bcad", "bacd", "bca"));
                substr.SetTo(Variable.Substring(str, 0, 2));
            }

            using (Variable.IfNot(selector))
            {
                Variable <string> str = Variable.Random(StringDistribution.OneOf("dbc", "abdd", "a"));
                substr.SetTo(Variable.Substring(str, 1, 2));
            }

            // Soft observation: the substring is "bc" or "ba" with equal probability.
            Variable.ConstrainEqualRandom(substr, StringDistribution.OneOf("bc", "ba"));

            var engine            = new InferenceEngine();
            var selectorPosterior = engine.Infer <Bernoulli>(selector);

            Assert.Equal(2.0 / 3.0, selectorPosterior.GetProbTrue(), LogProbEps);
        }
예제 #48
0
        // Testing online learning: repeatedly infer the posterior of a Gaussian mean
        // and feed it back in as the prior for the next observation.
        //    [Fact]
        internal void OnlineGaussian()
        {
            // Make data: T observations from a synthetic "Parabola" series.
            // psi is an output of the data generator — presumably the process/drift
            // variance added to the prior each step (TODO confirm against CreateData).
            double psi;
            int    T = 1;

            double[] data = CreateData("Parabola", T, out psi);
            Console.WriteLine("psi = {0}", psi);
            for (int i = 0; i < T; i++)
            {
                Console.Write(" " + data[i]);
            }
            Console.WriteLine();

            InferenceEngine ie = new InferenceEngine();

            ie.ShowFactorGraph = true;
            Variable <double>   m;
            // Observed prior distribution over m, so it can be updated between inference
            // calls without recompiling the model.
            Variable <Gaussian> mprior = Variable.Observed(new Gaussian(data[0], 10 + psi));

            //   m = Variable.GaussianFromMeanAndVariance(Variable.Random<double, Gaussian>(mprior), psi).Named("m");
            m = Variable.Random <double, Gaussian>(mprior).Named("m");
            // Observed likelihood term for the current data point (unit observation noise).
            Variable <Gaussian> xObsDist = Variable.Observed(new Gaussian(data[0], 1));

            Variable.ConstrainEqualRandom <double, Gaussian>(m, xObsDist);
            for (int t = 0; t < T; t++)
            {
                // Online update: observe data[t], infer the posterior over m, then
                // set the next prior to that posterior widened by psi.
                xObsDist.ObservedValue = new Gaussian(data[t], 1);
                Gaussian mpost = ie.Infer <Gaussian>(m);
                double   mpost_mean, mpost_var;
                mpost.GetMeanAndVariance(out mpost_mean, out mpost_var);
                mprior.ObservedValue = new Gaussian(mpost_mean, mpost_var + psi);
                // mprior.Value = ie.Infer<Gaussian>(m);
                Console.WriteLine(mpost);
            }
        }
예제 #49
0
        /// <summary>
        /// Tests that the compiler derives the correct marginal prototype for a Discrete
        /// variable whose probability vector is built from an array local to a ForEach
        /// block over a cloned range.
        /// </summary>
        public void LocalArrayMarginalPrototypeTest()
        {
            var TA_size  = Variable.New <int>().Named("TA_size");
            var TA_range = new Range(TA_size).Named("TA_range");
            var TB_size  = Variable.New <int>().Named("TB_size");
            var TB_range = new Range(TB_size).Named("TB_range");
            var TB_B     = Variable.Array <int>(TB_range).Named("TB_B");

            // Each element of TB_B takes values in TA_range.
            TB_B.SetValueRange(TA_range);

            using (Variable.ForEach(TB_range))
            {
                // TA_range must be cloned because it is used inside a ForEach over a
                // different range; the local array of constant 1.0s becomes an
                // (unnormalized) uniform probability vector.
                var TA_range_clone = TA_range.Clone();
                var array          = Variable.Array <double>(TA_range_clone).Named("array");
                using (Variable.ForEach(TA_range_clone))
                {
                    array[TA_range_clone].SetTo(Variable.Constant(1.0));
                }
                var vector = Variable.Vector(array).Named("vector");
                TB_B[TB_range].SetTo(Variable.Discrete(TA_range, vector));
            }
            var indices = Variable.Array <int>(new Range(2).Named("indices_range")).Named("indices");

            indices.ObservedValue = new int[] { 0, 1 };
            // Observe a subarray of TB_B to exercise the marginal-prototype inference path.
            var TB_B_Subarray = Variable.Subarray(TB_B, indices).Named("TB_B_Subarray");

            TB_B_Subarray.ObservedValue = new int[] { 0, 1 };

            TA_size.ObservedValue = 3;
            TB_size.ObservedValue = 3;

            InferenceEngine engine = new InferenceEngine();

            engine.OptimiseForVariables = new IVariable[] { TB_B };
            Console.WriteLine("Z=" + engine.Infer(TB_B));
        }
예제 #50
0
        /// <summary>
        /// Infers the posteriors of BCC using the attached data and priors.
        /// </summary>
        /// <param name="taskIndices">The matrix of the task indices (columns) of each worker (rows).</param>
        /// <param name="workerLabels">The matrix of the labels (columns) of each worker (rows).</param>
        /// <param name="priors">The priors of the BCC parameters.</param>
        /// <returns>The inferred BCC posteriors (evidence, background label probability,
        /// per-worker confusion matrices, true labels and their constraint, and — in
        /// prediction mode — per-worker label predictions).</returns>
        public virtual BCCPosteriors Infer(int[][] taskIndices, int[][] workerLabels, BCCPosteriors priors)
        {
            int workerCount = workerLabels.Length;

            SetPriors(workerCount, priors);
            AttachData(taskIndices, workerLabels, null);
            var result = new BCCPosteriors();

            Engine.NumberOfIterations    = NumberOfIterations;
            result.Evidence              = Engine.Infer <Bernoulli>(Evidence);
            result.BackgroundLabelProb   = Engine.Infer <Dirichlet>(BackgroundLabelProb);
            result.WorkerConfusionMatrix = Engine.Infer <Dirichlet[][]>(WorkerConfusionMatrix);
            result.TrueLabel             = Engine.Infer <Discrete[]>(TrueLabel);
            // Marginal divided by prior is used downstream as an incremental constraint.
            result.TrueLabelConstraint   = Engine.Infer <Discrete[]>(TrueLabel, QueryTypes.MarginalDividedByPrior);

            // Prediction mode is indicated by none of the workers having a label.
            // We can just look at the first one
            if (workerLabels[0] == null)
            {
                result.WorkerPrediction = Engine.Infer <Discrete[][]>(WorkerLabel);
            }

            return(result);
        }
예제 #51
0
		/// <summary>
		/// Runs a multiple-choice test model (ability/difficulty/discrimination per
		/// Dawid-Skene-style crowdsourcing) on sampled data and reports accuracy of
		/// the inferred true answers. Requires Expectation Propagation.
		/// </summary>
		public void Run()
		{
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation))
			{
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			} 
			
			Rand.Restart(0);

			int nQuestions = 100;
			int nSubjects = 40;
			int nChoices = 4;
			Gaussian abilityPrior = new Gaussian(0, 1);
			Gaussian difficultyPrior = new Gaussian(0, 1);
			Gamma discriminationPrior = Gamma.FromShapeAndScale(5, 1);

			// Sample synthetic responses together with the ground-truth parameters.
			double[] trueAbility, trueDifficulty, trueDiscrimination;
			int[] trueTrueAnswer;
			int[][] data = Sample(nSubjects, nQuestions, nChoices, abilityPrior, difficultyPrior, discriminationPrior,
				out trueAbility, out trueDifficulty, out trueDiscrimination, out trueTrueAnswer);

			Range question = new Range(nQuestions).Named("question");
			Range subject = new Range(nSubjects).Named("subject");
			Range choice = new Range(nChoices).Named("choice");
			var response = Variable.Array(Variable.Array<int>(question), subject).Named("response");
			response.ObservedValue = data;

			var ability = Variable.Array<double>(subject).Named("ability");
			ability[subject] = Variable.Random(abilityPrior).ForEach(subject);
			var difficulty = Variable.Array<double>(question).Named("difficulty");
			difficulty[question] = Variable.Random(difficultyPrior).ForEach(question);
			var discrimination = Variable.Array<double>(question).Named("discrimination");
			discrimination[question] = Variable.Random(discriminationPrior).ForEach(question);
			var trueAnswer = Variable.Array<int>(question).Named("trueAnswer");
			trueAnswer[question] = Variable.DiscreteUniform(nChoices).ForEach(question);

			using (Variable.ForEach(subject)) {
				using (Variable.ForEach(question)) {
					// A subject answers correctly when their noisy advantage
					// (ability - difficulty, with question-specific precision) is positive;
					// otherwise they guess uniformly at random.
					var advantage = (ability[subject] - difficulty[question]).Named("advantage");
					var advantageNoisy = Variable.GaussianFromMeanAndPrecision(advantage, discrimination[question]).Named("advantageNoisy");
					var correct = (advantageNoisy > 0).Named("correct");
					using (Variable.If(correct))
						response[subject][question] = trueAnswer[question];
					using (Variable.IfNot(correct))
						response[subject][question] = Variable.DiscreteUniform(nChoices);
				}
			}

			engine.NumberOfIterations = 5;
			subject.AddAttribute(new Sequential());  // needed to get stable convergence
			engine.Compiler.UseSerialSchedules = true;
			// NOTE(review): dead branch kept as a manual toggle — flip to true to
			// clamp the parameters, which reduces the model to majority voting.
			if (false) {
				// set this to do majority voting
				ability.ObservedValue = Util.ArrayInit(nSubjects, i => 0.0);
				difficulty.ObservedValue = Util.ArrayInit(nQuestions, i => 0.0);
				discrimination.ObservedValue = Util.ArrayInit(nQuestions, i => 1.0);
			}
			var trueAnswerPosterior = engine.Infer<IList<Discrete>>(trueAnswer);
			int numCorrect = 0;
			for (int q = 0; q < nQuestions; q++) {
				int bestGuess = trueAnswerPosterior[q].GetMode();
				if (bestGuess == trueTrueAnswer[q]) numCorrect++;
			}
			double pctCorrect = 100.0*numCorrect/nQuestions;
			Console.WriteLine("{0}% TrueAnswers correct", pctCorrect.ToString("f0"));
			var difficultyPosterior = engine.Infer<IList<Gaussian>>(difficulty);
			for (int q = 0; q < Math.Min(nQuestions, 4); q++) {
				Console.WriteLine("difficulty[{0}] = {1} (sampled from {2})", q, difficultyPosterior[q], trueDifficulty[q].ToString("g2"));
			}
			var discriminationPosterior = engine.Infer<IList<Gamma>>(discrimination);
			for (int q = 0; q < Math.Min(nQuestions, 4); q++) {
				Console.WriteLine("discrimination[{0}] = {1} (sampled from {2})", q, discriminationPosterior[q], trueDiscrimination[q].ToString("g2"));
			}
			var abilityPosterior = engine.Infer<IList<Gaussian>>(ability);
			for (int s = 0; s < Math.Min(nSubjects, 4); s++) {
				Console.WriteLine("ability[{0}] = {1} (sampled from {2})", s, abilityPosterior[s], trueAbility[s].ToString("g2"));
			}
		}
        /// <summary>
        /// Completes partially observed shape masks by constraining the observed pixels
        /// and inferring the remaining labels, reporting intermediate results via the
        /// MaskCompletionProgress event.
        /// </summary>
        /// <param name="shapeMasks">Per-observation pixel masks (grid-sized 2D arrays).</param>
        /// <param name="availableMaskPixels">Per-observation flags marking which pixels of
        /// <paramref name="shapeMasks"/> are actually observed.</param>
        /// <returns>The inferred label array for the final iteration count.</returns>
        public LabelArray CompleteMasks(bool[][,] shapeMasks, bool[][,] availableMaskPixels)
        {
            Debug.Assert(this.initialized);
            Debug.Assert(!this.working);
            Debug.Assert(shapeMasks.Length == availableMaskPixels.Length);
            Debug.Assert(shapeMasks.All(mask => mask.GetLength(0) == this.gridWidth.ObservedValue && mask.GetLength(1) == this.gridHeight.ObservedValue));
            Debug.Assert(availableMaskPixels.All(mask => mask.GetLength(0) == this.gridWidth.ObservedValue && mask.GetLength(1) == this.gridHeight.ObservedValue));

            this.working = true;

            this.observationCount.ObservedValue = shapeMasks.Length;

            this.InitializePerObservation();

            // Pin only the available pixels; unavailable ones keep their default
            // (presumably uniform) constraint so they can be inferred.
            for (int i = 0; i < shapeMasks.Length; ++i)
            {
                for (int x = 0; x < this.gridWidth.ObservedValue; ++x)
                {
                    for (int y = 0; y < this.gridHeight.ObservedValue; ++y)
                    {
                        if (availableMaskPixels[i][x, y])
                        {
                            this.noisyLabelsConstraint.ObservedValue[i][x, y] = Bernoulli.PointMass(shapeMasks[i][x, y]);
                        }
                    }
                }
            }

            var engine = new InferenceEngine();
            engine.Compiler.RequiredQuality = QualityBand.Unknown;
            engine.Compiler.RecommendedQuality = QualityBand.Unknown;
            //engine.Compiler.UseParallelForLoops = true;
            engine.OptimiseForVariables = new IVariable[]
            {
                this.labels,
            };

            // Re-run inference with increasing iteration counts so progress callbacks
            // can report intermediate completions.
            for (int iteration = this.IterationsBetweenCallbacks; iteration <= this.MaskCompletionIterationCount; iteration += this.IterationsBetweenCallbacks)
            {
                engine.NumberOfIterations = iteration;
                LabelArray completedLabels = engine.Infer<LabelArray>(this.labels);

                if (this.MaskCompletionProgress != null)
                {
                    this.MaskCompletionProgress(this, new MaskCompletionProgressEventArgs(iteration, completedLabels));
                }
            }

            engine.NumberOfIterations = this.MaskCompletionIterationCount;
            var result = engine.Infer<LabelArray>(this.labels);

            this.working = false;
            return result;
        }
예제 #53
0
		/// <summary>
		/// Runs a Matchbox-style recommender (Stern, Herbrich, Graepel 2009): trains user/item
		/// traits, biases and per-user thresholds on generated data, then makes a prediction
		/// for one user/item pair. Requires Expectation Propagation.
		/// </summary>
		public void Run()
		{
			// This example requires EP
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation)) {
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			}

			// Define counts
			int numUsers = 50;
			int numItems = 10;
			int numTraits = 2;
			Variable<int> numObservations = Variable.Observed(100).Named("numObservations");
			int numLevels = 2;

			// Define ranges
			Range user = new Range(numUsers).Named("user");
			Range item = new Range(numItems).Named("item");
			Range trait = new Range(numTraits).Named("trait");
			Range observation = new Range(numObservations).Named("observation");
			Range level = new Range(numLevels).Named("level");

			// Define latent variables
			var userTraits = Variable.Array(Variable.Array<double>(trait), user).Named("userTraits");
			var itemTraits = Variable.Array(Variable.Array<double>(trait), item).Named("itemTraits");
			var userBias = Variable.Array<double>(user).Named("userBias");
			var itemBias = Variable.Array<double>(item).Named("itemBias");
			var userThresholds = Variable.Array(Variable.Array<double>(level), user).Named("userThresholds");

			// Define priors as observed distribution arrays so they can be replaced by
			// posteriors for online/incremental learning below.
			var userTraitsPrior = Variable.Array(Variable.Array<Gaussian>(trait), user).Named("userTraitsPrior");
			var itemTraitsPrior = Variable.Array(Variable.Array<Gaussian>(trait), item).Named("itemTraitsPrior");
			var userBiasPrior = Variable.Array<Gaussian>(user).Named("userBiasPrior");
			var itemBiasPrior = Variable.Array<Gaussian>(item).Named("itemBiasPrior");
			var userThresholdsPrior = Variable.Array(Variable.Array<Gaussian>(level), user).Named("userThresholdsPrior");

			// Define latent variables statistically
			userTraits[user][trait] = Variable<double>.Random(userTraitsPrior[user][trait]);
			itemTraits[item][trait] = Variable<double>.Random(itemTraitsPrior[item][trait]);
			userBias[user] = Variable<double>.Random(userBiasPrior[user]);
			itemBias[item] = Variable<double>.Random(itemBiasPrior[item]);
			userThresholds[user][level] = Variable<double>.Random(userThresholdsPrior[user][level]);

			// Initialise priors
			Gaussian traitPrior = Gaussian.FromMeanAndVariance(0.0, 1.0);
			Gaussian biasPrior = Gaussian.FromMeanAndVariance(0.0, 1.0);

			userTraitsPrior.ObservedValue = Util.ArrayInit(numUsers, u => Util.ArrayInit(numTraits, t => traitPrior));
			itemTraitsPrior.ObservedValue = Util.ArrayInit(numItems, i => Util.ArrayInit(numTraits, t => traitPrior));
			userBiasPrior.ObservedValue = Util.ArrayInit(numUsers, u => biasPrior);
			itemBiasPrior.ObservedValue = Util.ArrayInit(numItems, i => biasPrior);
			// Thresholds are spaced around zero, one per rating level.
			userThresholdsPrior.ObservedValue = Util.ArrayInit(numUsers, u =>
					Util.ArrayInit(numLevels, l => Gaussian.FromMeanAndVariance(l - numLevels / 2.0 + 0.5, 1.0)));

			// Break symmetry and remove ambiguity in the traits by clamping the first
			// numTraits items' trait vectors to the identity matrix.
			for (int i = 0; i < numTraits; i++) {
				// Assume that numTraits < numItems
				for (int j = 0; j < numTraits; j++) {
					itemTraitsPrior.ObservedValue[i][j] = Gaussian.PointMass(0);
				}
				itemTraitsPrior.ObservedValue[i][i] = Gaussian.PointMass(1);
			}

			// Declare training data variables
			var userData = Variable.Array<int>(observation).Named("userData");
			var itemData = Variable.Array<int>(observation).Named("itemData");
			var ratingData = Variable.Array(Variable.Array<bool>(level), observation).Named("ratingData");

			// Set model noises explicitly
			Variable<double> affinityNoiseVariance = Variable.Observed(0.1).Named("affinityNoiseVariance");
			Variable<double> thresholdsNoiseVariance = Variable.Observed(0.1).Named("thresholdsNoiseVariance");

			// Model: rating is the noisy affinity (trait dot product plus biases)
			// compared against per-user noisy thresholds.
			using (Variable.ForEach(observation)) {
				VariableArray<double> products = Variable.Array<double>(trait).Named("products");
				products[trait] = userTraits[userData[observation]][trait] * itemTraits[itemData[observation]][trait];

				Variable<double> bias = (userBias[userData[observation]] + itemBias[itemData[observation]]).Named("bias");
				Variable<double> affinity = (bias + Variable.Sum(products).Named("productSum")).Named("affinity");
				Variable<double> noisyAffinity = Variable.GaussianFromMeanAndVariance(affinity, affinityNoiseVariance).Named("noisyAffinity");

				VariableArray<double> noisyThresholds = Variable.Array<double>(level).Named("noisyThresholds");
				noisyThresholds[level] = Variable.GaussianFromMeanAndVariance(userThresholds[userData[observation]][level], thresholdsNoiseVariance);
				ratingData[observation][level] = noisyAffinity > noisyThresholds[level];
			}

			// Observe training data
			GenerateData(numUsers, numItems, numTraits, numObservations.ObservedValue, numLevels,
									 userData, itemData, ratingData,
									 userTraitsPrior.ObservedValue, itemTraitsPrior.ObservedValue,
									 userBiasPrior.ObservedValue, itemBiasPrior.ObservedValue, userThresholdsPrior.ObservedValue,
									 affinityNoiseVariance.ObservedValue, thresholdsNoiseVariance.ObservedValue);

			// Allow EP to process the product factor as if running VMP
			// as in Stern, Herbrich, Graepel paper.
			engine.Compiler.GivePriorityTo(typeof(GaussianProductOp_SHG09));
			engine.Compiler.ShowWarnings = true;

			// Run inference
			var userTraitsPosterior = engine.Infer<Gaussian[][]>(userTraits);
			var itemTraitsPosterior = engine.Infer<Gaussian[][]>(itemTraits);
			var userBiasPosterior = engine.Infer<Gaussian[]>(userBias);
			var itemBiasPosterior = engine.Infer<Gaussian[]>(itemBias);
			var userThresholdsPosterior = engine.Infer<Gaussian[][]>(userThresholds);

			// Feed in the inferred posteriors as the new priors
			userTraitsPrior.ObservedValue = userTraitsPosterior;
			itemTraitsPrior.ObservedValue = itemTraitsPosterior;
			userBiasPrior.ObservedValue = userBiasPosterior;
			itemBiasPrior.ObservedValue = itemBiasPosterior;
			userThresholdsPrior.ObservedValue = userThresholdsPosterior;

			// Make a prediction for user 5 on item 6 by clearing the rating observation.
			numObservations.ObservedValue = 1;
			userData.ObservedValue = new int[] { 5 };
			itemData.ObservedValue = new int[] { 6 };
			ratingData.ClearObservedValue();

			Bernoulli[] predictedRating = engine.Infer<Bernoulli[][]>(ratingData)[0];
			Console.WriteLine("Predicted rating:");
			foreach (var rating in predictedRating) Console.WriteLine(rating);
		}
 /// <summary>
 /// Queries the engine for all shape-fitting posteriors and bundles them.
 /// </summary>
 /// <param name="engine">The inference engine holding the compiled model.</param>
 /// <returns>The inferred traits, part locations, part orientations and global log-scale.</returns>
 private ShapeFittingInfo InferShapeFitting(InferenceEngine engine)
 {
     // Queries are issued in the same order as the ShapeFittingInfo constructor arguments.
     var traits = engine.Infer<GaussianArray2D>(this.shapeTraits);
     var partLocations = engine.Infer<GaussianArray3D>(this.shapePartLocation);
     var partOrientations = engine.Infer<WishartArray2D>(this.shapePartOrientation);
     var logScale = engine.Infer<GaussianArray1D>(this.globalLogScale);
     return new ShapeFittingInfo(traits, partLocations, partOrientations, logScale);
 }
 /// <summary>
 /// Queries the engine for all shape-model parameter posteriors and bundles them
 /// together with the observed grid size and noise probability.
 /// </summary>
 /// <param name="engine">The inference engine holding the compiled model.</param>
 /// <returns>The inferred shape model.</returns>
 private ShapeModel InferShapeModel(InferenceEngine engine)
 {
     // Queries are issued in the same order as the ShapeModel constructor arguments.
     var locationMean = engine.Infer<GaussianArray1D>(this.shapeLocationMean);
     var locationPrecision = engine.Infer<GammaArray1D>(this.shapeLocationPrecision);
     var offsetWeights = engine.Infer<GaussianArray3D>(this.shapePartOffsetWeights);
     var logScaleWeights = engine.Infer<GaussianArray3D>(this.shapePartLogScaleWeights);
     var angleWeights = engine.Infer<GaussianArray2D>(this.shapePartAngleWeights);
     var offsetPrecisions = engine.Infer<GammaArray2D>(this.shapePartOffsetPrecisions);
     var logScalePrecisions = engine.Infer<GammaArray2D>(this.shapePartLogScalePrecisions);
     var anglePrecisions = engine.Infer<GammaArray1D>(this.shapePartAnglePrecisions);
     return new ShapeModel(
         this.gridWidth.ObservedValue,
         this.gridHeight.ObservedValue,
         this.observationNoiseProbability.ObservedValue,
         locationMean,
         locationPrecision,
         offsetWeights,
         logScaleWeights,
         angleWeights,
         offsetPrecisions,
         logScalePrecisions,
         anglePrecisions);
 }
예제 #56
0
        /// <summary>
        /// Fits a multivariate Gaussian (mean vector and Wishart-distributed precision)
        /// to the observed data with VMP, then compares a sampled verification covariance
        /// against the empirical one.
        /// </summary>
        /// <param name="obs">Observation matrix; obs.Length is the vector dimension and
        /// obs[0].Length the number of data vectors — TODO confirm against GetRows.</param>
        private static void TrainCovariance(double[][] obs)
        {
            var r = new Range(obs[0].Length).Named("r");
            var c = new Range(obs.Length).Named("c");
            var data = Variable.Array<Vector>(r).Named("matrix");

            // NOTE(review): despite the "Prior" names, these are the latent mean vector
            // and precision matrix variables; their posteriors are queried below.
            var meansPrior = Variable.VectorGaussianFromMeanAndPrecision(Vector.FromArray(new double[obs.Length]), PositiveDefiniteMatrix.IdentityScaledBy(obs.Length, 10));
            var covPrior = Variable.WishartFromShapeAndScale(1, PositiveDefiniteMatrix.Identity(c.SizeAsInt));
            using (Variable.ForEach(r))
            {
                data[r] = Variable.VectorGaussianFromMeanAndPrecision(meansPrior, covPrior);
            }

            data.ObservedValue = GetRows(obs).Select(Vector.FromArray).ToArray();

            var engine = new InferenceEngine(new VariationalMessagePassing());

            var meansPosterior = engine.Infer<VectorGaussian>(meansPrior);

            var covPosterior = engine.Infer<Wishart>(covPrior);
            // The Wishart posterior is over the precision; invert its mean to get a covariance.
            var covPosteriorVariance = covPosterior.GetMean().Inverse();

            // Sample from the fitted Gaussian and compute the sample covariance as a sanity check.
            var verificationGaussian = new VectorGaussian(meansPosterior.GetMean(), covPosteriorVariance);
            var verificationSamples = GetRows(Samples(verificationGaussian).Take(1000).ToArray()).ToArray();
            var covM = GetCovarianceMatrix(verificationSamples);
            var verificationCovariance = covM.GetVariance();

            // NOTE(review): the empirical statistics below are computed but never used or
            // printed — presumably intended for inspection in a debugger.
            var actual = GetCovarianceMatrix(obs);
            var actualMeans = actual.GetMean();
            var actualCov = actual.GetVariance();
        }
예제 #57
0
        /// <summary>
        /// Fits a degree-(m-1) polynomial to the observed data by Bayesian linear
        /// regression over the monomial basis and prints the inferred weight posteriors.
        /// </summary>
        /// <param name="input">The x-coordinates of the data points.</param>
        /// <param name="observed">The observed y-values, one per input.</param>
        /// <param name="m">The number of polynomial terms (powers 0..m-1).</param>
        /// <exception cref="ArgumentException">Thrown when the input and observed arrays differ in length.</exception>
        private static void FittingCurveWithBayesian(double[] input, double[] observed, int m)
        {
            if (input.Length != observed.Length)
            {
                throw new ArgumentException("input and observed arrays must be equal in length");
            }

            var rowRange = new Range(input.Length).Named("r");
            var termRange = new Range(m).Named("M");

            // Vandermonde-style design matrix: column c holds input^c.
            var designMatrix = new double[input.Length, m];
            for (var row = 0; row < input.Length; row++)
            {
                for (var col = 0; col < m; col++)
                {
                    designMatrix[row, col] = Math.Pow(input[row], col);
                }
            }

            var X = Variable.Array<double>(rowRange, termRange).Named("X");
            X.ObservedValue = designMatrix;

            // Broad zero-mean Gaussian prior over each polynomial weight.
            var w = Variable.Array<double>(termRange).Named("W");
            w[termRange] = Variable.GaussianFromMeanAndPrecision(0, 0.01).ForEach(termRange);

            // y[row] = sum over terms of X[row, term] * w[term].
            var y = Variable.Array<double>(rowRange).Named("Y");
            using (Variable.ForEach(rowRange))
            {
                var termProducts = Variable.Array<double>(termRange);
                using (Variable.ForEach(termRange))
                {
                    termProducts[termRange] = X[rowRange, termRange] * w[termRange];
                }

                y[rowRange] = Variable.Sum(termProducts);
            }

            y.ObservedValue = observed;

            var engine = new InferenceEngine();
            var posteriorW = engine.Infer<DistributionStructArray<Gaussian, double>>(w);
            Console.WriteLine("{0} deg poli weights\n{1}", m, posteriorW);
        }
예제 #58
0
        /// <summary>
        /// Demonstrates Bayesian polynomial curve fitting on noisy sin(2*Pi*x) data for
        /// polynomial degrees 3..7, collecting actual, inferred and raw data series for plotting.
        /// </summary>
        public override void Run()
        {
            var engine = new InferenceEngine();
            var series = new List<LabelledSeries<Tuple<double, double>>>();

            // x in [-1, 1), target sin(2*Pi*x), observations with uniform noise in [-0.5, 0.5).
            var rand = new Random();
            var input = Enumerable.Range(-100, 200).Select(x => Convert.ToDouble(x) / 100.0).ToArray();
            var expected = input.Select(x => Math.Sin(2 * Math.PI * x)).ToArray();
            var observed = expected.Select(x => x + (rand.NextDouble() - 0.5)).ToArray();

            series.Add(new LabelledSeries<Tuple<double, double>>("Actual sin(2x*Pi)", input.Zip(expected, (f, s) => Tuple.Create(f, s))));

            // Model: y = sum_c X[r,c] * w[c], with the number of terms m left unobserved
            // so the same compiled model can be reused for several polynomial degrees.
            var m = Variable.New<int>();
            var rR = new Range(input.Length).Named("r");
            var rM = new Range(m).Named("M");
            var X = Variable.Array<double>(rR, rM).Named("X");
            var w = Variable.Array<double>(rM).Named("W");
            var noise = Variable.GammaFromShapeAndScale(1,5);
            w[rM] = Variable.GaussianFromMeanAndPrecision(0, 0.01).ForEach(rM);
            var y = Variable.Array<double>(rR).Named("Y");
            using (Variable.ForEach(rR))
            {
                var prods = Variable.Array<double>(rM);
                using (Variable.ForEach(rM))
                {
                    prods[rM] = X[rR, rM] * w[rM];
                }

                y[rR] = Variable.Sum(prods);
            }

            // Fit polynomials with 3..7 terms (degrees 2..6).
            for (var i = 3; i < 8; i++)
            {
                m.ObservedValue = i;
                // Design matrix: column c holds input^c.
                var inputArr = new double[input.Length, i];
                for (var r = 0; r < input.Length; r++)
                {
                    for (var c = 0; c < i; c++)
                    {
                        inputArr[r, c] = Math.Pow(input[r], c);
                    }
                }

                X.ObservedValue = inputArr;
                y.ObservedValue = observed;

                var posteriorW = engine.Infer<DistributionStructArray<Gaussian, double>>(w);
                // Evaluate the fitted polynomial at each input using the posterior means.
                var weights = posteriorW.Select(d => d.GetMean()).ToArray();
                var implied = new double[input.Length];
                for (var r = 0; r < implied.Length; r++)
                {
                    var val = 0.0;
                    for (var c = 0; c < i; c++)
                    {
                        val += Math.Pow(input[r], c) * weights[c];
                    }

                    implied[r] = val;
                }

                // Build a human-readable label such as "a*x^0+b*x^1+...".
                var polinomialLabelParts = new List<string>();
                for (var c = 0; c < i; c++)
                {
                    polinomialLabelParts.Add(Math.Round(weights[c], 4) + "*x^" + c);
                }

                var polinomialLabel = string.Join("+", polinomialLabelParts);

                series.Add(new LabelledSeries<Tuple<double, double>>(string.Format("Inferred {0}", polinomialLabel), input.Zip(implied, (f, s) => Tuple.Create(f, s))) { IsScatter = false });
            }

            series.Add(new LabelledSeries<Tuple<double, double>>("Data", input.Zip(observed, (f, s) => Tuple.Create(f, s))) { IsScatter = true });

            // Trim every series to the middle 100 points before exposing it for plotting.
            this.Series = series.Select(s => new LabelledSeries<Tuple<double, double>>(s.Label, s.Series.Skip(50).Take(100)) { IsScatter = s.IsScatter }).ToArray();
        }
예제 #59
0
        /// <summary>
        /// Learns the mean and precision of a Gaussian from samples drawn from a known
        /// source distribution, then prints the predictive distribution's statistics.
        /// </summary>
        /// <param name="sourceMean">Mean of the source Gaussian the samples are drawn from.</param>
        /// <param name="sourcePrecision">Precision of the source Gaussian.</param>
        /// <param name="cycles">Number of samples to draw and condition on.</param>
        private static void TrainGaussian(double sourceMean, double sourcePrecision, int cycles)
        {
            var source = Gaussian.FromMeanAndPrecision(sourceMean, sourcePrecision);

            // Prior distributions
            var meanPriorDistr = Gaussian.FromMeanAndPrecision(0, 0.01);
            var precisionPriorDistr = Gamma.FromMeanAndVariance(2, 5);

            // NOTE(review): despite the "Prior" names these are the latent mean and
            // precision variables shared by all observations.
            var meanPrior = Variable.Random(meanPriorDistr).Named("mean");
            var precPrior = Variable.Random(precisionPriorDistr).Named("precision");

            var engine = new InferenceEngine();
            // Each cycle adds a new observed variable to the model, so all samples
            // jointly constrain the shared mean and precision.
            for (var i = 0; i < cycles; i++)
            {
                var x = Variable.GaussianFromMeanAndPrecision(meanPrior, precPrior);
                x.ObservedValue = source.Sample();
            }

            // NOTE(review): meanPost and precPost are inferred but never used below;
            // only the predictive distribution is printed.
            var meanPost = engine.Infer<Gaussian>(meanPrior);
            var precPost = engine.Infer<Gamma>(precPrior);
            var estimate = Variable.GaussianFromMeanAndPrecision(meanPrior, precPrior);
            var estimateDist = engine.Infer<Gaussian>(estimate);
            Console.WriteLine("mean: {0}, prec: {1}", estimateDist.GetMean(), estimateDist.Precision);
        }
예제 #60
0
        /// <summary>
        /// Fits a mixture of multivariate Gaussians to the data with VMP, prints the
        /// inferred weights, means and precisions, and returns the model log evidence.
        /// </summary>
        /// <param name="observedData">The data vectors to cluster.</param>
        /// <param name="dimensions">Dimensionality of each data vector.</param>
        /// <param name="clusters">Number of mixture components.</param>
        /// <returns>The log evidence of the mixture model.</returns>
        private static double InferMixture(Vector[] observedData, int dimensions, int clusters)
        {
            // Evidence variable: the whole model is placed inside an If(evidence) block
            // so that P(evidence) yields the model evidence.
            var evidence = Variable.Bernoulli(0.5).Named("evidence");

            var evidenceBlock = Variable.If(evidence);
            var k = new Range(clusters).Named("k");

            // Mixture component means
            var means = Variable.Array<Vector>(k).Named("means");
            means[k] = Variable.VectorGaussianFromMeanAndPrecision(
                Vector.Zero(dimensions),
                PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01)).ForEach(k);

            // Mixture component precisions
            var precs = Variable.Array<PositiveDefiniteMatrix>(k).Named("precs");
            precs[k] = Variable.WishartFromShapeAndRate(100.0, PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01)).ForEach(k);

            // Mixture weights
            var weights = Variable.Dirichlet(k, Enumerable.Range(0, clusters).Select(_ => 1.0).ToArray()).Named("weights");

            // Create a variable array which will hold the data
            var n = new Range(observedData.Length).Named("n");
            var data = Variable.Array<Vector>(n).Named("x");

            // Create latent indicator variable for each data point
            var z = Variable.Array<int>(n).Named("z");

            // The mixture of Gaussians model
            using (Variable.ForEach(n))
            {
                z[n] = Variable.Discrete(weights);
                using (Variable.Switch(z[n]))
                {
                    data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
                }
            }

            // Initialise messages randomly so as to break symmetry
            var zinit = new Discrete[n.SizeAsInt];
            for (int i = 0; i < zinit.Length; i++)
                zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
            z.InitialiseTo(Distribution<int>.Array(zinit));

            evidenceBlock.CloseBlock();

            // Attach some generated data
            data.ObservedValue = observedData.ToArray();

            // The inference
            var ie = new InferenceEngine(new VariationalMessagePassing());
            ie.ShowProgress = false;
            Console.WriteLine("Dist over pi=" + ie.Infer(weights));
            Console.WriteLine("Dist over means=\n" + ie.Infer(means));
            Console.WriteLine("Dist over precs=\n" + ie.Infer(precs));
            // LogOdds of the evidence Bernoulli equals the model log evidence.
            var logEvidence = ie.Infer<Bernoulli>(evidence);
            Console.WriteLine("The model log evidence is {0}", logEvidence.LogOdds);
            return logEvidence.LogOdds;
        }