public VariableArray2D<double> GetPopulationForIteration(int Iterations, double CatPopulation, double MousePopulation)
{
    Variable<int> numTimes = Variable.Observed(Iterations);
    Range time = new Range(numTimes);
    Range cols = new Range(2); // first column is Cats, second is Mice
    VariableArray2D<double> days = Variable.Array<double>(time, cols);
    using (ForEachBlock rowBlock = Variable.ForEach(time))
    {
        var day = rowBlock.Index;
        using (Variable.If(day == 0))
        {
            Cat.SetNewPopulation(CatPopulation);
            Mouse.SetNewPopulation(MousePopulation);
            days[day, 0] = Cat.GetPopulation();
            days[day, 1] = Mouse.GetPopulation();
        }
        using (Variable.If(day > 0))
        {
            days[day, 0] = days[day - 1, 0] + GetCatPopulationChange();
            days[day, 1] = days[day - 1, 1] + GetMousePopulationChange();
        }
    }
    return days;
}
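// A minimal usage sketch (illustrative, not part of the original source):
// builds the model above for 30 iterations and prints the inferred population
// table. It assumes this method lives in the same class, since
// GetPopulationForIteration, Cat and Mouse are instance members; the iteration
// count and starting populations are arbitrary demonstration values.
public void RunPopulationExample()
{
    VariableArray2D<double> days = GetPopulationForIteration(30, 12.0, 100.0);
    InferenceEngine engine = new InferenceEngine();
    Console.WriteLine(engine.Infer(days));
}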
public LogisticIrtTestModel(int numParams)
{
    numStudents = Variable.New<int>().Named("numStudents");
    numQuestions = Variable.New<int>().Named("numQuestions");
    Range student = new Range(numStudents);
    abilityPriors = Variable.Array<Gaussian>(student).Named("abilityPriors");
    ability = Variable.Array<double>(student).Named("ability");
    ability[student] = Variable.Random<double, Gaussian>(abilityPriors[student]);
    Range question = new Range(numQuestions);
    difficultyPriors = Variable.Array<Gaussian>(question).Named("difficultyPriors");
    difficulty = Variable.Array<double>(question).Named("difficulty");
    difficulty[question] = Variable.Random<double, Gaussian>(difficultyPriors[question]);
    discriminationPriors = Variable.Array<Gamma>(question).Named("discriminationPriors");
    discrimination = Variable.Array<double>(question).Named("discrimination");
    discrimination[question] = Variable.Random<double, Gamma>(discriminationPriors[question]);
    guessProbPriors = Variable.Array<Beta>(question).Named("guessProbPriors");
    guessProb = Variable.Array<double>(question).Named("guessProb");
    guessProb[question] = Variable.Random<double, Beta>(guessProbPriors[question]);
    engine = new InferenceEngine();
    //engine.NumberOfIterations = 2;
    responseProb = Variable.Array<double>(student, question).Named("responseProb");
    if (numParams == 1)
    {
        responseProb[student, question] = Variable.Logistic(ability[student] - difficulty[question]);
    }
    else if (numParams >= 2)
    {
        responseProb[student, question] = Variable.Logistic((ability[student] - difficulty[question]) * discrimination[question]);
    }
}
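// A minimal usage sketch (illustrative, not part of the original tests): the
// priors above are model parameters, so they must be observed before running
// inference. The sizes and prior settings below are assumptions for
// demonstration, and the fields are assumed accessible; with no responses
// observed, this simply reports the prior predictive response probabilities.
internal static void RunLogisticIrtTestModelExample()
{
    var model = new LogisticIrtTestModel(2);
    int S = 20, Q = 10;
    model.numStudents.ObservedValue = S;
    model.numQuestions.ObservedValue = Q;
    model.abilityPriors.ObservedValue = Util.ArrayInit(S, s => Gaussian.FromMeanAndPrecision(0, 1));
    model.difficultyPriors.ObservedValue = Util.ArrayInit(Q, q => Gaussian.FromMeanAndPrecision(0, 1));
    model.discriminationPriors.ObservedValue = Util.ArrayInit(Q, q => Gamma.FromShapeAndRate(5, 5));
    model.guessProbPriors.ObservedValue = Util.ArrayInit(Q, q => new Beta(2, 12));
    Console.WriteLine(model.engine.Infer(model.responseProb));
}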
internal void JudgementModelSparse2()
{
    int numberOfLevels = 2;
    int[,] Rdata = new int[,] { { 0, 1 }, { 0, 1 }, { 0, 1 }, { 1, 0 } }; //, { -1, 0 }, { 0, -1 } };
    Discrete[,] Rsoft = new Discrete[Rdata.GetLength(0), Rdata.GetLength(1)];
    for (int i = 0; i < Rdata.GetLength(0); i++)
    {
        for (int j = 0; j < Rdata.GetLength(1); j++)
        {
            int r = Rdata[i, j];
            // -1 marks a missing judgement: represent it as a uniform distribution
            Rsoft[i, j] = (r == -1) ? Discrete.Uniform(numberOfLevels) : Discrete.PointMass(r, numberOfLevels);
        }
    }
    Range judges = new Range(Rdata.GetLength(0));
    Range docs = new Range(Rdata.GetLength(1));
    Vector counts = Vector.Constant(numberOfLevels, 1.0);
    Variable<Vector> Qprior = Variable.Dirichlet(counts);
    VariableArray<int> Q = Variable.Array<int>(docs);
    Q[docs] = Variable.Discrete(Qprior).ForEach(docs);
    Vector[] alpha = new Vector[numberOfLevels];
    VariableArray<Vector>[] B = new VariableArray<Vector>[numberOfLevels];
    for (int i = 0; i < alpha.Length; i++)
    {
        alpha[i] = Vector.Zero(numberOfLevels);
        alpha[i].SetAllElementsTo(1); // the off-diagonal pseudocount
        alpha[i][i] = 2; // the diagonal pseudocount
        B[i] = Variable.Array<Vector>(judges);
        B[i][judges] = Variable.Dirichlet(alpha[i]).ForEach(judges);
    }
    VariableArray2D<int> R = Variable.Array<int>(judges, docs);
    VariableArray2D<Discrete> RsoftConst = Variable.Constant(Rsoft, judges, docs);
    for (int i = 0; i < numberOfLevels; i++)
    {
        using (Variable.ForEach(docs))
        {
            using (Variable.Case(Q[docs], i))
            {
                R[judges, docs] = Variable.Discrete(B[i][judges]);
                Variable.ConstrainEqualRandom<int, Discrete>(R[judges, docs], RsoftConst[judges, docs]);
            }
        }
    }
    InferenceEngine engine = new InferenceEngine(new ExpectationPropagation());
    for (int i = 0; i < numberOfLevels; i++)
    {
        Console.WriteLine("Dist over B[" + i + "]:\n" + engine.Infer(B[i]));
    }
    Console.WriteLine("Dist over Q:\n" + engine.Infer(Q));
}
/// <summary>
/// Model constructor
/// </summary>
public BayesianPCAModel()
{
    // The various dimensions will be set externally...
    observationCount = Variable.New<int>().Named(nameof(observationCount));
    featureCount = Variable.New<int>().Named(nameof(featureCount));
    componentCount = Variable.New<int>().Named(nameof(componentCount));
    observation = new Range(observationCount).Named(nameof(observation));
    feature = new Range(featureCount).Named(nameof(feature));
    component = new Range(componentCount).Named(nameof(component));
    // ... as will the data
    data = Variable.Array<double>(observation, feature).Named(nameof(data));
    // ... and the priors
    priorAlpha = Variable.New<Gamma>().Named(nameof(priorAlpha));
    priorMu = Variable.New<Gaussian>().Named(nameof(priorMu));
    priorPi = Variable.New<Gamma>().Named(nameof(priorPi));
    // Mixing matrix. Each row is drawn from a Gaussian with zero mean and
    // a precision which will be learnt. This is a form of Automatic
    // Relevance Determination (ARD). The larger the precisions become, the
    // less important that row in the mixing matrix is in explaining the data
    alpha = Variable.Array<double>(component).Named(nameof(alpha));
    W = Variable.Array<double>(component, feature).Named(nameof(W));
    alpha[component] = Variable<double>.Random(priorAlpha).ForEach(component);
    W[component, feature] = Variable.GaussianFromMeanAndPrecision(0, alpha[component]).ForEach(feature);
    // Initialize the W marginal to break symmetry
    initW = Variable.Array<Gaussian>(component, feature).Named(nameof(initW));
    W[component, feature].InitialiseTo(initW[component, feature]);
    // Latent variables are drawn from a standard Gaussian
    Z = Variable.Array<double>(observation, component).Named(nameof(Z));
    Z[observation, component] = Variable.GaussianFromMeanAndPrecision(0.0, 1.0).ForEach(observation, component);
    // Multiply the latent variables with the mixing matrix...
    T = Variable.MatrixMultiply(Z, W).Named(nameof(T));
    // ... add in a bias ...
    mu = Variable.Array<double>(feature).Named(nameof(mu));
    mu[feature] = Variable<double>.Random(priorMu).ForEach(feature);
    U = Variable.Array<double>(observation, feature).Named(nameof(U));
    U[observation, feature] = T[observation, feature] + mu[feature];
    // ... and add in some observation noise ...
    pi = Variable.Array<double>(feature).Named(nameof(pi));
    pi[feature] = Variable<double>.Random(priorPi).ForEach(feature);
    // ... to give the likelihood of observing the data
    data[observation, feature] = Variable.GaussianFromMeanAndPrecision(U[observation, feature], pi[feature]);
    // Inference engine
    engine = new InferenceEngine();
}
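// A minimal usage sketch (illustrative, not part of the original source):
// fits the PCA model above to a data matrix. The prior settings and the
// random initW initialisation are assumptions for demonstration; initW must
// be set because W's marginal is initialised from it to break symmetry.
public static void RunBayesianPcaExample(double[,] dataMatrix)
{
    var bpca = new BayesianPCAModel();
    int numComponents = 4;
    bpca.observationCount.ObservedValue = dataMatrix.GetLength(0);
    bpca.featureCount.ObservedValue = dataMatrix.GetLength(1);
    bpca.componentCount.ObservedValue = numComponents;
    bpca.data.ObservedValue = dataMatrix;
    bpca.priorAlpha.ObservedValue = Gamma.FromShapeAndRate(2.0, 2.0);
    bpca.priorMu.ObservedValue = Gaussian.FromMeanAndPrecision(0.0, 0.01);
    bpca.priorPi.ObservedValue = Gamma.FromShapeAndRate(2.0, 2.0);
    // Random starting point for W so that the components can differentiate
    Gaussian[,] wInit = new Gaussian[numComponents, dataMatrix.GetLength(1)];
    for (int i = 0; i < wInit.GetLength(0); i++)
    {
        for (int j = 0; j < wInit.GetLength(1); j++)
        {
            wInit[i, j] = Gaussian.FromMeanAndVariance(Rand.Normal(), 1.0);
        }
    }
    bpca.initW.ObservedValue = wInit;
    bpca.engine.Algorithm = new VariationalMessagePassing();
    Console.WriteLine(bpca.engine.Infer(bpca.W));
}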
internal void ProbabilisticIndexMap()
{
    //TODO: change the path for cross-platform use
    double[,] dataIn = MatlabReader.ReadMatrix(new double[10, 6400 * 3], @"c:\temp\pim\chand.txt", ' ');
    Vector[,] pixData = new Vector[10, 6400];
    for (int i = 0; i < pixData.GetLength(0); i++)
    {
        int ct = 0;
        for (int j = 0; j < pixData.GetLength(1); j++)
        {
            // pack the three colour channels of each pixel into a vector
            pixData[i, j] = Vector.FromArray(dataIn[i, ct++], dataIn[i, ct++], dataIn[i, ct++]);
        }
    }
    Range images = new Range(pixData.GetLength(0));
    Range pixels = new Range(pixData.GetLength(1));
    VariableArray2D<Vector> pixelData = Variable.Constant(pixData, images, pixels);
    // For each image we have a palette of L multivariate Gaussians
    Range L = new Range(2);
    VariableArray2D<Vector> means = Variable.Array<Vector>(images, L).Named("means");
    means[images, L] = Variable.VectorGaussianFromMeanAndPrecision(
        Vector.FromArray(0.5, 0.5, 0.5), PositiveDefiniteMatrix.Identity(3)).ForEach(images, L);
    VariableArray2D<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(images, L).Named("precs");
    precs[images, L] = Variable.WishartFromShapeAndScale(1.0, PositiveDefiniteMatrix.Identity(3)).ForEach(images, L);
    // Across all pixels we have a shared distribution over the palette index
    VariableArray<Vector> pi = Variable.Array<Vector>(pixels);
    pi[pixels] = Variable.Dirichlet(L, new double[] { 1.1, 1.0 }).ForEach(pixels);
    // For each pixel of each image we have a discrete indicator
    VariableArray2D<int> ind = Variable.Array<int>(images, pixels).Named("ind");
    ind[images, pixels] = Variable.Discrete(pi[pixels]).ForEach(images);
    using (Variable.ForEach(pixels))
    {
        using (Variable.ForEach(images))
        {
            using (Variable.Switch(ind[images, pixels]))
            {
                pixelData[images, pixels] = Variable.VectorGaussianFromMeanAndPrecision(
                    means[images, ind[images, pixels]], precs[images, ind[images, pixels]]);
            }
        }
    }
    InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());
    ie.ShowProgress = true;
    ie.NumberOfIterations = 5;
    Console.WriteLine("Dist over pi: " + ie.Infer(pi));
}
internal void ProbabilisticIndexMapNoGate()
{
    //TODO: change the path for cross-platform use
    double[,] pixData = MatlabReader.ReadMatrix(new double[10, 6400], @"c:\temp\pim\chand2.txt", ' ');
    Range images = new Range(pixData.GetLength(0));
    Range pixels = new Range(pixData.GetLength(1));
    VariableArray2D<double> pixelData = Variable.Constant(pixData, images, pixels);
    //pixelData.QuoteInMSL = false;
    // For each image we have a Gaussian intensity model
    VariableArray<double> means = Variable.Array<double>(images).Named("means");
    means[images] = Variable.GaussianFromMeanAndPrecision(0.5, 1).ForEach(images);
    VariableArray<double> precs = Variable.Array<double>(images).Named("precs");
    precs[images] = Variable.GammaFromShapeAndScale(1.0, 1.0).ForEach(images);
    // Across all pixels we have a shared distribution over the palette index
    VariableArray<Vector> pi = Variable.Array<Vector>(pixels).Named("pi");
    // dinit is prepared as a randomised initialisation but is not currently applied
    Dirichlet[] dinit = new Dirichlet[pixels.SizeAsInt];
    for (int i = 0; i < dinit.Length; i++)
    {
        double d = Rand.Double();
        dinit[i] = new Dirichlet(1.0 + d / 10, 1.0 - d / 10);
    }
    pi[pixels] = Variable.Dirichlet(new double[] { 1.0 }).ForEach(pixels);
    // For each pixel of each image we have a discrete indicator
    VariableArray2D<int> ind = Variable.Array<int>(images, pixels).Named("ind");
    ind[images, pixels] = Variable.Discrete(pi[pixels]).ForEach(images);
    using (Variable.ForEach(pixels))
    {
        using (Variable.ForEach(images))
        {
            pixelData[images, pixels] = Variable.GaussianFromMeanAndPrecision(means[images],
                //10);
                precs[images]);
            Variable.ConstrainEqualRandom(ind[images, pixels], Discrete.Uniform(1));
        }
    }
    InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());
    ie.ModelName = "PIM_NoGate";
    ie.NumberOfIterations = 8;
    ie.ShowTimings = true;
    DistributionArray<Dirichlet> piDist = ie.Infer<DistributionArray<Dirichlet>>(pi);
    //Console.WriteLine("Dist over pi: " + ie.Infer(pi));
    //TODO: change the path for cross-platform use
    WriteMatrix(piDist.ToArray(), @"C:\temp\pim\results.txt");
}
internal void LogisticIrtTest()
{
    Variable<int> numStudents = Variable.New<int>().Named("numStudents");
    Range student = new Range(numStudents);
    VariableArray<double> ability = Variable.Array<double>(student).Named("ability");
    ability[student] = Variable.GaussianFromMeanAndPrecision(0, 1e-6).ForEach(student);
    Variable<int> numQuestions = Variable.New<int>().Named("numQuestions");
    Range question = new Range(numQuestions);
    VariableArray<double> difficulty = Variable.Array<double>(question).Named("difficulty");
    difficulty[question] = Variable.GaussianFromMeanAndPrecision(0, 1e-6).ForEach(question);
    VariableArray<double> discrimination = Variable.Array<double>(question).Named("discrimination");
    discrimination[question] = Variable.Exp(Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(question));
    VariableArray2D<bool> response = Variable.Array<bool>(student, question).Named("response");
    response[student, question] = Variable.BernoulliFromLogOdds(
        ((ability[student] - difficulty[question]).Named("minus") * discrimination[question]).Named("product"));
    bool[,] data;
    double[] discriminationTrue = new double[0];
    bool useDummyData = false;
    if (useDummyData)
    {
        data = new bool[4, 2];
        for (int i = 0; i < data.GetLength(0); i++)
        {
            for (int j = 0; j < data.GetLength(1); j++)
            {
                data[i, j] = (i > j);
            }
        }
    }
    else
    {
        // simulated data
        // also try IRT2PL_10_250.mat
        //TODO: change the path for cross-platform use
        Dictionary<string, object> dict = MatlabReader.Read(@"..\..\..\Tests\Data\IRT2PL_10_1000.mat");
        Matrix m = (Matrix)dict["Y"];
        data = ConvertToBool(m.ToArray());
        m = (Matrix)dict["discrimination"];
        discriminationTrue = Util.ArrayInit(data.GetLength(1), i => m[i]);
    }
    numStudents.ObservedValue = data.GetLength(0);
    numQuestions.ObservedValue = data.GetLength(1);
    response.ObservedValue = data;
    InferenceEngine engine = new InferenceEngine();
    engine.Algorithm = new VariationalMessagePassing();
    Console.WriteLine(StringUtil.JoinColumns(engine.Infer(discrimination), " should be ", StringUtil.ToString(discriminationTrue)));
}
/// <summary>
/// Model constructor
/// </summary>
public BayesianPCAModel()
{
    // The various dimensions will be set externally...
    vN = Variable.New<int>().Named("NumObs");
    vD = Variable.New<int>().Named("NumFeats");
    vM = Variable.New<int>().Named("MaxComponents");
    rN = new Range(vN).Named("N");
    rD = new Range(vD).Named("D");
    rM = new Range(vM).Named("M");
    // ... as will the data
    vData = Variable.Array<double>(rN, rD).Named("data");
    // ... and the priors
    priorAlpha = Variable.New<Gamma>().Named("PriorAlpha");
    priorMu = Variable.New<Gaussian>().Named("PriorMu");
    priorPi = Variable.New<Gamma>().Named("PriorPi");
    // Mixing matrix. Each row is drawn from a Gaussian with zero mean and
    // a precision which will be learnt. This is a form of Automatic
    // Relevance Determination (ARD). The larger the precisions become, the
    // less important that row in the mixing matrix is in explaining the data
    vAlpha = Variable.Array<double>(rM).Named("Alpha");
    vW = Variable.Array<double>(rM, rD).Named("W");
    vAlpha[rM] = Variable.Random<double, Gamma>(priorAlpha).ForEach(rM);
    vW[rM, rD] = Variable.GaussianFromMeanAndPrecision(0, vAlpha[rM]).ForEach(rD);
    // Latent variables are drawn from a standard Gaussian
    vZ = Variable.Array<double>(rN, rM).Named("Z");
    vZ[rN, rM] = Variable.GaussianFromMeanAndPrecision(0.0, 1.0).ForEach(rN, rM);
    // Multiply the latent variables with the mixing matrix...
    vT = Variable.MatrixMultiply(vZ, vW).Named("T");
    // ... add in a bias ...
    vMu = Variable.Array<double>(rD).Named("mu");
    vMu[rD] = Variable.Random<double, Gaussian>(priorMu).ForEach(rD);
    vU = Variable.Array<double>(rN, rD).Named("U");
    vU[rN, rD] = vT[rN, rD] + vMu[rD];
    // ... and add in some observation noise ...
    vPi = Variable.Array<double>(rD).Named("pi");
    vPi[rD] = Variable.Random<double, Gamma>(priorPi).ForEach(rD);
    // ... to give the likelihood of observing the data
    vData[rN, rD] = Variable.GaussianFromMeanAndPrecision(vU[rN, rD], vPi[rD]);
    // Inference engine
    engine = new InferenceEngine();
}
/// <summary>
/// Helper method to add a child from three parents
/// </summary>
/// <param name="parent1">First parent (a variable array over a range of examples)</param>
/// <param name="parent2">Second parent (a variable array over the same range)</param>
/// <param name="parent3">Third parent (a variable array over the same range)</param>
/// <param name="cpt">Conditional probability table</param>
/// <returns>The child (a variable array over the same range)</returns>
public static VariableArray<int> AddChildFromThreeParents(
    VariableArray<int> parent1,
    VariableArray<int> parent2,
    VariableArray<int> parent3,
    VariableArray2D<VariableArray<Vector>, Vector[,][]> cpt)
{
    var n = parent1.Range;
    var child = Variable.Array<int>(n);
    using (Variable.ForEach(n))
    using (Variable.Switch(parent1[n]))
    using (Variable.Switch(parent2[n]))
    using (Variable.Switch(parent3[n]))
        child[n] = Variable.Discrete(cpt[parent2[n], parent3[n]][parent1[n]]);
    return child;
}
internal void JudgementModel()
{
    int[,] Rdata = new int[,] { { 0, 1 }, { 0, 1 }, { 0, 1 }, { 1, 0 } };
    Range judges = new Range(Rdata.GetLength(0));
    Range docs = new Range(Rdata.GetLength(1));
    int numberOfLevels = 2;
    Vector counts = Vector.Constant(numberOfLevels, 1.0);
    Variable<Vector> Qprior = Variable.Dirichlet(counts);
    VariableArray<int> Q = Variable.Array<int>(docs);
    Q[docs] = Variable.Discrete(Qprior).ForEach(docs);
    Vector[] alpha = new Vector[numberOfLevels];
    VariableArray<Vector>[] B = new VariableArray<Vector>[numberOfLevels];
    for (int i = 0; i < alpha.Length; i++)
    {
        alpha[i] = Vector.Zero(numberOfLevels);
        alpha[i].SetAllElementsTo(1); // the off-diagonal pseudocount
        alpha[i][i] = 2; // the diagonal pseudocount
        B[i] = Variable.Array<Vector>(judges);
        B[i][judges] = Variable.Dirichlet(alpha[i]).ForEach(judges);
    }
    VariableArray2D<int> R = Variable.Constant(Rdata, judges, docs);
    using (Variable.ForEach(docs))
    {
        for (int i = 0; i < numberOfLevels; i++)
        {
            using (Variable.Case(Q[docs], i))
            {
                // TODO: ask infer.net team how to make this sparse
                R[judges, docs] = Variable.Discrete(B[i][judges]);
            }
        }
    }
    InferenceEngine engine = new InferenceEngine(new ExpectationPropagation());
    for (int i = 0; i < numberOfLevels; i++)
    {
        Console.WriteLine("Dist over B[" + i + "]:\n" + engine.Infer(B[i]));
    }
    Console.WriteLine("Dist over Q:\n" + engine.Infer(Q));
}
public void Run()
{
    Rand.Restart(12347);
    // The model
    Range N = new Range(RatsHeightData.GetLength(0)).Named("N");
    Range T = new Range(RatsHeightData.GetLength(1)).Named("T");
    Variable<double> alphaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("alphaC");
    Variable<double> alphaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("alphaTau");
    VariableArray<double> alpha = Variable.Array<double>(N).Named("alpha");
    alpha[N] = Variable.GaussianFromMeanAndPrecision(alphaC, alphaTau).ForEach(N);
    Variable<double> betaC = Variable.GaussianFromMeanAndPrecision(0.0, 1e-4).Named("betaC");
    Variable<double> betaTau = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("betaTau");
    VariableArray<double> beta = Variable.Array<double>(N).Named("beta");
    beta[N] = Variable.GaussianFromMeanAndPrecision(betaC, betaTau).ForEach(N);
    Variable<double> tauC = Variable.GammaFromShapeAndRate(1e-3, 1e-3).Named("tauC");
    VariableArray<double> x = Variable.Observed<double>(RatsXData, T).Named("x");
    Variable<double> xbar = Variable.Sum(x) / T.SizeAsInt;
    VariableArray2D<double> y = Variable.Observed<double>(RatsHeightData, N, T).Named("y");
    y[N, T] = Variable.GaussianFromMeanAndPrecision(alpha[N] + (beta[N] * (x[T] - xbar)), tauC);
    Variable<double> alpha0 = (alphaC - betaC * xbar).Named("alpha0");
    // Initialise with the mean of the prior (needed for Gibbs to converge quickly)
    alphaC.InitialiseTo(Gaussian.PointMass(0.0));
    tauC.InitialiseTo(Gamma.PointMass(1.0));
    alphaTau.InitialiseTo(Gamma.PointMass(1.0));
    betaTau.InitialiseTo(Gamma.PointMass(1.0));
    // Inference
    InferenceEngine ie = new InferenceEngine();
    Gaussian betaCMarg = ie.Infer<Gaussian>(betaC);
    Gaussian alpha0Marg = ie.Infer<Gaussian>(alpha0);
    Gamma tauCMarg = ie.Infer<Gamma>(tauC);
    // Display the results
    Console.WriteLine("alpha0 = {0}[sd={1}]", alpha0Marg, Math.Sqrt(alpha0Marg.GetVariance()).ToString("g4"));
    Console.WriteLine("betaC = {0}[sd={1}]", betaCMarg, Math.Sqrt(betaCMarg.GetVariance()).ToString("g4"));
    Console.WriteLine("tauC = {0}", tauCMarg);
}
public ThreeParentNodes(ModelNode node)
{
    this.node = node;
    Range parent1States = node.parents[0].states;
    Range parent2States = node.parents[1].states;
    Range parent3States = node.parents[2].states;
    CPTPrior = Variable.Array(Variable.Array<Dirichlet>(parent1States), parent2States, parent3States).Named("Prob" + node.name + "Prior");
    Dirichlet[,][] priorObserved = new Dirichlet[parent2States.SizeAsInt, parent3States.SizeAsInt][];
    for (int p2 = 0; p2 < parent2States.SizeAsInt; p2++)
    {
        for (int p3 = 0; p3 < parent3States.SizeAsInt; p3++)
        {
            priorObserved[p2, p3] = Enumerable.Repeat(Dirichlet.Uniform(node.states.SizeAsInt), parent1States.SizeAsInt).ToArray();
        }
    }
    CPTPrior.ObservedValue = priorObserved;
    CPT = Variable.Array(Variable.Array<Vector>(parent1States), parent2States, parent3States).Named("Prob" + node.name);
    CPT[parent2States, parent3States][parent1States] = Variable<Vector>.Random(CPTPrior[parent2States, parent3States][parent1States]);
    CPT.SetValueRange(node.states);
}
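// A minimal usage sketch (illustrative, not part of the original source):
// wires the learnable CPT above into the AddChildFromThreeParents helper
// shown earlier, which is assumed to be in scope. The node and the three
// parent value arrays are assumed to come from the surrounding Bayes-net
// wrapper.
public static VariableArray<int> AddChildExample(
    ModelNode node,
    VariableArray<int> parent1Values,
    VariableArray<int> parent2Values,
    VariableArray<int> parent3Values)
{
    var cptNode = new ThreeParentNodes(node);
    return AddChildFromThreeParents(parent1Values, parent2Values, parent3Values, cptNode.CPT);
}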
public AsthmaModel(string modelName = "AsthmaModel", bool breakSymmetry = true)
{
    BreakSymmetry = breakSymmetry;
    NumYears = Variable.New<int>().Named("NumYears");
    NumChildren = Variable.New<int>().Named("NumChildren");
    NumAllergens = Variable.New<int>().Named("NumAllergens");
    NumVulnerabilities = Variable.New<int>().Named("NumVulnerabilities");
    years = new Range(this.NumYears).Named("years");
    children = new Range(this.NumChildren).Named("children");
    allergens = new Range(this.NumAllergens).Named("allergens");
    classes = new Range(this.NumVulnerabilities).Named("classes");
    sensitized = Variable.Array(Variable.Array<bool>(children, allergens), years).Named("sensitized");
    skinTest = Variable.Array(Variable.Array<bool>(children, allergens), years).Named("skinTest");
    igeTest = Variable.Array(Variable.Array<bool>(children, allergens), years).Named("igeTest");
    skinTestMissing = Variable.Array(Variable.Array<bool>(children, allergens), years).Named("skinTestMissing");
    igeTestMissing = Variable.Array(Variable.Array<bool>(children, allergens), years).Named("igeTestMissing");
    probSensClassPrior = Variable.New<Dirichlet>().Named("probSensClassPrior");
    probSensClass = Variable<Vector>.Random(probSensClassPrior).Named("probSensClass");
    probSensClass.SetValueRange(classes);
    sensClass = Variable.Array<int>(children).Named("sensClass");
    sensClass[children] = Variable.Discrete(probSensClass).ForEach(children);
    sensClassInitializer = Variable.New<IDistribution<int[]>>().Named("sensClassInitializer");
    if (BreakSymmetry)
    {
        sensClass.InitialiseTo(sensClassInitializer);
    }
    // Transition probabilities
    probSens1Prior = Variable.Array<Beta>(allergens, classes).Named("probSens1Prior");
    probGainPrior = Variable.Array(Variable.Array<Beta>(allergens, classes), years).Named("probGainPrior");
    probRetainPrior = Variable.Array(Variable.Array<Beta>(allergens, classes), years).Named("probRetainPrior");
    probSens1 = Variable.Array<double>(allergens, classes).Named("probSens1");
    probGain = Variable.Array(Variable.Array<double>(allergens, classes), years).Named("probGain");
    probRetain = Variable.Array(Variable.Array<double>(allergens, classes), years).Named("probRetain");
    probSens1[allergens, classes] = Variable<double>.Random(probSens1Prior[allergens, classes]);
    probGain[years][allergens, classes] = Variable<double>.Random(probGainPrior[years][allergens, classes]);
    probRetain[years][allergens, classes] = Variable<double>.Random(probRetainPrior[years][allergens, classes]);
    // Emission probabilities
    probSkinIfSensPrior = Variable.New<Beta>().Named("probSkinIfSensPrior");
    probSkinIfNotSensPrior = Variable.New<Beta>().Named("probSkinIfNotSensPrior");
    probIgeIfSensPrior = Variable.New<Beta>().Named("probIgeIfSensPrior");
    probIgeIfNotSensPrior = Variable.New<Beta>().Named("probIgeIfNotSensPrior");
    probSkinIfSens = Variable<double>.Random(probSkinIfSensPrior).Named("probSkinIfSens");
    probSkinIfNotSens = Variable<double>.Random(probSkinIfNotSensPrior).Named("probSkinIfNotSens");
    probIgeIfSens = Variable<double>.Random(probIgeIfSensPrior).Named("probIgeIfSens");
    probIgeIfNotSens = Variable<double>.Random(probIgeIfNotSensPrior).Named("probIgeIfNotSens");
    // Transitions
    using (Variable.ForEach(children))
    {
        using (Variable.Switch(sensClass[children]))
        {
            using (Variable.ForEach(allergens))
            {
                using (var block = Variable.ForEach(years))
                {
                    var year = block.Index;
                    var yearIs0 = (year == 0).Named("year == 0");
                    var yearIsGr0 = (year > 0).Named("year > 0");
                    using (Variable.If(yearIs0))
                    {
                        sensitized[year][children, allergens] = Variable.Bernoulli(probSens1[allergens, sensClass[children]]);
                    }
                    using (Variable.If(yearIsGr0))
                    {
                        var prevYear = (year - 1).Named("year - 1");
                        using (Variable.If(sensitized[prevYear][children, allergens]))
                        {
                            sensitized[year][children, allergens] = Variable.Bernoulli(probRetain[year][allergens, sensClass[children]]);
                        }
                        using (Variable.IfNot(sensitized[prevYear][children, allergens]))
                        {
                            sensitized[year][children, allergens] = Variable.Bernoulli(probGain[year][allergens, sensClass[children]]);
                        }
                    }
                }
            }
        }
    }
    // Emissions
    using (Variable.ForEach(children))
    {
        using (Variable.ForEach(allergens))
        {
            using (Variable.ForEach(years))
            {
                using (Variable.If(sensitized[years][children, allergens]))
                {
                    using (Variable.IfNot(skinTestMissing[years][children, allergens]))
                    {
                        skinTest[years][children, allergens] = Variable.Bernoulli(probSkinIfSens);
                    }
                    using (Variable.IfNot(igeTestMissing[years][children, allergens]))
                    {
                        igeTest[years][children, allergens] = Variable.Bernoulli(probIgeIfSens);
                    }
                }
                using (Variable.IfNot(sensitized[years][children, allergens]))
                {
                    using (Variable.IfNot(skinTestMissing[years][children, allergens]))
                    {
                        skinTest[years][children, allergens] = Variable.Bernoulli(probSkinIfNotSens);
                    }
                    using (Variable.IfNot(igeTestMissing[years][children, allergens]))
                    {
                        igeTest[years][children, allergens] = Variable.Bernoulli(probIgeIfNotSens);
                    }
                }
            }
        }
    }
    Engine = new InferenceEngine() { ShowProgress = false, ModelName = modelName };
    Engine.ProgressChanged += Engine_ProgressChanged;
}
public LogisticIrtModel(int numParams, PriorType priorType)
{
    numStudents = Variable.New<int>().Named("numStudents");
    Range student = new Range(numStudents);
    abilityMean = Variable.GaussianFromMeanAndVariance(0, 1e6).Named("abilityMean");
    abilityPrecision = Variable.GammaFromShapeAndRate(1, 1).Named("abilityPrecision");
    ability = Variable.Array<double>(student).Named("ability");
    bool useTruncatedGaussianPrior = false;
    bool useMixturePrior = false;
    if (!useTruncatedGaussianPrior && !useMixturePrior)
    {
        ability[student] = Variable.GaussianFromMeanAndPrecision(abilityMean, abilityPrecision).ForEach(student);
    }
    else if (useTruncatedGaussianPrior)
    {
        // truncated Gaussian prior for ability
        double threshold, m, v;
        bool mildSkew = false;
        if (mildSkew)
        {
            // matched to Mild_skew generator
            threshold = -1.6464;
            m = -0.4;
            v = 1.5;
        }
        else
        {
            // matched to Extreme_skew generator
            threshold = -1.0187;
            m = -10;
            v = 10;
        }
        VariableArray<double> abilityTrunc = Variable.Array<double>(student).Named("abilityTrunc");
        abilityTrunc[student] = Variable.TruncatedGaussian(m, v, threshold, double.PositiveInfinity).ForEach(student);
        ability[student] = Variable.Copy(abilityTrunc[student]);
        ability.AddAttribute(new MarginalPrototype(new Gaussian()));
    }
    else
    {
        // mixture
        abilityMean2 = Variable.GaussianFromMeanAndVariance(0, 1e6).Named("abilityMean2");
        abilityPrecision2 = Variable.GammaFromShapeAndRate(1, 1).Named("abilityPrecision2");
        Variable<double> weight2 = Variable.Beta(1, 1).Named("weight2");
        isExceptional = Variable.Array<bool>(student).Named("isExceptional");
        isExceptionalInit = Variable.New<IDistribution<bool[]>>();
        isExceptional.InitialiseTo(isExceptionalInit);
        using (Variable.ForEach(student))
        {
            isExceptional[student] = Variable.Bernoulli(weight2);
            using (Variable.If(isExceptional[student]))
            {
                ability[student] = Variable.GaussianFromMeanAndPrecision(abilityMean2, abilityPrecision2);
            }
            using (Variable.IfNot(isExceptional[student]))
            {
                ability[student] = Variable.GaussianFromMeanAndPrecision(abilityMean, abilityPrecision);
            }
        }
    }
    numQuestions = Variable.New<int>().Named("numQuestions");
    Range question = new Range(numQuestions);
    difficultyMean = Variable.GaussianFromMeanAndVariance(0, 1e6).Named("difficultyMean");
    difficultyPrecision = Variable.GammaFromShapeAndRate(1, 1).Named("difficultyPrecision");
    difficulty = Variable.Array<double>(question).Named("difficulty");
    difficulty[question] = Variable.GaussianFromMeanAndPrecision(difficultyMean, difficultyPrecision).ForEach(question);
    discriminationMean = Variable.GaussianFromMeanAndVariance(0, 1e6).Named("discriminationMean");
    discriminationPrecision = Variable.GammaFromShapeAndRate(1, 1).Named("discriminationPrecision");
    discrimination = Variable.Array<double>(question).Named("discrimination");
    discrimination[question] = Variable.Exp(Variable.GaussianFromMeanAndPrecision(discriminationMean, discriminationPrecision).ForEach(question));
    guessProb = Variable.Array<double>(question).Named("guessProb");
    guessProb[question] = Variable.Beta(2, 12).ForEach(question);
    response = Variable.Array<bool>(student, question).Named("response");
    if (numParams == 1)
    {
        response[student, question] = Variable.BernoulliFromLogOdds(ability[student] - difficulty[question]);
    }
    else if (numParams == 2)
    {
        response[student, question] = Variable.BernoulliFromLogOdds(
            ((ability[student] - difficulty[question]).Named("minus") * discrimination[question]).Named("product"));
    }
    else if (numParams == 3)
    {
        using (Variable.ForEach(student))
        {
            using (Variable.ForEach(question))
            {
                Variable<bool> guess = Variable.Bernoulli(guessProb[question]);
                using (Variable.If(guess))
                {
                    response[student, question] = Variable.Bernoulli(1 - 1e-10);
                }
                using (Variable.IfNot(guess))
                {
                    Variable<double> score = (ability[student] - difficulty[question]) * discrimination[question];
                    score.Name = "score";
                    // explicit MarginalPrototype is needed when ability and difficulty are observed
                    score.AddAttribute(new MarginalPrototype(new Gaussian()));
                    response[student, question] = Variable.BernoulliFromLogOdds(score);
                }
            }
        }
    }
    else
    {
        throw new ArgumentException($"Unsupported number of parameters: {numParams}");
    }
    if (priorType == PriorType.Standard)
    {
        // standard normal prior
        abilityMean.ObservedValue = 0;
        abilityPrecision.ObservedValue = 1;
        difficultyMean.ObservedValue = 0;
        difficultyPrecision.ObservedValue = 1;
        discriminationMean.ObservedValue = 0;
        discriminationPrecision.ObservedValue = 4 * 4;
    }
    else if (priorType == PriorType.Vague)
    {
        // vague prior
        abilityMean.ObservedValue = 0;
        abilityPrecision.ObservedValue = 1e-6;
        difficultyMean.ObservedValue = 0;
        difficultyPrecision.ObservedValue = 1e-6;
        discriminationMean.ObservedValue = 0;
        // must have exp(var) be finite, i.e. var <= 709, precision > 1.5e-3
        discriminationPrecision.ObservedValue = 1.5e-2;
    }
    else if (priorType == PriorType.StandardVague)
    {
        abilityMean.ObservedValue = 0;
        abilityPrecision.ObservedValue = 1;
        difficultyMean.ObservedValue = 0;
        difficultyPrecision.ObservedValue = 1e-6;
        discriminationMean.ObservedValue = 0;
        discriminationPrecision.ObservedValue = 1.5e-2;
    }
    else if (priorType == PriorType.VagueStandard)
    {
        abilityMean.ObservedValue = 0;
        abilityPrecision.ObservedValue = 1e-6;
        difficultyMean.ObservedValue = 0;
        difficultyPrecision.ObservedValue = 1;
        discriminationMean.ObservedValue = 0;
        discriminationPrecision.ObservedValue = 4 * 4;
    }
    else if (priorType == PriorType.Standard5)
    {
        abilityMean.ObservedValue = 0;
        abilityPrecision.ObservedValue = 1;
        difficultyMean.ObservedValue = 0;
        difficultyPrecision.ObservedValue = 1.0 / 25;
        discriminationMean.ObservedValue = 0;
        discriminationPrecision.ObservedValue = 4 * 4;
    }
    else if (priorType == PriorType.Hierarchical)
    {
        // do nothing: the hierarchical hyperpriors above remain free
    }
    else
    {
        throw new ArgumentException($"priorType {priorType} is not supported");
    }
    engine = new InferenceEngine();
}
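// A minimal usage sketch (illustrative, not part of the original tests): runs
// the two-parameter model above on an observed response matrix. The data
// source is an assumption; any bool[studentCount, questionCount] array works,
// and the fields are assumed accessible.
internal static void RunLogisticIrtModelExample(bool[,] responseData)
{
    var irt = new LogisticIrtModel(2, PriorType.Standard);
    irt.numStudents.ObservedValue = responseData.GetLength(0);
    irt.numQuestions.ObservedValue = responseData.GetLength(1);
    irt.response.ObservedValue = responseData;
    irt.engine.Algorithm = new VariationalMessagePassing();
    Console.WriteLine(irt.engine.Infer(irt.discrimination));
}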
public void BernoulliMixtureGaussianTest()
{
    int N = 10, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.GaussianFromMeanAndVariance(0, 1).ForEach(k, d);
    VariableArray2D<bool> x = Variable.Array<bool>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            x[n, d] = (Variable.GaussianFromMeanAndVariance(p[c[n], d], 1.0) > 0);
        }
    }
    bool geForceProper = GateEnterOp<double>.ForceProper;
    try
    {
        GateEnterOp<double>.ForceProper = true;
        InferenceEngine engine = new InferenceEngine(); //new VariationalMessagePassing());
        engine.Compiler.GivePriorityTo(typeof(IsPositiveOp_Proper)); // needed to avoid improper messages in EP
        bool[,] data = new bool[N, D];
        int N1 = N / 2;
        int i = 0;
        for (; i < N1; i++)
        {
            data[i, 0] = true;
            data[i, 1] = false;
        }
        for (; i < N; i++)
        {
            data[i, 0] = false;
            data[i, 1] = true;
        }
        x.ObservedValue = data;
        Discrete[] cInit = new Discrete[N];
        for (int j = 0; j < N; j++)
        {
            double r = Rand.Double();
            cInit[j] = new Discrete(r, 1 - r);
        }
        c.InitialiseTo(Distribution<int>.Array(cInit));
        engine.NumberOfIterations = 1;
        var pExpected = engine.Infer(p);
        engine.NumberOfIterations = engine.Algorithm.DefaultNumberOfIterations;
        DistributionArray<Discrete> cPost = engine.Infer<DistributionArray<Discrete>>(c);
        Console.WriteLine(cPost);
        DistributionArray2D<Gaussian> pPost = engine.Infer<DistributionArray2D<Gaussian>>(p);
        Console.WriteLine(pPost);
        // test resetting inference
        engine.NumberOfIterations = 1;
        var pActual = engine.Infer<Diffable>(p);
        Assert.True(pActual.MaxDiff(pExpected) < 1e-10);
    }
    finally
    {
        GateEnterOp<double>.ForceProper = geForceProper;
    }
}
public void BernoulliMixtureTest()
{
    int N = 10, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.Beta(1, 1).ForEach(k, d);
    VariableArray2D<bool> x = Variable.Array<bool>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            x[n, d] = Variable.Bernoulli(p[c[n], d]);
        }
    }
    InferenceEngine engine = new InferenceEngine();
    bool[,] data = new bool[N, D];
    int N1 = N / 2;
    int i = 0;
    for (; i < N1; i++)
    {
        data[i, 0] = true;
        data[i, 1] = false;
    }
    for (; i < N; i++)
    {
        data[i, 0] = false;
        data[i, 1] = true;
    }
    x.ObservedValue = data;
    Discrete[] cInit = new Discrete[N];
    for (int j = 0; j < N; j++)
    {
        double r = Rand.Double();
        cInit[j] = new Discrete(r, 1 - r);
    }
    c.InitialiseTo(Distribution<int>.Array(cInit));
    engine.NumberOfIterations = 1;
    var pExpected = engine.Infer(p);
    engine.NumberOfIterations = engine.Algorithm.DefaultNumberOfIterations;
    DistributionArray<Discrete> cPost = engine.Infer<DistributionArray<Discrete>>(c);
    Console.WriteLine(cPost);
    DistributionArray2D<Beta> pPost = engine.Infer<DistributionArray2D<Beta>>(p);
    Console.WriteLine(pPost);
    // test resetting inference
    engine.NumberOfIterations = 1;
    var pActual = engine.Infer<Diffable>(p);
    Assert.True(pActual.MaxDiff(pExpected) < 1e-10);
}
internal void JudgementModelSparse()
{
    int[,] Rdata = new int[,] { { 0, 1 }, { 0, 1 }, { 0, 1 }, { 1, 0 } }; //, { -1, 0 }, { 0, -1 } };
    double[,] observed = new double[Rdata.GetLength(0), Rdata.GetLength(1)];
    for (int i = 0; i < Rdata.GetLength(0); i++)
    {
        for (int j = 0; j < Rdata.GetLength(1); j++)
        {
            // -1 marks a missing judgement
            observed[i, j] = (Rdata[i, j] == -1) ? 0 : 1;
            Rdata[i, j] = System.Math.Max(Rdata[i, j], 0);
        }
    }
    Range judges = new Range(Rdata.GetLength(0));
    Range docs = new Range(Rdata.GetLength(1));
    int numberOfLevels = 2;
    Vector counts = Vector.Constant(numberOfLevels, 1.0);
    Variable<Vector> Qprior = Variable.Dirichlet(counts);
    VariableArray<int> Q = Variable.Array<int>(docs);
    Q[docs] = Variable.Discrete(Qprior).ForEach(docs);
    Vector[] alpha = new Vector[numberOfLevels];
    VariableArray<Vector>[] B = new VariableArray<Vector>[numberOfLevels];
    for (int i = 0; i < alpha.Length; i++)
    {
        alpha[i] = Vector.Zero(numberOfLevels);
        alpha[i].SetAllElementsTo(1); // the off-diagonal pseudocount
        alpha[i][i] = 2; // the diagonal pseudocount
        B[i] = Variable.Array<Vector>(judges);
        B[i][judges] = Variable.Dirichlet(alpha[i]).ForEach(judges);
    }
    VariableArray2D<int> R = Variable.Constant(Rdata, judges, docs);
    VariableArray2D<double> obs = Variable.Constant(observed, judges, docs);
    VariableArray2D<bool> obsVar = Variable.Array<bool>(judges, docs);
    obsVar[judges, docs] = Variable.Bernoulli(obs[judges, docs]);
    //Variable.ConstrainEqual(obs[judges, docs], obsVar[judges, docs]);
    for (int i = 0; i < numberOfLevels; i++)
    {
        using (Variable.ForEach(docs))
        {
            using (Variable.ForEach(judges))
            {
                using (Variable.Case(Q[docs], i))
                {
                    using (Variable.If(obsVar[judges, docs]))
                    {
                        R[judges, docs] = Variable.Discrete(B[i][judges]);
                    }
                }
            }
        }
    }
    InferenceEngine engine = new InferenceEngine(new ExpectationPropagation());
    for (int i = 0; i < numberOfLevels; i++)
    {
        Console.WriteLine("Dist over B[" + i + "]:\n" + engine.Infer(B[i]));
    }
    Console.WriteLine("Dist over Q:\n" + engine.Infer(Q));
}
public void PoissonMixtureTest()
{
    Rand.Restart(1);
    int N = 40, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.GammaFromMeanAndVariance(10, 100).ForEach(k, d);
    VariableArray2D<int> x = Variable.Array<int>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            x[n, d] = Variable.Poisson(p[c[n], d]);
        }
    }
    //n.AddAttribute(new Sequential());
    //c.AddAttribute(new DivideMessages(false));
    InferenceEngine engine = new InferenceEngine();
    //engine.Algorithm = new VariationalMessagePassing();
    int[,] data = new int[N, D];
    double[,] mean = new double[K, D];
    for (int i = 0; i < K; i++)
    {
        for (int j = 0; j < D; j++)
        {
            //mean[i, j] = i+j;
            mean[i, j] = (i + j + 1) * 10;
        }
    }
    Discrete[] cInit = new Discrete[N];
    for (int i = 0; i < N; i++)
    {
        int cluster = i % 2;
        for (int j = 0; j < D; j++)
        {
            data[i, j] = Rand.Poisson(mean[cluster, j]);
        }
        double r = cluster;
        cInit[i] = new Discrete(1 - r, r);
    }
    x.ObservedValue = data;
    c.InitialiseTo(Distribution<int>.Array(cInit));
    engine.NumberOfIterations = 1;
    var pPost1 = engine.Infer(p);
    engine.NumberOfIterations = 200;
    Gamma[,] pPost = engine.Infer<Gamma[,]>(p);
    for (int i = 0; i < pPost.GetLength(0); i++)
    {
        for (int j = 0; j < pPost.GetLength(1); j++)
        {
            double mActual = pPost[i, j].GetMean();
            double mExpected = mean[i, j];
            Console.WriteLine(String.Format("pPost[{0}][{1}] = {2} should be {3}", i, j, mActual, mExpected));
            Assert.True(MMath.AbsDiff(mExpected, mActual, 1e-6) < 0.3);
        }
    }
    // test resetting inference
    engine.NumberOfIterations = 1;
    var pPost2 = engine.Infer<Diffable>(p);
    Assert.True(pPost2.MaxDiff(pPost1) < 1e-10);
}
private void BugsRats(bool initialiseAlpha, bool initialiseAlphaC)
{
    Rand.Restart(0);
    double precOfGaussianPrior = 1.0E-6;
    double shapeRateOfGammaPrior = 0.02; // smallest choice that will avoid zeros
    double meanOfBetaPrior = 0.0;
    double meanOfAlphaPrior = 0.0;
    // The model
    int N = RatsHeightData.GetLength(0);
    int T = RatsHeightData.GetLength(1);
    double xbar = 22.0;
    double[] xDataZeroMean = new double[RatsXData.Length];
    for (int i = 0; i < RatsXData.Length; i++)
    {
        xDataZeroMean[i] = RatsXData[i] - xbar;
    }
    Range r = new Range(N).Named("N");
    Range w = new Range(T).Named("T");
    VariableArray2D<double> y = Variable.Observed<double>(RatsHeightData, r, w).Named("y");
    VariableArray<double> x = Variable.Observed<double>(xDataZeroMean, w).Named("x");
    Variable<double> tauC = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("tauC");
    Variable<double> alphaC = Variable.GaussianFromMeanAndPrecision(meanOfAlphaPrior, precOfGaussianPrior).Named("alphaC");
    Variable<double> alphaTau = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("alphaTau");
    Variable<double> betaC = Variable.GaussianFromMeanAndPrecision(meanOfBetaPrior, precOfGaussianPrior).Named("betaC");
    Variable<double> betaTau = Variable.GammaFromShapeAndRate(shapeRateOfGammaPrior, shapeRateOfGammaPrior).Named("betaTau");
    VariableArray<double> alpha = Variable.Array<double>(r).Named("alpha");
    alpha[r] = Variable.GaussianFromMeanAndPrecision(alphaC, alphaTau).ForEach(r);
    VariableArray<double> beta = Variable.Array<double>(r).Named("beta");
    beta[r] = Variable.GaussianFromMeanAndPrecision(betaC, betaTau).ForEach(r);
    VariableArray2D<double> mu = Variable.Array<double>(r, w).Named("mu");
    VariableArray2D<double> betaX = Variable.Array<double>(r, w).Named("betax");
    betaX[r, w] = beta[r] * x[w];
    mu[r, w] = alpha[r] + betaX[r, w];
    y[r, w] = Variable.GaussianFromMeanAndPrecision(mu[r, w], tauC);
    Variable<double> alpha0 = (alphaC - xbar * betaC).Named("alpha0");
    InferenceEngine ie;
    GibbsSampling gs = new GibbsSampling();
    // Initialise both alpha and beta together.
    // Initialising only alpha (or only beta) is not reliable because you could by chance get a large betaTau and small tauC to start,
    // at which point beta and alphaC become garbage, leading to alpha becoming garbage on the next iteration.
    bool initialiseBeta = initialiseAlpha;
    bool initialiseBetaC = initialiseAlphaC;
    if (initialiseAlpha)
    {
        Gaussian[] alphaInit = new Gaussian[N];
        for (int i = 0; i < N; i++)
        {
            alphaInit[i] = Gaussian.FromMeanAndPrecision(250.0, 1.0);
        }
        alpha.InitialiseTo(Distribution<double>.Array(alphaInit));
    }
    if (initialiseBeta)
    {
        Gaussian[] betaInit = new Gaussian[N];
        for (int i = 0; i < N; i++)
        {
            betaInit[i] = Gaussian.FromMeanAndPrecision(6.0, 1.0);
        }
        beta.InitialiseTo(Distribution<double>.Array(betaInit));
    }
    if (initialiseAlphaC)
    {
        alphaC.InitialiseTo(Gaussian.FromMeanAndVariance(250.0, 1.0));
    }
    if (initialiseBetaC)
    {
        betaC.InitialiseTo(Gaussian.FromMeanAndVariance(6.0, 1.0));
    }
    if (false)
    {
        // disabled initialisations, kept for reference
        //tauC.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
        //alphaTau.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
        //betaTau.InitialiseTo(Gamma.FromMeanAndVariance(1.0, 0.1));
    }
    if (!initialiseAlpha && !initialiseBeta && !initialiseAlphaC && !initialiseBetaC)
    {
        gs.BurnIn = 1000;
    }
    ie = new InferenceEngine(gs);
    ie.ShowProgress = false;
    ie.ModelName = "BugsRats";
    ie.NumberOfIterations = 4000;
    ie.OptimiseForVariables = new List<IVariable>() { alphaC, betaC, alpha0, tauC };
    betaC.AddAttribute(QueryTypes.Marginal);
    betaC.AddAttribute(QueryTypes.Samples);
    alpha0.AddAttribute(QueryTypes.Marginal);
    alpha0.AddAttribute(QueryTypes.Samples);
    tauC.AddAttribute(QueryTypes.Marginal);
    tauC.AddAttribute(QueryTypes.Samples);
    // Inference
    object alphaCActual = ie.Infer(alphaC);
    Gaussian betaCMarg = ie.Infer<Gaussian>(betaC);
    Gaussian alpha0Marg = ie.Infer<Gaussian>(alpha0);
    Gamma tauCMarg = ie.Infer<Gamma>(tauC);
    // Check results against BUGS
    Gaussian betaCExpected = new Gaussian(6.185, System.Math.Pow(0.1068, 2));
    Gaussian alpha0Expected = new Gaussian(106.6, System.Math.Pow(3.625, 2));
    double sigmaMeanExpected = 6.082;
    double sigmaMean = System.Math.Sqrt(1.0 / tauCMarg.GetMean());
    if (!initialiseAlpha && !initialiseAlphaC)
    {
        Debug.WriteLine("betaC = {0} should be {1}", betaCMarg, betaCExpected);
        Debug.WriteLine("alpha0 = {0} should be {1}", alpha0Marg, alpha0Expected);
    }
    Assert.True(GaussianDiff(betaCExpected, betaCMarg) < 0.1);
    Assert.True(GaussianDiff(alpha0Expected, alpha0Marg) < 0.1);
    Assert.True(MMath.AbsDiff(sigmaMeanExpected, sigmaMean, 0.1) < 0.1);
    IList<double> betaCSamples = ie.Infer<IList<double>>(betaC, QueryTypes.Samples);
    IList<double> alpha0Samples = ie.Infer<IList<double>>(alpha0, QueryTypes.Samples);
    IList<double> tauCSamples = ie.Infer<IList<double>>(tauC, QueryTypes.Samples);
    GaussianEstimator est = new GaussianEstimator();
    foreach (double sample in betaCSamples)
    {
        est.Add(sample);
    }
    Gaussian betaCMarg2 = est.GetDistribution(new Gaussian());
    Assert.True(GaussianDiff(betaCMarg, betaCMarg2) < 0.1);
}
private void DefineModel()
{
    this.observationCount = Variable.New<int>().Named("observation_count");
    this.gridWidth = Variable.New<int>().Named("grid_width");
    this.gridHeight = Variable.New<int>().Named("grid_height");
    this.shapePartCount = Variable.New<int>().Named("shape_part_count");
    this.traitCount = Variable.New<int>().Named("trait_count");
    this.observationRange = new Range(this.observationCount).Named("observation_range");
    this.xyRange = new Range(2).Named("xy_range");
    this.widthRange = new Range(this.gridWidth).Named("width_range");
    this.heightRange = new Range(this.gridHeight).Named("height_range");
    this.shapePartRange = new Range(this.shapePartCount).Named("shape_part_range");
    this.traitRange = new Range(this.traitCount).Named("trait_range");

    this.shapeLocationMeanPrior = Variable.New<GaussianArray1D>().Named("shape_location_mean_prior");
    this.shapeLocationMean = Variable.Array<double>(this.xyRange).Named("shape_location_mean");
    this.shapeLocationMean.SetTo(Variable<double[]>.Random(this.shapeLocationMeanPrior));

    this.shapeLocationPrecisionPrior = Variable.New<GammaArray1D>().Named("shape_location_prec_prior");
    this.shapeLocationPrecision = Variable.Array<double>(this.xyRange).Named("shape_location_prec");
    this.shapeLocationPrecision.SetTo(Variable<double[]>.Random(this.shapeLocationPrecisionPrior));

    this.shapePartOffsetWeightPriors = Variable.New<GaussianArray3D>().Named("shape_part_offset_weight_prior");
    this.shapePartOffsetWeights = Variable.Array(Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange), this.shapePartRange).Named("shape_part_offset_weights");
    this.shapePartOffsetWeights.SetTo(Variable<double[][][]>.Random(this.shapePartOffsetWeightPriors));

    this.shapePartLogScaleWeightPriors = Variable.New<GaussianArray3D>().Named("shape_part_scale_weight_prior");
    this.shapePartLogScaleWeights = Variable.Array(Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange), this.shapePartRange).Named("shape_part_scale_weights");
    this.shapePartLogScaleWeights.SetTo(Variable<double[][][]>.Random(this.shapePartLogScaleWeightPriors));

    this.shapePartAngleWeightPriors = Variable.New<GaussianArray2D>().Named("shape_part_angle_weight_prior");
    this.shapePartAngleWeights = Variable.Array(Variable.Array<double>(this.traitRange), this.shapePartRange).Named("shape_part_angle_weights");
    this.shapePartAngleWeights.SetTo(Variable<double[][]>.Random(this.shapePartAngleWeightPriors));

    this.shapePartOffsetPrecisionPriors = Variable.New<GammaArray2D>().Named("shape_part_offset_prec_prior");
    this.shapePartOffsetPrecisions = Variable.Array(Variable.Array<double>(this.xyRange), this.shapePartRange).Named("shape_part_offset_prec");
    this.shapePartOffsetPrecisions.SetTo(Variable<double[][]>.Random(this.shapePartOffsetPrecisionPriors));

    this.shapePartLogScalePrecisionPriors = Variable.New<GammaArray2D>().Named("shape_part_scale_prec_prior");
    this.shapePartLogScalePrecisions = Variable.Array(Variable.Array<double>(this.xyRange), this.shapePartRange).Named("shape_part_scale_prec");
    this.shapePartLogScalePrecisions.SetTo(Variable<double[][]>.Random(this.shapePartLogScalePrecisionPriors));

    this.shapePartAnglePrecisionPriors = Variable.New<GammaArray1D>().Named("shape_part_angle_prec_prior");
    this.shapePartAnglePrecisions = Variable.Array<double>(this.shapePartRange).Named("shape_part_angle_prec");
    this.shapePartAnglePrecisions.SetTo(Variable<double[]>.Random(this.shapePartAnglePrecisionPriors));

    this.globalLogScalePrior = Variable.New<GaussianArray1D>().Named("global_log_scale_prior");
    this.globalLogScale = Variable.Array<double>(this.observationRange).Named("global_log_scale");
    this.globalLogScale.SetTo(Variable<double[]>.Random(this.globalLogScalePrior));

    this.shapeLocation = Variable.Array(Variable.Array<double>(this.xyRange), this.observationRange).Named("shape_location");
    this.shapePartLocation = Variable.Array(Variable.Array(Variable.Array<double>(this.xyRange), this.shapePartRange), this.observationRange).Named("shape_part_location");
    this.shapePartLocation.AddAttribute(new PointEstimate());
    this.shapePartOrientation = Variable.Array(Variable.Array<PositiveDefiniteMatrix>(this.shapePartRange), this.observationRange).Named("shape_part_orientation");
    this.shapePartOrientation.AddAttribute(new PointEstimate());

    this.shapeTraitsPrior = Variable.New<GaussianArray2D>().Named("shape_traits_prior"); // Needs to be observed in the derived classes
    this.shapeTraits = Variable.Array(Variable.Array<double>(this.traitRange), this.observationRange).Named("shape_traits");
    this.shapeTraits.SetTo(Variable<double[][]>.Random(this.shapeTraitsPrior));

    this.observationNoiseProbability = Variable.New<double>().Named("observation_noise_prob");
    this.pixelCoords = Variable.Array<Vector>(this.widthRange, this.heightRange).Named("pixel_coords");
    this.labels = Variable.Array<VariableArray2D<bool>, bool[][,]>(Variable.Array<bool>(this.widthRange, this.heightRange), this.observationRange).Named("labels");
    this.noisyLabels = Variable.Array<VariableArray2D<bool>, bool[][,]>(Variable.Array<bool>(this.widthRange, this.heightRange), this.observationRange).Named("noisy_labels");
    this.noisyLabelsConstraint = Variable.Array<VariableArray2D<Bernoulli>, Bernoulli[][,]>(Variable.Array<Bernoulli>(this.widthRange, this.heightRange), this.observationRange).Named("noisy_labels_constraint");

    using (var observationIter = Variable.ForEach(this.observationRange))
    {
        this.shapeLocation[this.observationRange][this.xyRange] = Variable.GaussianFromMeanAndPrecision(this.shapeLocationMean[this.xyRange], this.shapeLocationPrecision[this.xyRange]);
        using (Variable.ForEach(this.shapePartRange))
        {
            const double productDamping = 0.5;

            // Location
            var shapePartOffsetMeanTraitWeightProducts = Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange).Named("shape_part_offset_mean_products");
            shapePartOffsetMeanTraitWeightProducts[this.xyRange][this.traitRange] = Variable<double>.Factor(
                Factor.Product_SHG09, this.shapeTraits[this.observationRange][this.traitRange], this.shapePartOffsetWeights[this.shapePartRange][this.xyRange][this.traitRange]);
            var shapePartOffsetMeanTraitWeightProductsDamped = Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange).Named("shape_part_offset_mean_products_damped");
            shapePartOffsetMeanTraitWeightProductsDamped[this.xyRange][this.traitRange] = Variable<double>.Factor(
                Damp.Forward<double>, shapePartOffsetMeanTraitWeightProducts[this.xyRange][this.traitRange], productDamping);
            var shapePartOffsetMean = Variable.Array<double>(this.xyRange).Named("shape_part_offset_mean");
            shapePartOffsetMean[this.xyRange] = Variable.Sum(shapePartOffsetMeanTraitWeightProductsDamped[this.xyRange]);
            var shapePartOffset = Variable.GaussianFromMeanAndPrecision(
                shapePartOffsetMean[this.xyRange], this.shapePartOffsetPrecisions[this.shapePartRange][this.xyRange]).Named("shape_part_offset");
            this.shapePartLocation[this.observationRange][this.shapePartRange][this.xyRange] = this.shapeLocation[this.observationRange][this.xyRange] + shapePartOffset;

            // Orientation
            var shapePartLogScaleMeanTraitWeightProducts = Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange).Named("shape_part_logscale_mean_products");
            shapePartLogScaleMeanTraitWeightProducts[this.xyRange][this.traitRange] = Variable<double>.Factor(
                Factor.Product_SHG09, this.shapeTraits[this.observationRange][this.traitRange], this.shapePartLogScaleWeights[this.shapePartRange][this.xyRange][this.traitRange]);
            var shapePartLogScaleMeanTraitWeightProductsDamped = Variable.Array(Variable.Array<double>(this.traitRange), this.xyRange).Named("shape_part_logscale_mean_products_damped");
            shapePartLogScaleMeanTraitWeightProductsDamped[this.xyRange][this.traitRange] = Variable<double>.Factor(
                Damp.Forward<double>, shapePartLogScaleMeanTraitWeightProducts[this.xyRange][this.traitRange], productDamping);
            var shapePartLogScaleMean = Variable.Array<double>(this.xyRange).Named("shape_part_logscale_mean");
            shapePartLogScaleMean[this.xyRange] = Variable.Sum(shapePartLogScaleMeanTraitWeightProductsDamped[this.xyRange]);
            var shapePartLogScale = Variable.Array<double>(this.xyRange).Named("shape_part_logscale");
            shapePartLogScale[this.xyRange] = Variable.GaussianFromMeanAndPrecision(
                shapePartLogScaleMean[this.xyRange], this.shapePartLogScalePrecisions[this.shapePartRange][this.xyRange]);

            var shapePartAngleMeanTraitWeightProducts = Variable.Array<double>(this.traitRange).Named("shape_part_angle_mean_products");
            shapePartAngleMeanTraitWeightProducts[this.traitRange] = Variable<double>.Factor(
                Factor.Product_SHG09, this.shapeTraits[this.observationRange][this.traitRange], this.shapePartAngleWeights[this.shapePartRange][this.traitRange]);
            var shapePartAngleMeanTraitWeightProductsDamped = Variable.Array<double>(this.traitRange).Named("shape_part_angle_mean_products_damped");
            shapePartAngleMeanTraitWeightProductsDamped[this.traitRange] = Variable<double>.Factor(
                Damp.Forward<double>, shapePartAngleMeanTraitWeightProducts[this.traitRange], productDamping);
            var shapePartAngleMean = Variable.Sum(shapePartAngleMeanTraitWeightProductsDamped).Named("shape_part_angle_mean");
            var shapePartAngle = Variable.GaussianFromMeanAndPrecision(
                shapePartAngleMean, this.shapePartAnglePrecisions[this.shapePartRange]).Named("shape_part_angle");

            this.shapePartOrientation[this.observationRange][this.shapePartRange] = Variable<PositiveDefiniteMatrix>.Factor(
                ShapeFactors.MatrixFromAngleScale,
                shapePartLogScale[0] /*+ this.globalLogScale[observationRange]*/,
                shapePartLogScale[1] /*+ this.globalLogScale[observationRange]*/,
                shapePartAngle);
            this.shapePartOrientation[this.observationRange][this.shapePartRange].AddAttribute(new MarginalPrototype(new Wishart(2)));
        }

        using (Variable.ForEach(this.widthRange))
        using (Variable.ForEach(this.heightRange))
        {
            var labelsByPart = Variable.Array<bool>(this.shapePartRange).Named("labels_by_part");
            using (Variable.ForEach(this.shapePartRange))
            {
                labelsByPart[this.shapePartRange] = Variable<bool>.Factor(
                    ShapeFactors.LabelFromShape,
                    this.pixelCoords[this.widthRange, this.heightRange],
                    this.shapePartLocation[this.observationRange][this.shapePartRange][0],
                    this.shapePartLocation[this.observationRange][this.shapePartRange][1],
                    this.shapePartOrientation[this.observationRange][this.shapePartRange]);
            }
            this.labels[this.observationRange][this.widthRange, this.heightRange] = Variable<bool>.Factor(Factors.AnyTrue, labelsByPart);

            //using (Variable.Repeat(100))
            {
                using (Variable.If(this.labels[this.observationRange][this.widthRange, this.heightRange]))
                {
                    this.noisyLabels[this.observationRange][this.widthRange, this.heightRange] = !Variable.Bernoulli(this.observationNoiseProbability);
                }
                using (Variable.IfNot(this.labels[this.observationRange][this.widthRange, this.heightRange]))
                {
                    this.noisyLabels[this.observationRange][this.widthRange, this.heightRange] = Variable.Bernoulli(this.observationNoiseProbability);
                }
            }

            //Variable.ConstrainEqualRandom(
            //    this.noisyLabels[this.observationRange][this.widthRange, this.heightRange],
            //    this.noisyLabelsConstraint[this.observationRange][this.widthRange, this.heightRange]);
        }
    }
}