protected void test_ToothacheCavityCatchWeatherModel_Distributions(
        IFiniteProbabilityModel model)
{
    // Weather is independent of the dental variables, so every query of the
    // independent sub-model must still hold in the combined model.
    test_ToothacheCavityCatchModel_Distributions(model);

    AssignmentProposition sunny = new AssignmentProposition(
            ExampleRV.WEATHER_RV, "sunny");
    AssignmentProposition cavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, true);

    // AIMA3e pg. 487
    // P(sunny, Cavity): a two-element vector giving the probabilities of a
    // sunny day with a cavity and a sunny day with no cavity.
    assertArrayEquals(new double[] { 0.12, 0.48 },
            model.priorDistribution(sunny, ExampleRV.CAVITY_RV).getValues(),
            DELTA_THRESHOLD);

    // AIMA3e pg. 488 (i.e. a one-element vector is returned)
    // P(sunny, cavity)
    assertArrayEquals(new double[] { 0.12 },
            model.priorDistribution(sunny, cavity).getValues(),
            DELTA_THRESHOLD);

    // P(sunny AND cavity), expressed as an explicit conjunction.
    assertArrayEquals(new double[] { 0.12 },
            model.priorDistribution(new ConjunctiveProposition(sunny, cavity))
                    .getValues(),
            DELTA_THRESHOLD);

    // P(sunny) = <0.6>
    assertArrayEquals(new double[] { 0.6 },
            model.priorDistribution(sunny).getValues(),
            DELTA_THRESHOLD);
}
// function ELIMINATION-ASK(X, e, bn) returns a distribution over X
/**
 * The ELIMINATION-ASK algorithm in Figure 14.11.
 *
 * @param X
 *            the query variables.
 * @param e
 *            observed values for variables E.
 * @param bn
 *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are
 *            the hidden variables.
 * @return a distribution over the query variables.
 */
public CategoricalDistribution eliminationAsk(RandomVariable[] X,
        AssignmentProposition[] e, BayesianNetwork bn)
{
    Set<RandomVariable> hidden = new Set<RandomVariable>();
    List<RandomVariable> VARS = new List<RandomVariable>();
    // Populate hidden and VARS (subclasses may prune irrelevant variables).
    calculateVariables(X, e, bn, hidden, VARS);

    // factors <- []
    List<Factor> factors = new List<Factor>();
    // for each var in ORDER(bn.VARS) do
    foreach (RandomVariable rv in order(bn, VARS))
    {
        // factors <- [MAKE-FACTOR(var, e) | factors]
        // Fix: .NET List<T> has no Add(int, T) overload; Insert(0, ...) is
        // the correct way to prepend (matching Java's List.add(0, ...)).
        factors.Insert(0, makeFactor(rv, e, bn));
        // if var is a hidden variable then factors <- SUM-OUT(var, factors)
        if (hidden.Contains(rv))
        {
            factors = sumOut(rv, factors, bn);
        }
    }
    // return NORMALIZE(POINTWISE-PRODUCT(factors))
    Factor product = pointwiseProduct(factors);
    // Note: want to ensure the order of the product matches the
    // query variables before normalizing.
    return ((ProbabilityTable) product.pointwiseProductPOS(_identity, X))
            .normalize();
}
// AIMA3e pg. 512
protected void test_BurglaryAlarmModel(IProbabilityModel model)
{
    Assert.IsTrue(model.isValid());

    AssignmentProposition burglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, true);
    AssignmentProposition notBurglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, false);
    AssignmentProposition notEarthquake = new AssignmentProposition(
            ExampleRV.EARTHQUAKE_RV, false);
    AssignmentProposition alarm = new AssignmentProposition(
            ExampleRV.ALARM_RV, true);
    AssignmentProposition notAlarm = new AssignmentProposition(
            ExampleRV.ALARM_RV, false);
    AssignmentProposition johnCalls = new AssignmentProposition(
            ExampleRV.JOHN_CALLS_RV, true);
    AssignmentProposition maryCalls = new AssignmentProposition(
            ExampleRV.MARY_CALLS_RV, true);

    // AIMA3e pg. 514 - full joint atomic events
    Assert.AreEqual(0.00062811126,
            model.prior(johnCalls, maryCalls, alarm, notBurglary, notEarthquake),
            DELTA_THRESHOLD);
    Assert.AreEqual(0.00049800249,
            model.prior(johnCalls, maryCalls, notAlarm, notBurglary, notEarthquake),
            DELTA_THRESHOLD);

    // AIMA3e pg. 524 - joint probabilities of the query events
    // P(Burglary = true, JohnCalls = true, MaryCalls = true) = 0.00059224
    Assert.AreEqual(0.00059224,
            model.prior(burglary, johnCalls, maryCalls), DELTA_THRESHOLD);
    // P(Burglary = false, JohnCalls = true, MaryCalls = true) = 0.0014919
    Assert.AreEqual(0.00149185764899,
            model.prior(notBurglary, johnCalls, maryCalls), DELTA_THRESHOLD);
}
/**
 * Reset this instance's persistent variables, to be used between calls to
 * particleFiltering().
 *
 * @param N
 *            the number of samples to be maintained
 * @param dbn
 *            a DBN with prior <b>P</b>(<b>X</b><sub>0</sub>), transition
 *            model <b>P</b>(<b>X</b><sub>1</sub> | <b>X</b><sub>0</sub>),
 *            sensor model <b>P</b>(<b>E</b><sub>1</sub> |
 *            <b>X</b><sub>1</sub>)
 */
public void initPersistent(int N, IDynamicBayesianNetwork dbn)
{
    this.N = N;
    this.dbn = dbn;
    // persistent: S, a vector of samples of size N, initially generated
    // from <b>P</b>(<b>X</b><sub>0</sub>)
    S = new AssignmentProposition[N][];
    // S_tp1 is the scratch population for the next time slice (t+1).
    S_tp1 = new AssignmentProposition[N][];
    // indexes 0..N-1 back the sampleIndexes random variable used when
    // resampling with replacement.
    int[] indexes = new int[N];
    for (int i = 0; i < N; ++i)
    {
        // One assignment proposition per X_0 state variable in each sample.
        S[i] = new AssignmentProposition[this.dbn.GetX_0().Size()];
        S_tp1[i] = new AssignmentProposition[this.dbn.GetX_0().Size()];
        indexes[i] = i;
        // Draw an initial sample from the prior network P(X_0).
        IMap<IRandomVariable, object> sample =
                priorSampler.priorSample(this.dbn.GetPriorNetwork());
        int idx = 0;
        foreach (var sa in sample)
        {
            // Re-key each sampled X_0 variable to its X_1 counterpart so the
            // stored samples are expressed over the t+1 state variables.
            S[i][idx] = new AssignmentProposition(
                    this.dbn.GetX_0_to_X_1().Get(sa.GetKey()), sa.GetValue());
            S_tp1[i][idx] = new AssignmentProposition(
                    this.dbn.GetX_0_to_X_1().Get(sa.GetKey()), sa.GetValue());
            idx++;
        }
    }
    // Model used to weight each sample by the likelihood of the evidence.
    sensorModel = new FiniteBayesModel(dbn, new EliminationAsk());
    // Random variable over sample indexes; used for weighted resampling.
    sampleIndexes = new RandVar("SAMPLE_INDEXES", new FiniteIntegerDomain(indexes));
}
// NOTE: method name contains a typo ("Filterinf" for "Filtering"); kept as-is
// since callers reference it by this name.
static void particleFilterinfDemo()
{
    System.Console.WriteLine("DEMO: Particle-Filtering");
    System.Console.WriteLine("========================");
    System.Console.WriteLine("Figure 15.18");
    System.Console.WriteLine("------------");

    // Scripted random values that drive the sampler deterministically so the
    // demo reproduces the walk-through of Figure 15.18.
    MockRandomizer rand = new MockRandomizer(new double[]
    {
        // Prior Sample:
        // 8 with Rain_t-1=true from the prior distribution
        0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
        // 2 with Rain_t-1=false from the prior distribution
        0.6, 0.6,
        // (a) Propagate: 6 samples Rain_t=true
        0.7, 0.7, 0.7, 0.7, 0.7, 0.7,
        // 4 samples Rain_t=false
        0.71, 0.71, 0.31, 0.31,
        // (b) Weights: first 6 samples
        // Rain_t-1=true, Rain_t=true, Umbrella_t=false = 0.1
        // next 2 samples:
        // Rain_t-1=true, Rain_t=false, Umbrella_t=false = 0.8
        // final 2 samples:
        // Rain_t-1=false, Rain_t=false, Umbrella_t=false = 0.8
        // gives W[] = [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.8, 0.8, 0.8, 0.8]
        // normalized = [0.026, ...., 0.211, ....], approx. 0.156 = true,
        // the remainder is false
        // (c) Resample: 2 Rain_t=true, 8 Rain_t=false
        0.15, 0.15, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2,
        //
        // Next Sample:
        // (a) Propagate: 1 sample Rain_t=true
        0.7,
        // 9 samples Rain_t=false
        0.71, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31,
        // (c) Resample: 1 Rain_t=true, 9 Rain_t=false
        0.0001, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2
    });

    int N = 10;
    ParticleFiltering pf = new ParticleFiltering(N,
            DynamicBayesNetExampleFactory.getUmbrellaWorldNetwork(), rand);

    AssignmentProposition[] e = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false)
    };

    System.Console.WriteLine("First Sample Set:");
    AssignmentProposition[][] samples = pf.particleFiltering(e);
    for (int i = 0; i < N; ++i)
    {
        System.Console.WriteLine("Sample " + (i + 1) + " = " + samples[i][0]);
    }

    System.Console.WriteLine("Second Sample Set:");
    samples = pf.particleFiltering(e);
    for (int i = 0; i < N; ++i)
    {
        System.Console.WriteLine("Sample " + (i + 1) + " = " + samples[i][0]);
    }
    System.Console.WriteLine("========================");
}
public void testLikelihoodWeighting_AIMA3e_pg533()
{
    // AIMA3e pg. 533
    // <b>P</b>(Rain | Cloudy = true, WetGrass = true)
    IBayesianNetwork bn =
            BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.CLOUDY_RV, true),
        new AssignmentProposition(ExampleRV.WET_GRASS_RV, true)
    };

    // Scripted randoms:
    // sample P(Sprinkler | Cloudy = true) = <0.1, 0.9>; suppose Sprinkler=false
    // sample P(Rain | Cloudy = true) = <0.8, 0.2>; suppose Rain=true
    MockRandomizer r = new MockRandomizer(new double[] { 0.5, 0.5 });
    LikelihoodWeighting lw = new LikelihoodWeighting(r);

    double[] estimate = lw.likelihoodWeighting(
            new IRandomVariable[] { ExampleRV.RAIN_RV }, evidence, bn, 1)
            .getValues();

    // The single event [true,false,true,true] has weight 0.45 and is tallied
    // under Rain = true, so normalizing yields <1.0, 0.0>.
    assertArrayEquals(new double[] { 1.0, 0.0 }, estimate, DELTA_THRESHOLD);
}
private void sampleFromTransitionModel(int i)
{
    // x <- an event initialized with S[i], re-keyed from the X_1 variables
    // back to their X_0 counterparts.
    IMap<IRandomVariable, object> evt =
            CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();
    foreach (AssignmentProposition ap in S[i])
    {
        evt.Put(this.dbn.GetX_1_to_X_0().Get(ap.getTermVariable()), ap.getValue());
    }

    // foreach variable X<sub>1<sub>i</sub></sub> in
    // X<sub>1<sub>1</sub></sub>,...,X<sub>1<sub>n</sub></sub> do
    foreach (IRandomVariable X1_i in dbn.GetX_1_VariablesInTopologicalOrder())
    {
        // x1[i] <- a random sample from
        // <b>P</b>(X<sub>1<sub>i</sub></sub> | parents(X<sub>1<sub>i</sub></sub>))
        evt.Put(X1_i, ProbUtil.randomSample(dbn.GetNode(X1_i), evt, randomizer));
    }

    // S[i] <- sample from <b>P</b>(<b>X</b><sub>1</sub> |
    // <b>X</b><sub>0</sub> = S[i]): copy the sampled X_1 values into the
    // persistent t+1 sample slot.
    foreach (AssignmentProposition ap in S_tp1[i])
    {
        ap.setValue(evt.Get(ap.getTermVariable()));
    }
}
public void testGibbsAsk_compare()
{
    // Two boolean nodes with an arc from Parent to Child.
    IRandomVariable rvParent = new RandVar("Parent", new BooleanDomain());
    IRandomVariable rvChild = new RandVar("Child", new BooleanDomain());
    FullCPTNode nodeParent = new FullCPTNode(rvParent, new double[] { 0.7, 0.3 });
    // The child node registers itself with its parent on construction.
    new FullCPTNode(rvChild, new double[] { 0.8, 0.2, 0.2, 0.8 }, nodeParent);
    BayesNet net = new BayesNet(nodeParent);

    // Query the parent's probability...
    IRandomVariable[] query = new IRandomVariable[] { rvParent };
    // ...given child evidence (true).
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(rvChild, true)
    };

    // All three approximate-inference algorithms should agree on ~0.9.
    ICategoricalDistribution samplesLW =
            new LikelihoodWeighting().Ask(query, evidence, net, 1000);
    Assert.AreEqual(0.9, samplesLW.getValue(true), DELTA_THRESHOLD);

    ICategoricalDistribution samplesRS =
            new RejectionSampling().Ask(query, evidence, net, 1000);
    Assert.AreEqual(0.9, samplesRS.getValue(true), DELTA_THRESHOLD);

    ICategoricalDistribution samplesGibbs =
            new GibbsAsk().Ask(query, evidence, net, 1000);
    Assert.AreEqual(0.9, samplesGibbs.getValue(true), DELTA_THRESHOLD);
}
// function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)
/**
 * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
 * evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param Nsamples
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public CategoricalDistribution gibbsAsk(RandomVariable[] X,
        AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
{
    // local variables: <b>N</b>, a vector of counts for each value of X,
    // initially zero
    double[] N = new double[ProbUtil
            .expectedSizeOfCategoricalDistribution(X)];
    // Z, the nonevidence variables in bn
    Set<RandomVariable> Z = new Set<RandomVariable>(
            bn.getVariablesInTopologicalOrder());
    foreach (AssignmentProposition ap in e)
    {
        Z.Remove(ap.getTermVariable());
    }
    // <b>x</b>, the current state of the network, initially copied from e
    // NOTE(review): Java-style Map/LinkedHashMap types and a mix of .Add and
    // .put calls below — this looks like an incomplete Java-to-C# port;
    // confirm against the collection type actually used by the project.
    Map<RandomVariable, Object> x = new LinkedHashMap<RandomVariable, Object>();
    foreach (AssignmentProposition ap in e)
    {
        x.Add(ap.getTermVariable(), ap.getValue());
    }
    // initialize <b>x</b> with random values for the variables in Z
    foreach (RandomVariable Zi in Z)
    {
        x.put(Zi, ProbUtil.randomSample(bn.getNode(Zi), x, randomizer));
    }
    // for j = 1 to N do
    for (int j = 0; j < Nsamples; j++)
    {
        // for each Z<sub>i</sub> in Z do
        foreach (RandomVariable Zi in Z)
        {
            // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
            // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>)) - its Markov blanket
            x.put(Zi, ProbUtil.mbRandomSample(bn.getNode(Zi), x, randomizer));
        }
        // Note: the count update is kept OUTSIDE the inner loop, unlike the
        // literal reading of Fig 14.16, which only works correctly for a
        // single query variable X. With multiple query variables, rare
        // events would get weighted incorrectly if counted inside the inner
        // loop. For a single variable each possible value would simply be
        // counted |Z| times, giving the same ratios after normalization, so
        // counting once per outer iteration is also more efficient.
        //
        // <b>N</b>[x] <- <b>N</b>[x] + 1
        // where x is the value of X in <b>x</b>
        N[ProbUtil.indexOf(X, x)] += 1.0;
    }
    // return NORMALIZE(<b>N</b>)
    return new ProbabilityTable(N, X).normalize();
}
public void testPriorSample_basic()
{
    IBayesianNetwork bn =
            BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.SPRINKLER_RV, true)
    };

    // A constant 0.1 draw makes every prior sample consistent with the
    // evidence and always selects Rain=true.
    MockRandomizer r = new MockRandomizer(new double[] { 0.1 });
    RejectionSampling rs = new RejectionSampling(new PriorSample(r));

    double[] estimate = rs.rejectionSampling(
            new IRandomVariable[] { ExampleRV.RAIN_RV }, evidence, bn, 100)
            .getValues();

    assertArrayEquals(new double[] { 1.0, 0.0 }, estimate, DELTA_THRESHOLD);
}
public void testGibbsAsk_basic()
{
    IBayesianNetwork bn =
            BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.SPRINKLER_RV, true)
    };

    GibbsAsk ga = new GibbsAsk();

    // 1000 samples should converge to approximately <0.3, 0.7>.
    double[] estimate = ga.gibbsAsk(
            new IRandomVariable[] { ExampleRV.RAIN_RV }, evidence, bn, 1000)
            .getValues();

    assertArrayEquals(new double[] { 0.3, 0.7 }, estimate, DELTA_THRESHOLD);
}
protected static void demoBurglaryAlarmModel(IFiniteProbabilityModel model)
{
    System.Console.WriteLine("--------------------");
    System.Console.WriteLine("Burglary Alarm Model");
    System.Console.WriteLine("--------------------");

    AssignmentProposition burglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, true);
    AssignmentProposition notBurglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, false);
    AssignmentProposition notEarthquake = new AssignmentProposition(
            ExampleRV.EARTHQUAKE_RV, false);
    AssignmentProposition alarm = new AssignmentProposition(
            ExampleRV.ALARM_RV, true);
    AssignmentProposition notAlarm = new AssignmentProposition(
            ExampleRV.ALARM_RV, false);
    AssignmentProposition johnCalls = new AssignmentProposition(
            ExampleRV.JOHN_CALLS_RV, true);
    AssignmentProposition maryCalls = new AssignmentProposition(
            ExampleRV.MARY_CALLS_RV, true);

    // AIMA3e pg. 514 - joint atomic events
    System.Console.WriteLine("P(j,m,a,~b,~e) = "
            + model.prior(johnCalls, maryCalls, alarm, notBurglary, notEarthquake));
    System.Console.WriteLine("P(j,m,~a,~b,~e) = "
            + model.prior(johnCalls, maryCalls, notAlarm, notBurglary, notEarthquake));

    // AIMA3e. pg. 514
    // P<>(Alarm | JohnCalls = true, MaryCalls = true, Burglary = false,
    // Earthquake = false) = <0.558, 0.442>
    System.Console
            .WriteLine("P<>(Alarm | JohnCalls = true, MaryCalls = true, Burglary = false, Earthquake = false) = "
                    + model.posteriorDistribution(ExampleRV.ALARM_RV,
                            johnCalls, maryCalls, notBurglary, notEarthquake));

    // AIMA3e pg. 523
    // P<>(Burglary | JohnCalls = true, MaryCalls = true) = <0.284, 0.716>
    System.Console
            .WriteLine("P<>(Burglary | JohnCalls = true, MaryCalls = true) = "
                    + model.posteriorDistribution(ExampleRV.BURGLARY_RV,
                            johnCalls, maryCalls));

    // AIMA3e pg. 528
    // P<>(JohnCalls | Burglary = true)
    System.Console.WriteLine("P<>(JohnCalls | Burglary = true) = "
            + model.posteriorDistribution(ExampleRV.JOHN_CALLS_RV, burglary));
}
public void testRejectionSampling_AIMA3e_pg532()
{
    // AIMA3e pg. 532
    IBayesianNetwork bn =
            BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.SPRINKLER_RV, true)
    };

    // 400 scripted draws: 4 variables per sample x 100 planned samples.
    double[] draws = new double[400];
    for (int i = 0; i < draws.Length; i += 4)
    {
        if (i < (73 * 4))
        {
            // Of the 100 generated, suppose 73 have Sprinkler = false and
            // are rejected.
            draws[i] = 0.5;      // i.e Cloudy=true
            draws[i + 1] = 0.2;  // i.e. Sprinkler=false
            draws[i + 2] = 0.5;  // i.e. Rain=true
            draws[i + 3] = 0.1;  // i.e. WetGrass=true
        }
        else
        {
            draws[i] = 0.5;      // i.e Cloudy=true
            draws[i + 1] = 0.09; // i.e. Sprinkler=true
            if (i < ((73 + 8) * 4))
            {
                // While 27 have Sprinkler = true; of the 27, 8 have Rain = true
                draws[i + 2] = 0.5; // i.e. Rain=true
            }
            else
            {
                // and 19 have Rain = false.
                draws[i + 2] = 0.9; // i.e. Rain=false
            }
            draws[i + 3] = 0.1;     // i.e. WetGrass=true
        }
    }

    MockRandomizer r = new MockRandomizer(draws);
    RejectionSampling rs = new RejectionSampling(new PriorSample(r));

    double[] estimate = rs.rejectionSampling(
            new IRandomVariable[] { ExampleRV.RAIN_RV }, evidence, bn, 100)
            .getValues();

    // 8/27 Rain=true, 19/27 Rain=false.
    assertArrayEquals(
            new double[] { 0.2962962962962963, 0.7037037037037037 },
            estimate, DELTA_THRESHOLD);
}
public void testLikelihoodWeighting_basic()
{
    IBayesianNetwork bn =
            BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
    AssignmentProposition[] evidence = new AssignmentProposition[]
    {
        new AssignmentProposition(ExampleRV.SPRINKLER_RV, true)
    };

    // Constant 0.5 draws always sample Rain=true, so all the weight falls
    // on the first entry.
    MockRandomizer r = new MockRandomizer(new double[] { 0.5, 0.5, 0.5, 0.5 });
    LikelihoodWeighting lw = new LikelihoodWeighting(r);

    double[] estimate = lw.likelihoodWeighting(
            new IRandomVariable[] { ExampleRV.RAIN_RV }, evidence, bn, 1000)
            .getValues();

    assertArrayEquals(new double[] { 1.0, 0.0 }, estimate, DELTA_THRESHOLD);
}
// AIMA3e pg. 496
protected void test_MeningitisStiffNeckModel_Distributions(
        IFiniteProbabilityModel model)
{
    AssignmentProposition stiffNeck = new AssignmentProposition(
            ExampleRV.STIFF_NECK_RV, true);

    // AIMA3e pg. 497
    // P<>(Meningitis | stiffneck) = α<P(s | m)P(m), P(s | ~m)P(~m)>
    ICategoricalDistribution posterior = model.posteriorDistribution(
            ExampleRV.MENINGITIS_RV, stiffNeck);
    double[] values = posterior.getValues();

    Assert.AreEqual(2, values.Length);
    Assert.AreEqual(0.0014, values[0], DELTA_THRESHOLD);
    Assert.AreEqual(0.9986, values[1], DELTA_THRESHOLD);
}
// AIMA3e pg. 496
protected void test_MeningitisStiffNeckModel(IProbabilityModel model)
{
    Assert.IsTrue(model.isValid());

    AssignmentProposition meningitis = new AssignmentProposition(
            ExampleRV.MENINGITIS_RV, true);
    AssignmentProposition notMeningitis = new AssignmentProposition(
            ExampleRV.MENINGITIS_RV, false);
    AssignmentProposition stiffNeck = new AssignmentProposition(
            ExampleRV.STIFF_NECK_RV, true);
    AssignmentProposition notStiffNeck = new AssignmentProposition(
            ExampleRV.STIFF_NECK_RV, false);

    // P(stiffNeck | meningitis) = 0.7
    Assert.AreEqual(0.7, model.posterior(stiffNeck, meningitis), DELTA_THRESHOLD);
    // P(meningitis) = 1/50000
    Assert.AreEqual(0.00002, model.prior(meningitis), DELTA_THRESHOLD);
    // P(~meningitis) = 1 - 1/50000
    Assert.AreEqual(0.99998, model.prior(notMeningitis), DELTA_THRESHOLD);
    // P(stiffNeck) = 0.01
    Assert.AreEqual(0.01, model.prior(stiffNeck), DELTA_THRESHOLD);
    // P(~stiffNeck) = 0.99
    Assert.AreEqual(0.99, model.prior(notStiffNeck), DELTA_THRESHOLD);

    // Bayes' rule (13.4):
    // P(meningitis | stiffneck)
    //   = P(stiffneck | meningitis)P(meningitis)/P(stiffneck)
    //   = (0.7 * 0.00002)/0.01
    //   = 0.0014
    Assert.AreEqual(0.0014, model.posterior(meningitis, stiffNeck),
            DELTA_THRESHOLD);

    // Assuming P(~stiffneck | meningitis) = 0.3 (pg. 497), i.e. the CPT row
    // must sum to 1:
    // P(meningitis | ~stiffneck)
    //   = P(~stiffneck | meningitis)P(meningitis)/P(~stiffneck)
    //   = (0.3 * 0.00002)/0.99
    //   = 0.000006060606
    Assert.AreEqual(0.000006060606, model.posterior(meningitis, notStiffNeck),
            DELTA_THRESHOLD);
}
/**
 * The population is re-sampled to generate a new population of N samples.
 * Each new sample is selected from the current population; the probability
 * that a particular sample is selected is proportional to its weight. The
 * new samples are un-weighted.
 *
 * @param N
 *            the number of samples
 * @param S
 *            a vector of samples of size N, where each sample is a vector
 *            of assignment propositions for the X_1 state variables, which
 *            is intended to represent the sample for time t
 * @param W
 *            a vector of weights of size N
 *
 * @return a new vector of samples of size N sampled from S based on W
 */
private AssignmentProposition[][] weightedSampleWithReplacement(int N,
        AssignmentProposition[][] S, double[] W)
{
    AssignmentProposition[][] newS = new AssignmentProposition[N][];
    // Normalize the weights so they form a distribution over sample indexes.
    double[] normalizedW = Util.normalize(W);
    for (int i = 0; i < N; ++i)
    {
        newS[i] = new AssignmentProposition[this.dbn.GetX_0().Size()];
        // Pick a source sample index in proportion to its normalized weight.
        int sample = (int) ProbUtil.sample(randomizer.NextDouble(),
                sampleIndexes, normalizedW);
        // Fix: draw from the S parameter as documented; the previous code
        // ignored S and read the instance field S_tp1 directly (equivalent
        // only when the caller happened to pass S_tp1).
        for (int idx = 0; idx < S[sample].Length; idx++)
        {
            // Deep-copy each assignment so the new population is independent
            // of the source samples.
            AssignmentProposition ap = S[sample][idx];
            newS[i][idx] = new AssignmentProposition(ap.getTermVariable(),
                    ap.getValue());
        }
    }
    return newS;
}
// function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
// <b>P</b>(X|e)
/**
 * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering queries
 * given evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param N
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public CategoricalDistribution likelihoodWeighting(RandomVariable[] X,
        AssignmentProposition[] e, BayesianNetwork bn, int N)
{
    // local variables: W, a vector of weighted counts for each value of X,
    // initially zero
    double[] W = new double[ProbUtil
            .expectedSizeOfCategoricalDistribution(X)];
    // for j = 1 to N do
    for (int j = 0; j < N; j++)
    {
        // <b>x</b>,w <- WEIGHTED-SAMPLE(bn,e)
        // Each sample is consistent with e by construction and carries the
        // likelihood of e as its weight.
        Pair<Map<RandomVariable, Object>, Double> x_w = weightedSample(bn, e);
        // W[x] <- W[x] + w where x is the value of X in <b>x</b>
        W[ProbUtil.indexOf(X, x_w.getFirst())] += x_w.getSecond();
    }
    // return NORMALIZE(W)
    return new ProbabilityTable(W, X).normalize();
}
/**
 * The FORWARD operation: computes the next forward message from the current
 * filtered estimate f1_t and the new evidence e_tp1, i.e.
 * <b>P</b>(X<sub>t+1</sub> | e<sub>1:t+1</sub>).
 *
 * @param f1_t
 *            the filtered estimate for time t.
 * @param e_tp1
 *            the evidence assignments for time t+1.
 * @return the (normalized) filtered estimate for time t+1.
 */
public ICategoricalDistribution forward(ICategoricalDistribution f1_t,
        ICollection<AssignmentProposition> e_tp1)
{
    // Accumulator for the one-step prediction, over the same variables as
    // the incoming message.
    ICategoricalDistribution s1 = new ProbabilityTable(f1_t.getFor());
    // Set up required working variables
    IProposition[] props = new IProposition[s1.getFor().Size()];
    int i = 0;
    foreach (IRandomVariable rv in s1.getFor())
    {
        props[i] = new RandVar(rv.getName(), rv.getDomain());
        ++i;
    }
    // Conjunction representing the X_{t+1} state variables.
    IProposition Xtp1 = ProbUtil.constructConjunction(props);

    // Placeholder assignments for the X_t variables; their values are
    // updated in place as the iterator walks the distribution.
    AssignmentProposition[] xt = new AssignmentProposition[tToTm1StateVarMap.Size()];
    IMap<IRandomVariable, AssignmentProposition> xtVarAssignMap =
            CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, AssignmentProposition>();
    i = 0;
    foreach (IRandomVariable rv in tToTm1StateVarMap.GetKeys())
    {
        xt[i] = new AssignmentProposition(tToTm1StateVarMap.Get(rv),
                "<Dummy Value>");
        xtVarAssignMap.Put(rv, xt[i]);
        ++i;
    }

    // Step 1: Calculate the 1 time step prediction
    // &sum;<sub>x<sub>t</sub></sub>
    CategoricalDistributionIterator if1_t = new CategoricalDistributionIteratorImpl(
            transitionModel, xtVarAssignMap, s1, Xtp1, xt);
    f1_t.iterateOver(if1_t);

    // Step 2: multiply by the probability of the evidence and normalize
    // <b>P</b>(e<sub>t+1</sub> | X<sub>t+1</sub>)
    ICategoricalDistribution s2 = sensorModel.posteriorDistribution(
            ProbUtil.constructConjunction(e_tp1.ToArray()), Xtp1);

    return (s2.multiplyBy(s1).normalize());
}
/**
 * Build a conditioning case from raw parent values by pairing each parent
 * variable, in declaration order, with its corresponding value, then
 * delegating to the AssignmentProposition-based overload.
 */
public virtual ICategoricalDistribution GetConditioningCase(params object[] parentValues)
{
    // Guard: exactly one value must be supplied per parent variable.
    if (parentValues.Length != parents.Size())
    {
        throw new IllegalArgumentException(
                "The number of parent value arguments [" + parentValues.Length
                        + "] is not equal to the number of parents ["
                        + parents.Size() + "] for this CPT.");
    }

    AssignmentProposition[] assignments =
            new AssignmentProposition[parentValues.Length];
    int i = 0;
    foreach (IRandomVariable parentRV in parents)
    {
        assignments[i] = new AssignmentProposition(parentRV, parentValues[i]);
        ++i;
    }
    return (GetConditioningCase(assignments));
}
protected static void demoToothacheCavityCatchModel(IFiniteProbabilityModel model)
{
    System.Console.WriteLine("Toothache, Cavity, and Catch Model");
    System.Console.WriteLine("----------------------------------");

    AssignmentProposition toothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, true);
    AssignmentProposition cavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, true);
    AssignmentProposition notCavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, false);
    AssignmentProposition catchProp = new AssignmentProposition(
            ExampleRV.CATCH_RV, true);

    // AIMA3e pg. 485
    System.Console.WriteLine("P(cavity) = " + model.prior(cavity));
    System.Console.WriteLine("P(cavity | toothache) = "
            + model.posterior(cavity, toothache));

    // AIMA3e pg. 492
    DisjunctiveProposition cavityOrToothache = new DisjunctiveProposition(
            cavity, toothache);
    System.Console.WriteLine("P(cavity OR toothache) = "
            + model.prior(cavityOrToothache));

    // AIMA3e pg. 493
    System.Console.WriteLine("P(~cavity | toothache) = "
            + model.posterior(notCavity, toothache));

    // AIMA3e pg. 493
    // P<>(Cavity | toothache) = <0.6, 0.4>
    System.Console.WriteLine("P<>(Cavity | toothache) = "
            + model.posteriorDistribution(ExampleRV.CAVITY_RV, toothache));

    // AIMA3e pg. 497
    // P<>(Cavity | toothache AND catch) = <0.871, 0.129>
    System.Console.WriteLine("P<>(Cavity | toothache AND catch) = "
            + model.posteriorDistribution(ExampleRV.CAVITY_RV, toothache, catchProp));
}
/**
 * The BACKWARD operation: computes the previous backward message from the
 * incoming message b_kp2t and the evidence e_kp1 for time k+1, i.e.
 * <b>P</b>(e<sub>k+1:t</sub> | X<sub>k</sub>).
 *
 * @param b_kp2t
 *            the backward message for time k+2 to t.
 * @param e_kp1
 *            the evidence assignments for time k+1.
 * @return the backward message for time k+1 to t.
 */
public ICategoricalDistribution backward(ICategoricalDistribution b_kp2t,
        ICollection<AssignmentProposition> e_kp1)
{
    // Accumulator for the result, over the same variables as the incoming
    // message.
    ICategoricalDistribution b_kp1t = new ProbabilityTable(b_kp2t.getFor());
    // Set up required working variables
    IProposition[] props = new IProposition[b_kp1t.getFor().Size()];
    int i = 0;
    foreach (IRandomVariable rv in b_kp1t.getFor())
    {
        // Map each time-t variable back to its time-(t-1) counterpart to
        // represent the X_k state variables.
        IRandomVariable prv = tToTm1StateVarMap.Get(rv);
        props[i] = new RandVar(prv.getName(), prv.getDomain());
        ++i;
    }
    IProposition Xk = ProbUtil.constructConjunction(props);

    // Placeholder assignments for the X_{k+1} variables; values are updated
    // in place during iteration.
    AssignmentProposition[] ax_kp1 = new AssignmentProposition[tToTm1StateVarMap.Size()];
    IMap<IRandomVariable, AssignmentProposition> x_kp1VarAssignMap =
            CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, AssignmentProposition>();
    i = 0;
    foreach (IRandomVariable rv in b_kp1t.getFor())
    {
        ax_kp1[i] = new AssignmentProposition(rv, "<Dummy Value>");
        x_kp1VarAssignMap.Put(rv, ax_kp1[i]);
        ++i;
    }
    IProposition x_kp1 = ProbUtil.constructConjunction(ax_kp1);
    // Conjunction of the k+1 evidence assignments.
    props = e_kp1.ToArray();
    IProposition pe_kp1 = ProbUtil.constructConjunction(props);

    // &sum;<sub>x<sub>k+1</sub></sub>
    CategoricalDistributionIterator ib_kp2t = new CategoricalDistributionIteratorImpl2(
            x_kp1VarAssignMap, sensorModel, transitionModel, b_kp1t, pe_kp1,
            Xk, x_kp1);
    b_kp2t.iterateOver(ib_kp2t);

    return (b_kp1t);
}
protected void test_BurglaryAlarmModel_Distributions(
        IFiniteProbabilityModel model)
{
    AssignmentProposition burglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, true);
    AssignmentProposition notBurglary = new AssignmentProposition(
            ExampleRV.BURGLARY_RV, false);
    AssignmentProposition notEarthquake = new AssignmentProposition(
            ExampleRV.EARTHQUAKE_RV, false);
    AssignmentProposition johnCalls = new AssignmentProposition(
            ExampleRV.JOHN_CALLS_RV, true);
    AssignmentProposition maryCalls = new AssignmentProposition(
            ExampleRV.MARY_CALLS_RV, true);

    // AIMA3e. pg. 514
    // P<>(Alarm | JohnCalls = true, MaryCalls = true, Burglary = false,
    // Earthquake = false) = <0.558, 0.442>
    assertArrayEquals(
            new double[] { 0.5577689243027888, 0.44223107569721115 },
            model.posteriorDistribution(ExampleRV.ALARM_RV, johnCalls,
                    maryCalls, notBurglary, notEarthquake).getValues(),
            DELTA_THRESHOLD);

    // AIMA3e pg. 523
    // P<>(Burglary | JohnCalls = true, MaryCalls = true) = <0.284, 0.716>
    assertArrayEquals(
            new double[] { 0.2841718353643929, 0.7158281646356071 },
            model.posteriorDistribution(ExampleRV.BURGLARY_RV, johnCalls,
                    maryCalls).getValues(),
            DELTA_THRESHOLD);

    // AIMA3e pg. 528
    // P<>(JohnCalls | Burglary = true)
    assertArrayEquals(
            new double[] { 0.8490169999999999, 0.15098299999999998 },
            model.posteriorDistribution(ExampleRV.JOHN_CALLS_RV, burglary)
                    .getValues(),
            DELTA_THRESHOLD);
}
// function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
// <b>P</b>(X|e)
/**
 * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering queries
 * given evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network
 * @param Nsamples
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public CategoricalDistribution rejectionSampling(RandomVariable[] X,
        AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
{
    // local variables: <b>N</b>, a vector of counts for each value of X,
    // initially zero
    double[] N = new double[ProbUtil
            .expectedSizeOfCategoricalDistribution(X)];
    // for j = 1 to N do
    for (int j = 0; j < Nsamples; j++)
    {
        // <b>x</b> <- PRIOR-SAMPLE(bn)
        Map<RandomVariable, Object> x = ps.priorSample(bn);
        // if <b>x</b> is consistent with e then count it; otherwise the
        // sample is rejected (simply not tallied).
        if (isConsistent(x, e))
        {
            // <b>N</b>[x] <- <b>N</b>[x] + 1
            // where x is the value of X in <b>x</b>
            N[ProbUtil.indexOf(X, x)] += 1.0;
        }
    }
    // return NORMALIZE(<b>N</b>)
    return new ProbabilityTable(N, X).normalize();
}
// END-BayesInference
//

//
// PROTECTED METHODS
//
/**
 * <b>Note:</b> Override this method for a more efficient implementation as
 * outlined in AIMA3e pgs. 527-28. Calculate the hidden variables from the
 * Bayesian Network. The default implementation does not perform any
 * pruning.<br>
 * <br>
 * Two calculations are to be performed here in order to optimize iteration
 * over the Bayesian Network:<br>
 * 1. Calculate the hidden variables to be enumerated over. An optimization
 * (AIMA3e pg. 528) is to remove 'every variable that is not an ancestor of
 * a query variable or evidence variable as it is irrelevant to the query'
 * (i.e. sums to 1).<br>
 * 2. The subset of variables from the Bayesian Network to be retained after
 * irrelevant hidden variables have been removed.
 *
 * @param X
 *            the query variables.
 * @param e
 *            observed values for variables E.
 * @param bn
 *            a Bayes net with variables {X} &cup; E &cup; Y, where Y =
 *            hidden variables.
 * @param hidden
 *            to be populated with the relevant hidden variables Y.
 * @param bnVARS
 *            to be populated with the subset of the random variables
 *            comprising the Bayesian Network with any irrelevant hidden
 *            variables removed.
 */
protected void calculateVariables(RandomVariable[] X,
        AssignmentProposition[] e, BayesianNetwork bn,
        Set<RandomVariable> hidden, List<RandomVariable> bnVARS)
{
    // NOTE(review): the calls below mix C# (.AddRange) and Java-style
    // (.addAll/.remove/.removeAll) collection APIs — this looks like an
    // incomplete Java-to-C# port; confirm against the actual Set/List types
    // used by the project.
    bnVARS.AddRange(bn.getVariablesInTopologicalOrder());
    // Start with every network variable, then subtract the query and
    // evidence variables; what remains is hidden.
    hidden.addAll(bnVARS);
    foreach (RandomVariable x in X)
    {
        hidden.remove(x);
    }
    foreach (AssignmentProposition ap in e)
    {
        hidden.removeAll(ap.getScope());
    }
    return;
}
// END-BayesSampleInference
//

//
// PRIVATE METHODS
//
/**
 * A sampled event is consistent with the evidence iff every evidence
 * assignment matches the corresponding sampled value.
 */
private bool isConsistent(Map<RandomVariable, Object> x,
        AssignmentProposition[] e)
{
    foreach (AssignmentProposition ap in e)
    {
        object sampled = x.get(ap.getTermVariable());
        if (!ap.getValue().Equals(sampled))
        {
            // First mismatch rejects the whole sample.
            return false;
        }
    }
    return true;
}
//
// START-BayesSampleInference
/**
 * Answer the query by delegating to the LIKELIHOOD-WEIGHTING algorithm.
 *
 * @param X
 *            the query variables
 * @param observedEvidence
 *            observed values for the evidence variables
 * @param bn
 *            the Bayesian network to query
 * @param N
 *            the total number of samples to generate
 * @return an estimate of <b>P</b>(X|e)
 */
public CategoricalDistribution ask(RandomVariable[] X,
        AssignmentProposition[] observedEvidence, BayesianNetwork bn, int N)
{
    return likelihoodWeighting(X, observedEvidence, bn, N);
}
// function WEIGHTED-SAMPLE(bn, e) returns an event and a weight
/**
 * The WEIGHTED-SAMPLE function in Figure 14.15.
 *
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param e
 *            observed values for variables E
 * @return <b>x</b>, w - an event (consistent with e by construction) with
 *         its associated weight (the likelihood of e given the sampled
 *         non-evidence values).
 */
public Pair<Map<RandomVariable, Object>, Double> weightedSample(
        BayesianNetwork bn, AssignmentProposition[] e)
{
    // w <- 1;
    double w = 1.0;
    // <b>x</b> <- an event with n elements initialized from e
    // NOTE(review): Java-style Map/LinkedHashMap types with C#-style .Add
    // calls — looks like an incomplete Java-to-C# port; confirm against the
    // collection type actually used by the project.
    Map<RandomVariable, Object> x = new LinkedHashMap<RandomVariable, Object>();
    foreach (AssignmentProposition ap in e)
    {
        x.Add(ap.getTermVariable(), ap.getValue());
    }
    // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
    foreach (RandomVariable Xi in bn.getVariablesInTopologicalOrder())
    {
        // if X<sub>i</sub> is an evidence variable with value x<sub>i</sub>
        // in e
        if (x.ContainsKey(Xi))
        {
            // then w <- w * P(X<sub>i</sub> = x<sub>i</sub> |
            // parents(X<sub>i</sub>))
            w *= bn.getNode(Xi)
                    .getCPD()
                    .getValue(
                            ProbUtil.getEventValuesForXiGivenParents(
                                    bn.getNode(Xi), x));
        }
        else
        {
            // else <b>x</b>[i] <- a random sample from
            // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
            x.Add(Xi, ProbUtil.randomSample(bn.getNode(Xi), x, randomizer));
        }
    }
    // return <b>x</b>, w
    return new Pair<Map<RandomVariable, Object>, Double>(x, w);
}
// AIMA3e pg. 488, 494
/**
 * Verifies the combined Toothache/Cavity/Catch + Weather model: Weather is
 * independent of the dental variables, so priors factor and posteriors
 * conditioned on dental evidence equal the unconditional priors.
 */
protected void test_ToothacheCavityCatchWeatherModel(IProbabilityModel model)
{
    // Should be able to run all the same queries for this independent
    // sub model.
    test_ToothacheCavityCatchModel(model);

    // AIMA3e pg. 486
    AssignmentProposition asunny = new AssignmentProposition(
            ExampleRV.WEATHER_RV, "sunny");
    AssignmentProposition arain = new AssignmentProposition(
            ExampleRV.WEATHER_RV, "rain");
    AssignmentProposition acloudy = new AssignmentProposition(
            ExampleRV.WEATHER_RV, "cloudy");
    AssignmentProposition asnow = new AssignmentProposition(
            ExampleRV.WEATHER_RV, "snow");

    // Weather priors as given on pg. 486.
    Assert.AreEqual(0.6, model.prior(asunny), DELTA_THRESHOLD);
    Assert.AreEqual(0.1, model.prior(arain), DELTA_THRESHOLD);
    Assert.AreEqual(0.29, model.prior(acloudy), DELTA_THRESHOLD);
    Assert.AreEqual(0.01, model.prior(asnow), DELTA_THRESHOLD);

    // AIMA3e pg. 488
    // P(sunny, cavity)
    // P(sunny AND cavity)
    AssignmentProposition atoothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, true);
    AssignmentProposition acatch = new AssignmentProposition(
            ExampleRV.CATCH_RV, true);
    AssignmentProposition acavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, true);
    ConjunctiveProposition sunnyAndCavity = new ConjunctiveProposition(
            asunny, acavity);
    // 0.6 (sunny) * 0.2 (cavity) = 0.12 by independence.
    Assert.AreEqual(0.12, model.prior(asunny, acavity), DELTA_THRESHOLD);
    Assert.AreEqual(0.12, model.prior(sunnyAndCavity), DELTA_THRESHOLD);

    // AIMA3e pg. 494
    // P(toothache, catch, cavity, cloudy) =
    // P(cloudy | toothache, catch, cavity)P(toothache, catch, cavity)
    Assert.AreEqual(
            model.prior(atoothache, acatch, acavity, acloudy),
            model.posterior(acloudy, atoothache, acatch, acavity)
                    * model.prior(atoothache, acatch, acavity),
            DELTA_THRESHOLD);
    ConjunctiveProposition toothacheAndCatchAndCavityAndCloudy = new ConjunctiveProposition(
            new ConjunctiveProposition(atoothache, acatch),
            new ConjunctiveProposition(acavity, acloudy));
    ConjunctiveProposition toothacheAndCatchAndCavity = new ConjunctiveProposition(
            new ConjunctiveProposition(atoothache, acatch), acavity);
    Assert.AreEqual(
            model.prior(toothacheAndCatchAndCavityAndCloudy),
            model.posterior(acloudy, atoothache, acatch, acavity)
                    * model.prior(toothacheAndCatchAndCavity),
            DELTA_THRESHOLD);

    // P(cloudy | toothache, catch, cavity) = P(cloudy)
    // (13.10)
    Assert.AreEqual(
            model.posterior(acloudy, atoothache, acatch, acavity),
            model.prior(acloudy), DELTA_THRESHOLD);

    // P(toothache, catch, cavity, cloudy) =
    // P(cloudy)P(toothache, catch, cavity)
    Assert.AreEqual(
            model.prior(atoothache, acatch, acavity, acloudy),
            model.prior(acloudy)
                    * model.prior(atoothache, acatch, acavity),
            DELTA_THRESHOLD);

    // Independence identities between Cavity and Cloudy:
    // P(a | b) = P(a)
    Assert.AreEqual(model.posterior(acavity, acloudy),
            model.prior(acavity), DELTA_THRESHOLD);
    // P(b | a) = P(b)
    Assert.AreEqual(model.posterior(acloudy, acavity),
            model.prior(acloudy), DELTA_THRESHOLD);
    // P(a AND b) = P(a)P(b)
    Assert.AreEqual(model.prior(acavity, acloudy),
            model.prior(acavity) * model.prior(acloudy), DELTA_THRESHOLD);
    ConjunctiveProposition acavityAndacloudy = new ConjunctiveProposition(
            acavity, acloudy);
    Assert.AreEqual(model.prior(acavityAndacloudy),
            model.prior(acavity) * model.prior(acloudy), DELTA_THRESHOLD);
}
/**
 * Particle filtering over the Umbrella World DBN (AIMA3e Fig. 15.18).
 * A MockRandomizer supplies a fixed stream of "random" numbers so the
 * prior sampling, transition propagation and weighted resampling steps
 * are fully deterministic; the assertions below pin the exact particle
 * populations those numbers must produce. The comments inside the array
 * track which algorithm step each run of values feeds.
 */
public void test_AIMA3e_Fig15_18()
{
    IRandom mr = new MockRandomizer(new double[] {
        // Prior Sample:
        // 8 with Rain_t-1=true from prior distribution
        0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5, 0.5,
        // 2 with Rain_t-1=false from prior distribution
        0.6, 0.6,
        // (a) Propagate 6 samples Rain_t=true
        0.7, 0.7, 0.7, 0.7, 0.7, 0.7,
        // 4 samples Rain_t=false
        0.71, 0.71, 0.31, 0.31,
        // (b) Weight should be for first 6 samples:
        // Rain_t-1=true, Rain_t=true, Umbrella_t=false = 0.1
        // Next 2 samples:
        // Rain_t-1=true, Rain_t=false, Umbrella_t=false = 0.8
        // Final 2 samples:
        // Rain_t-1=false, Rain_t=false, Umbrella_t=false = 0.8
        // gives W[] =
        // [0.1, 0.1, 0.1, 0.1, 0.1, 0.1, 0.8, 0.8, 0.8, 0.8]
        // normalized =
        // [0.026, ...., 0.211, ....] is approx. 0.156 = true
        // the remainder is false
        // (c) Resample 2 Rain_t=true, 8 Rain_t=false
        0.15, 0.15, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2,
        //
        // Next Sample:
        // (a) Propagate 1 samples Rain_t=true
        0.7,
        // 9 samples Rain_t=false
        0.71, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31, 0.31,
        // (c) resample 1 Rain_t=true, 9 Rain_t=false
        0.0001, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2, 0.2 });

    int N = 10;
    ParticleFiltering pf = new ParticleFiltering(N,
            DynamicBayesNetExampleFactory.getUmbrellaWorldNetwork(), mr);

    AssignmentProposition[] e = new AssignmentProposition[] {
        new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false) };

    // First filtering step: expect 2 Rain_t=true particles then 8 false.
    AssignmentProposition[][] S = pf.particleFiltering(e);
    Assert.AreEqual(N, S.Length);
    for (int i = 0; i < N; ++i)
    {
        Assert.AreEqual(1, S[i].Length);
        AssignmentProposition ap = S[i][0];
        Assert.AreEqual(ExampleRV.RAIN_t_RV, ap.getTermVariable());
        if (i < 2)
        {
            Assert.AreEqual(true, ap.getValue());
        }
        else
        {
            Assert.AreEqual(false, ap.getValue());
        }
    }

    // Generate next sample to ensure everything rolls forward ok
    // in this case with prefixed probabilities only expect 1 Rain_t=true
    S = pf.particleFiltering(e);
    Assert.AreEqual(N, S.Length);
    for (int i = 0; i < N; ++i)
    {
        Assert.AreEqual(1, S[i].Length);
        AssignmentProposition ap = S[i][0];
        Assert.AreEqual(ExampleRV.RAIN_t_RV, ap.getTermVariable());
        if (i < 1)
        {
            Assert.AreEqual(true, ap.getValue());
        }
        else
        {
            Assert.AreEqual(false, ap.getValue());
        }
    }
}
/**
 * Verifies the basic Toothache/Cavity/Catch full-joint-distribution model
 * against the worked examples in AIMA3e Ch. 13 (pgs. 485, 492-495):
 * point priors/posteriors, a disjunction, normalization to 1.0, and
 * Bayes' rule identities for both Toothache and Catch evidence.
 */
protected void test_ToothacheCavityCatchModel(IProbabilityModel model)
{
    Assert.IsTrue(model.isValid());

    AssignmentProposition atoothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, true);
    AssignmentProposition anottoothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, false);
    AssignmentProposition acavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, true);
    AssignmentProposition anotcavity = new AssignmentProposition(
            ExampleRV.CAVITY_RV, false);
    AssignmentProposition acatch = new AssignmentProposition(
            ExampleRV.CATCH_RV, true);
    AssignmentProposition anotcatch = new AssignmentProposition(
            ExampleRV.CATCH_RV, false);

    // AIMA3e pg. 485
    Assert.AreEqual(0.2, model.prior(acavity), DELTA_THRESHOLD);
    Assert.AreEqual(0.6, model.posterior(acavity, atoothache),
            DELTA_THRESHOLD);
    ConjunctiveProposition toothacheAndNotCavity = new ConjunctiveProposition(
            atoothache, anotcavity);
    // Conditioning on evidence that contradicts cavity forces probability 0.
    Assert.AreEqual(0.0,
            model.posterior(acavity, toothacheAndNotCavity),
            DELTA_THRESHOLD);
    Assert.AreEqual(0.0,
            model.posterior(acavity, atoothache, anotcavity),
            DELTA_THRESHOLD);

    // AIMA3e pg. 492
    DisjunctiveProposition cavityOrToothache = new DisjunctiveProposition(
            acavity, atoothache);
    Assert.AreEqual(0.28, model.prior(cavityOrToothache), DELTA_THRESHOLD);

    // AIMA3e pg. 493
    Assert.AreEqual(0.4, model.posterior(anotcavity, atoothache),
            DELTA_THRESHOLD);

    // Distributions over a whole variable must sum to 1.0.
    Assert.AreEqual(1.0, model.prior(ExampleRV.TOOTHACHE_RV),
            DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.prior(ExampleRV.CAVITY_RV),
            DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.prior(ExampleRV.CATCH_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.TOOTHACHE_RV,
            ExampleRV.CAVITY_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.TOOTHACHE_RV,
            ExampleRV.CATCH_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.TOOTHACHE_RV,
            ExampleRV.CAVITY_RV, ExampleRV.CATCH_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CAVITY_RV,
            ExampleRV.TOOTHACHE_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CAVITY_RV,
            ExampleRV.CATCH_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CAVITY_RV,
            ExampleRV.TOOTHACHE_RV, ExampleRV.CATCH_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CATCH_RV,
            ExampleRV.CAVITY_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CATCH_RV,
            ExampleRV.TOOTHACHE_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.CATCH_RV,
            ExampleRV.CAVITY_RV, ExampleRV.TOOTHACHE_RV), DELTA_THRESHOLD);

    // AIMA3e pg. 495 - Bayes' Rule
    // P(b|a) = P(a|b)P(b)/P(a)
    Assert.AreEqual(model.posterior(acavity, atoothache),
            (model.posterior(atoothache, acavity) * model.prior(acavity))
                    / model.prior(atoothache), DELTA_THRESHOLD);
    Assert.AreEqual(
            model.posterior(acavity, anottoothache),
            (model.posterior(anottoothache, acavity) * model.prior(acavity))
                    / model.prior(anottoothache), DELTA_THRESHOLD);
    Assert.AreEqual(
            model.posterior(anotcavity, atoothache),
            (model.posterior(atoothache, anotcavity) * model
                    .prior(anotcavity)) / model.prior(atoothache),
            DELTA_THRESHOLD);
    Assert.AreEqual(
            model.posterior(anotcavity, anottoothache),
            (model.posterior(anottoothache, anotcavity) * model
                    .prior(anotcavity)) / model.prior(anottoothache),
            DELTA_THRESHOLD);
    // Same identities with Catch as the evidence variable.
    Assert.AreEqual(model.posterior(acavity, acatch),
            (model.posterior(acatch, acavity) * model.prior(acavity))
                    / model.prior(acatch), DELTA_THRESHOLD);
    Assert.AreEqual(model.posterior(acavity, anotcatch),
            (model.posterior(anotcatch, acavity) * model.prior(acavity))
                    / model.prior(anotcatch), DELTA_THRESHOLD);
    Assert.AreEqual(model.posterior(anotcavity, acatch),
            (model.posterior(acatch, anotcavity) * model.prior(anotcavity))
                    / model.prior(acatch), DELTA_THRESHOLD);
    Assert.AreEqual(
            model.posterior(anotcavity, anotcatch),
            (model.posterior(anotcatch, anotcavity) * model
                    .prior(anotcavity)) / model.prior(anotcatch),
            DELTA_THRESHOLD);
}
//
// PRIVATE METHODS
//
/**
 * MAKE-FACTOR(var, e): build the factor for var's conditional probability
 * table, restricted by whichever of the evidence assignments mention a
 * variable that the CPT actually contains.
 */
private Factor makeFactor(RandomVariable var, AssignmentProposition[] e,
        BayesianNetwork bn)
{
    Node node = bn.getNode(var);
    FiniteNode finiteNode = node as FiniteNode;
    if (finiteNode == null)
    {
        // Variable elimination is only defined over finite domains here.
        throw new IllegalArgumentException(
                "Elimination-Ask only works with finite Nodes.");
    }

    // Keep only the evidence relevant to this node's CPT.
    List<AssignmentProposition> relevantEvidence = new List<AssignmentProposition>();
    for (int i = 0; i < e.Length; i++)
    {
        if (finiteNode.getCPT().contains(e[i].getTermVariable()))
        {
            relevantEvidence.Add(e[i]);
        }
    }

    return finiteNode.getCPT().getFactorFor(relevantEvidence.ToArray());
}
//
// PROTECTED METHODS
//
/**
 * Verifies a two-fair-dice probability model: per-die and per-combination
 * priors, independence posteriors, constraint propositions (sum, doubles,
 * subsets), normalization, and a disjunction via inclusion-exclusion
 * (AIMA3e pgs. 485, 489).
 */
protected void test_RollingPairFairDiceModel(IProbabilityModel model)
{
    Assert.IsTrue(model.isValid());

    // Ensure each dice has 1/6 probability
    for (int d = 1; d <= 6; d++)
    {
        AssignmentProposition ad1 = new AssignmentProposition(
                ExampleRV.DICE_1_RV, d);
        AssignmentProposition ad2 = new AssignmentProposition(
                ExampleRV.DICE_2_RV, d);

        Assert.AreEqual(1.0 / 6.0, model.prior(ad1), DELTA_THRESHOLD);
        Assert.AreEqual(1.0 / 6.0, model.prior(ad2), DELTA_THRESHOLD);
    }

    // Ensure each combination is 1/36
    for (int d1 = 1; d1 <= 6; d1++)
    {
        for (int d2 = 1; d2 <= 6; d2++)
        {
            AssignmentProposition ad1 = new AssignmentProposition(
                    ExampleRV.DICE_1_RV, d1);
            AssignmentProposition ad2 = new AssignmentProposition(
                    ExampleRV.DICE_2_RV, d2);
            ConjunctiveProposition d1AndD2 = new ConjunctiveProposition(
                    ad1, ad2);

            Assert.AreEqual(1.0 / 6.0, model.prior(ad1), DELTA_THRESHOLD);
            Assert.AreEqual(1.0 / 6.0, model.prior(ad2), DELTA_THRESHOLD);

            // pg. 485 AIMA3e
            Assert.AreEqual(1.0 / 36.0, model.prior(ad1, ad2),
                    DELTA_THRESHOLD);
            Assert.AreEqual(1.0 / 36.0, model.prior(d1AndD2),
                    DELTA_THRESHOLD);

            // Dice are independent: conditioning on one leaves the other
            // at 1/6.
            Assert.AreEqual(1.0 / 6.0, model.posterior(ad1, ad2),
                    DELTA_THRESHOLD);
            Assert.AreEqual(1.0 / 6.0, model.posterior(ad2, ad1),
                    DELTA_THRESHOLD);
        }
    }

    // Test Sets of events defined via constraint propositions
    IntegerSumProposition total11 = new IntegerSumProposition("Total11",
            new FiniteIntegerDomain(11), ExampleRV.DICE_1_RV,
            ExampleRV.DICE_2_RV);
    // Only (5,6) and (6,5) sum to 11.
    Assert.AreEqual(2.0 / 36.0, model.prior(total11), DELTA_THRESHOLD);
    EquivalentProposition doubles = new EquivalentProposition("Doubles",
            ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);
    Assert.AreEqual(1.0 / 6.0, model.prior(doubles), DELTA_THRESHOLD);
    SubsetProposition evenDice1 = new SubsetProposition("EvenDice1",
            new FiniteIntegerDomain(2, 4, 6), ExampleRV.DICE_1_RV);
    Assert.AreEqual(0.5, model.prior(evenDice1), DELTA_THRESHOLD);
    SubsetProposition oddDice2 = new SubsetProposition("OddDice2",
            new FiniteIntegerDomain(1, 3, 5), ExampleRV.DICE_2_RV);
    Assert.AreEqual(0.5, model.prior(oddDice2), DELTA_THRESHOLD);

    // pg. 485 AIMA3e
    AssignmentProposition dice1Is5 = new AssignmentProposition(
            ExampleRV.DICE_1_RV, 5);
    Assert.AreEqual(1.0 / 6.0, model.posterior(doubles, dice1Is5),
            DELTA_THRESHOLD);

    // Whole-variable distributions must sum to 1.0.
    Assert.AreEqual(1.0, model.prior(ExampleRV.DICE_1_RV),
            DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.prior(ExampleRV.DICE_2_RV),
            DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.DICE_1_RV,
            ExampleRV.DICE_2_RV), DELTA_THRESHOLD);
    Assert.AreEqual(1.0, model.posterior(ExampleRV.DICE_2_RV,
            ExampleRV.DICE_1_RV), DELTA_THRESHOLD);

    // Test a disjunctive proposition pg.489
    // P(a OR b) = P(a) + P(b) - P(a AND b)
    // = 1/6 + 1/6 - 1/36
    AssignmentProposition dice2Is5 = new AssignmentProposition(
            ExampleRV.DICE_2_RV, 5);
    DisjunctiveProposition dice1Is5OrDice2Is5 = new DisjunctiveProposition(
            dice1Is5, dice2Is5);
    Assert.AreEqual(1.0 / 6.0 + 1.0 / 6.0 - 1.0 / 36.0,
            model.prior(dice1Is5OrDice2Is5), DELTA_THRESHOLD);
}
//
// PROTECTED
//
/**
 * Verifies the distribution-returning API of a two-fair-dice finite model:
 * prior and joint categorical distributions, constraint-proposition
 * distributions, and posteriors (which for independent dice are uniform).
 */
protected void test_RollingPairFairDiceModel_Distributions(IFiniteProbabilityModel model)
{
    AssignmentProposition ad1_1 = new AssignmentProposition(
            ExampleRV.DICE_1_RV, 1);
    // A fully-assigned proposition yields a one-element distribution.
    ICategoricalDistribution dD1_1 = model.priorDistribution(ad1_1);
    assertArrayEquals(new double[] { 1.0 / 6.0 }, dD1_1.getValues(),
            DELTA_THRESHOLD);

    // P<>(Dice1) is uniform over the six faces.
    ICategoricalDistribution dPriorDice1 = model
            .priorDistribution(ExampleRV.DICE_1_RV);
    assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0,
            1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 }, dPriorDice1.getValues(),
            DELTA_THRESHOLD);

    ICategoricalDistribution dPriorDice2 = model
            .priorDistribution(ExampleRV.DICE_2_RV);
    assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0,
            1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 }, dPriorDice2.getValues(),
            DELTA_THRESHOLD);

    // The joint over both dice has 36 uniform entries of 1/36, in either
    // variable order.
    ICategoricalDistribution dJointDice1Dice2 = model.jointDistribution(
            ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);
    Assert.AreEqual(36, dJointDice1Dice2.getValues().Length);
    for (int i = 0; i < dJointDice1Dice2.getValues().Length; i++)
    {
        Assert.AreEqual(1.0 / 36.0, dJointDice1Dice2.getValues()[i],
                DELTA_THRESHOLD);
    }

    ICategoricalDistribution dJointDice2Dice1 = model.jointDistribution(
            ExampleRV.DICE_2_RV, ExampleRV.DICE_1_RV);
    Assert.AreEqual(36, dJointDice2Dice1.getValues().Length);
    for (int i = 0; i < dJointDice2Dice1.getValues().Length; i++)
    {
        Assert.AreEqual(1.0 / 36.0, dJointDice2Dice1.getValues()[i],
                DELTA_THRESHOLD);
    }

    //
    // Test Sets of events
    IntegerSumProposition total11 = new IntegerSumProposition("Total",
            new FiniteIntegerDomain(11), ExampleRV.DICE_1_RV,
            ExampleRV.DICE_2_RV);
    // P<>(Total = 11) = <2.0/36.0>
    assertArrayEquals(new double[] { 2.0 / 36.0 },
            model.priorDistribution(total11).getValues(), DELTA_THRESHOLD);
    // P<>(Dice1, Total = 11)
    // = <0.0, 0.0, 0.0, 0.0, 1.0/36.0, 1.0/36.0>
    assertArrayEquals(new double[] { 0, 0, 0, 0, 1.0 / 36.0, 1.0 / 36.0 },
            model.priorDistribution(ExampleRV.DICE_1_RV, total11)
                    .getValues(), DELTA_THRESHOLD);
    EquivalentProposition doubles = new EquivalentProposition("Doubles",
            ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);
    // P(Doubles) = <1.0/6.0>
    assertArrayEquals(new double[] { 1.0 / 6.0 }, model
            .priorDistribution(doubles).getValues(), DELTA_THRESHOLD);

    //
    // Test posterior
    //
    // P<>(Dice1 | Total = 11)
    // = <0.0, 0.0, 0.0, 0.0, 0.5, 0.5>
    assertArrayEquals(new double[] { 0, 0, 0, 0, 0.5, 0.5 }, model
            .posteriorDistribution(ExampleRV.DICE_1_RV, total11)
            .getValues(), DELTA_THRESHOLD);
    // P<>(Dice1 | Doubles) = <1/6, 1/6, 1/6, 1/6, 1/6, 1/6>
    assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0,
            1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 }, model
            .posteriorDistribution(ExampleRV.DICE_1_RV, doubles)
            .getValues(), DELTA_THRESHOLD);

    // Independence: conditioning one die on the other leaves every entry
    // at 1/6.
    ICategoricalDistribution dPosteriorDice1GivenDice2 = model
            .posteriorDistribution(ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);
    Assert.AreEqual(36, dPosteriorDice1GivenDice2.getValues().Length);
    for (int i = 0; i < dPosteriorDice1GivenDice2.getValues().Length; i++)
    {
        Assert.AreEqual(1.0 / 6.0,
                dPosteriorDice1GivenDice2.getValues()[i], DELTA_THRESHOLD);
    }

    ICategoricalDistribution dPosteriorDice2GivenDice1 = model
            .posteriorDistribution(ExampleRV.DICE_2_RV, ExampleRV.DICE_1_RV);
    Assert.AreEqual(36, dPosteriorDice2GivenDice1.getValues().Length);
    for (int i = 0; i < dPosteriorDice2GivenDice1.getValues().Length; i++)
    {
        Assert.AreEqual(1.0 / 6.0,
                dPosteriorDice2GivenDice1.getValues()[i], DELTA_THRESHOLD);
    }
}
/**
 * Verifies the distribution-returning API of the Toothache/Cavity/Catch
 * finite model against the equation numbers in AIMA3e Ch. 13: conditional
 * independence identities (13.17-13.19), the product rule, and the
 * distribution form of Bayes' rule (13.3, pg. 496), both unconditioned
 * and conditionalized on background evidence.
 */
protected void test_ToothacheCavityCatchModel_Distributions(IFiniteProbabilityModel model)
{
    AssignmentProposition atoothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, true);
    AssignmentProposition anottoothache = new AssignmentProposition(
            ExampleRV.TOOTHACHE_RV, false);
    AssignmentProposition acatch = new AssignmentProposition(
            ExampleRV.CATCH_RV, true);
    AssignmentProposition anotcatch = new AssignmentProposition(
            ExampleRV.CATCH_RV, false);

    // AIMA3e pg. 493
    // P<>(Cavity | toothache) = <0.6, 0.4>
    assertArrayEquals(new double[] { 0.6, 0.4 }, model
            .posteriorDistribution(ExampleRV.CAVITY_RV, atoothache)
            .getValues(), DELTA_THRESHOLD);

    // AIMA3e pg. 497
    // P<>(Cavity | toothache AND catch) = <0.871, 0.129>
    assertArrayEquals(
            new double[] { 0.8709677419354839, 0.12903225806451615 },
            model.posteriorDistribution(ExampleRV.CAVITY_RV, atoothache,
                    acatch).getValues(), DELTA_THRESHOLD);

    // AIMA3e pg. 498
    // (13.17)
    // P<>(toothache AND catch | Cavity)
    // = P<>(toothache | Cavity)P<>(catch | Cavity)
    ConjunctiveProposition toothacheAndCatch = new ConjunctiveProposition(
            atoothache, acatch);
    assertArrayEquals(model.posteriorDistribution(toothacheAndCatch,
            ExampleRV.CAVITY_RV).getValues(),
            model.posteriorDistribution(atoothache, ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.posteriorDistribution(acatch,
                                    ExampleRV.CAVITY_RV)).getValues(),
            DELTA_THRESHOLD);

    // (13.18)
    // P<>(Cavity | toothache AND catch)
    // = αP<>(toothache | Cavity)P<>(catch | Cavity)P(Cavity)
    assertArrayEquals(model.posteriorDistribution(ExampleRV.CAVITY_RV,
            toothacheAndCatch).getValues(),
            model.posteriorDistribution(atoothache, ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.posteriorDistribution(acatch,
                                    ExampleRV.CAVITY_RV))
                    .multiplyBy(
                            model.priorDistribution(ExampleRV.CAVITY_RV))
                    .normalize().getValues(), DELTA_THRESHOLD);

    // (13.19)
    // P<>(Toothache, Catch | Cavity)
    // = P<>(Toothache | Cavity)P<>(Catch | Cavity)
    ConjunctiveProposition toothacheAndCatchRV = new ConjunctiveProposition(
            ExampleRV.TOOTHACHE_RV, ExampleRV.CATCH_RV);
    assertArrayEquals(model.posteriorDistribution(toothacheAndCatchRV,
            ExampleRV.CAVITY_RV).getValues(),
            model.posteriorDistribution(ExampleRV.TOOTHACHE_RV,
                    ExampleRV.CAVITY_RV)
                    .multiplyByPOS(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV),
                            ExampleRV.TOOTHACHE_RV, ExampleRV.CATCH_RV,
                            ExampleRV.CAVITY_RV).getValues(),
            DELTA_THRESHOLD);

    // (product rule)
    // P<>(Toothache, Catch, Cavity)
    // = P<>(Toothache, Catch | Cavity)P<>(Cavity)
    assertArrayEquals(model.priorDistribution(ExampleRV.TOOTHACHE_RV,
            ExampleRV.CATCH_RV, ExampleRV.CAVITY_RV).getValues(),
            model.posteriorDistribution(toothacheAndCatchRV,
                    ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.priorDistribution(ExampleRV.CAVITY_RV))
                    .getValues(), DELTA_THRESHOLD);

    // (using 13.19)
    // P<>(Toothache, Catch | Cavity)P<>(Cavity)
    // = P<>(Toothache | Cavity)P<>(Catch | Cavity)P<>(Cavity)
    assertArrayEquals(model.posteriorDistribution(toothacheAndCatchRV,
            ExampleRV.CAVITY_RV)
            .multiplyBy(
                    model.priorDistribution(ExampleRV.CAVITY_RV))
            .getValues(),
            model.posteriorDistribution(ExampleRV.TOOTHACHE_RV,
                    ExampleRV.CAVITY_RV)
                    .multiplyByPOS(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV)
                                    .multiplyBy(
                                            model.priorDistribution(ExampleRV.CAVITY_RV)),
                            ExampleRV.TOOTHACHE_RV, ExampleRV.CATCH_RV,
                            ExampleRV.CAVITY_RV).getValues(),
            DELTA_THRESHOLD);

    //
    // P<>(Toothache, Catch, Cavity)
    // = P<>(Toothache | Cavity)P<>(Catch | Cavity)P<>(Cavity)
    assertArrayEquals(model.priorDistribution(ExampleRV.TOOTHACHE_RV,
            ExampleRV.CATCH_RV, ExampleRV.CAVITY_RV).getValues(),
            model.posteriorDistribution(ExampleRV.TOOTHACHE_RV,
                    ExampleRV.CAVITY_RV)
                    .multiplyByPOS(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV),
                            ExampleRV.TOOTHACHE_RV, ExampleRV.CATCH_RV,
                            ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.priorDistribution(ExampleRV.CAVITY_RV))
                    .getValues(), DELTA_THRESHOLD);

    // AIMA3e pg. 496
    // General case of Bayes' Rule
    // P<>(Y | X) = P<>(X | Y)P<>(Y)/P<>(X)
    // Note: Performing in this order -
    // P<>(Y | X) = (P<>(Y)P<>(X | Y))/P<>(X)
    // as default multiplication of distributions are not commutative (could
    // also use pointwiseProductPOS() to specify the order).
    assertArrayEquals(model.posteriorDistribution(ExampleRV.CAVITY_RV,
            ExampleRV.TOOTHACHE_RV).getValues(),
            model.priorDistribution(ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.posteriorDistribution(
                                    ExampleRV.TOOTHACHE_RV,
                                    ExampleRV.CAVITY_RV))
                    .divideBy(
                            model.priorDistribution(ExampleRV.TOOTHACHE_RV))
                    .getValues(), DELTA_THRESHOLD);
    assertArrayEquals(
            model.posteriorDistribution(ExampleRV.CAVITY_RV,
                    ExampleRV.CATCH_RV).getValues(),
            model.priorDistribution(ExampleRV.CAVITY_RV)
                    .multiplyBy(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV))
                    .divideBy(model.priorDistribution(ExampleRV.CATCH_RV))
                    .getValues(), DELTA_THRESHOLD);

    // General Bayes' Rule conditionalized on background evidence e (13.3)
    // P<>(Y | X, e) = P<>(X | Y, e)P<>(Y|e)/P<>(X | e)
    // Note: Performing in this order -
    // P<>(Y | X, e) = (P<>(Y|e)P<>(X | Y, e)))/P<>(X | e)
    // as default multiplication of distributions are not commutative (could
    // also use pointwiseProductPOS() to specify the order).
    assertArrayEquals(
            model.posteriorDistribution(ExampleRV.CAVITY_RV,
                    ExampleRV.TOOTHACHE_RV, acatch).getValues(),
            model.posteriorDistribution(ExampleRV.CAVITY_RV, acatch)
                    .multiplyBy(
                            model.posteriorDistribution(
                                    ExampleRV.TOOTHACHE_RV,
                                    ExampleRV.CAVITY_RV, acatch))
                    .divideBy(
                            model.posteriorDistribution(
                                    ExampleRV.TOOTHACHE_RV, acatch))
                    .getValues(), DELTA_THRESHOLD);
    //
    assertArrayEquals(
            model.posteriorDistribution(ExampleRV.CAVITY_RV,
                    ExampleRV.TOOTHACHE_RV, anotcatch).getValues(),
            model.posteriorDistribution(ExampleRV.CAVITY_RV, anotcatch)
                    .multiplyBy(
                            model.posteriorDistribution(
                                    ExampleRV.TOOTHACHE_RV,
                                    ExampleRV.CAVITY_RV, anotcatch))
                    .divideBy(
                            model.posteriorDistribution(
                                    ExampleRV.TOOTHACHE_RV, anotcatch))
                    .getValues(), DELTA_THRESHOLD);
    //
    assertArrayEquals(
            model.posteriorDistribution(ExampleRV.CAVITY_RV,
                    ExampleRV.CATCH_RV, atoothache).getValues(),
            model.posteriorDistribution(ExampleRV.CAVITY_RV, atoothache)
                    .multiplyBy(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV, atoothache))
                    .divideBy(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    atoothache)).getValues(),
            DELTA_THRESHOLD);
    assertArrayEquals(
            model.posteriorDistribution(ExampleRV.CAVITY_RV,
                    ExampleRV.CATCH_RV, anottoothache).getValues(),
            model.posteriorDistribution(ExampleRV.CAVITY_RV, anottoothache)
                    .multiplyBy(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    ExampleRV.CAVITY_RV, anottoothache))
                    .divideBy(
                            model.posteriorDistribution(ExampleRV.CATCH_RV,
                                    anottoothache)).getValues(),
            DELTA_THRESHOLD);
}
// // START-BayesInference
/**
 * Answer the query exactly by delegating to the variable-elimination
 * algorithm (ELIMINATION-ASK).
 */
public CategoricalDistribution ask(RandomVariable[] X,
        AssignmentProposition[] observedEvidence, BayesianNetwork bn)
    => this.eliminationAsk(X, observedEvidence, bn);