public ICategoricalDistribution jointDistribution(params IProposition[] propositions)
{
    ProbabilityTable d = null;
    IProposition conjProp = ProbUtil.constructConjunction(propositions);
    ISet<IRandomVariable> vars = CollectionFactory.CreateSet<IRandomVariable>(conjProp.getUnboundScope());

    if (vars.Size() > 0)
    {
        IRandomVariable[] distVars = vars.ToArray();
        ProbabilityTable ud = new ProbabilityTable(distVars);
        object[] values = new object[vars.Size()];

        ProbabilityTable.ProbabilityTableIterator di = new ProbabilityTableIterator(conjProp, ud, values, vars);
        distribution.iterateOverTable(di);
        d = ud;
    }
    else
    {
        // No unbound variables, therefore just return
        // the singular probability related to the proposition.
        d = new ProbabilityTable();
        d.setValue(0, prior(propositions));
    }
    return d;
}
public virtual ICategoricalDistribution jointDistribution(params IProposition[] propositions)
{
    ProbabilityTable d = null;
    IProposition conjProp = ProbUtil.constructConjunction(propositions);
    ISet<IRandomVariable> vars = CollectionFactory.CreateSet<IRandomVariable>(conjProp.getUnboundScope());

    if (vars.Size() > 0)
    {
        IRandomVariable[] distVars = new IRandomVariable[vars.Size()];
        int i = 0;
        foreach (IRandomVariable rv in vars)
        {
            distVars[i] = rv;
            ++i;
        }

        ProbabilityTable ud = new ProbabilityTable(distVars);
        object[] values = new object[vars.Size()];

        CategoricalDistributionIterator di = new CategoricalDistributionIteratorJointDistribution(conjProp, vars, ud, values);

        IRandomVariable[] X = conjProp.getScope().ToArray();
        bayesInference.Ask(X, new AssignmentProposition[0], bayesNet).iterateOver(di);
        d = ud;
    }
    else
    {
        // No unbound variables, therefore just return
        // the singular probability related to the proposition.
        d = new ProbabilityTable();
        d.setValue(0, prior(propositions));
    }
    return d;
}
public virtual ICategoricalDistribution posteriorDistribution(IProposition phi, params IProposition[] evidence)
{
    IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

    // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
    ICategoricalDistribution dAandB = jointDistribution(phi, conjEvidence);
    ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

    ICategoricalDistribution rVal = dAandB.divideBy(dEvidence);
    // Note: Need to ensure normalize() is called in order to handle the
    // case where an approximate algorithm is used (i.e. the division won't
    // come out even, as the numerator and denominator are calculated on
    // separate approximate runs). However, this should only be done if all
    // of the evidence's scope is bound (if not, you are in essence
    // returning a set of conditional distributions, which you do not want
    // normalized).
    bool unboundEvidence = false;
    foreach (IProposition e in evidence)
    {
        if (e.getUnboundScope().Size() > 0)
        {
            unboundEvidence = true;
            break;
        }
    }
    if (!unboundEvidence)
    {
        rVal.normalize();
    }
    return rVal;
}
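// Illustrative usage sketch (added for clarity; not part of the original
// source). It assumes a finite probability model 'model' exposing the
// posteriorDistribution above; the variable names Cavity and Toothache are
// hypothetical placeholders.
//
//     RandVar cavity = new RandVar("Cavity", new BooleanDomain());
//     RandVar toothache = new RandVar("Toothache", new BooleanDomain());
//     // P(Cavity | Toothache = true), a distribution over {true, false}
//     ICategoricalDistribution d = model.posteriorDistribution(
//         cavity, new AssignmentProposition(toothache, true));
//     double pCavityGivenToothache = d.getValues()[0];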
private void sampleFromTransitionModel(int i)
{
    // x <- an event initialized with S[i]
    IMap<IRandomVariable, object> x = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();
    for (int n = 0; n < S[i].Length; n++)
    {
        AssignmentProposition x1 = S[i][n];
        x.Put(this.dbn.GetX_1_to_X_0().Get(x1.getTermVariable()), x1.getValue());
    }

    // foreach variable X<sub>1<sub>i</sub></sub> in
    // X<sub>1<sub>1</sub></sub>,...,X<sub>1<sub>n</sub></sub> do
    foreach (IRandomVariable X1_i in dbn.GetX_1_VariablesInTopologicalOrder())
    {
        // x1[i] <- a random sample from
        // <b>P</b>(X<sub>1<sub>i</sub></sub> |
        // parents(X<sub>1<sub>i</sub></sub>))
        x.Put(X1_i, ProbUtil.randomSample(dbn.GetNode(X1_i), x, randomizer));
    }

    // S[i] <- sample from <b>P</b>(<b>X</b><sub>1</sub> |
    // <b>X</b><sub>0</sub> = S[i])
    for (int n = 0; n < S_tp1[i].Length; n++)
    {
        AssignmentProposition x1 = S_tp1[i][n];
        x1.setValue(x.Get(x1.getTermVariable()));
    }
}
/**
 * Taken {@code weightedSampleWithReplacement} out of {@link ParticleFiltering}
 * and extended with a minimum weight.
 *
 * @param samples the samples to be re-sampled.
 * @param w the probability distribution on the samples.
 * @return the new set of samples.
 */
protected ISet<P> extendedWeightedSampleWithReplacement(ISet<P> samples, double[] w)
{
    int i = 0;
    for (; i < samples.Size(); ++i)
    {
        if (w[i] > weightCutOff)
        {
            break;
        }
    }
    if (i >= samples.Size())
    {
        // If all samples fall below weightCutOff, we are lost;
        // generate a fresh set of samples.
        return generateCloud(samples.Size());
    }

    // WEIGHTED-SAMPLE-WITH-REPLACEMENT:
    double[] normalizedW = Util.normalize(w);
    ISet<P> newSamples = CollectionFactory.CreateSet<P>();
    P[] array = samples.ToArray();
    for (i = 0; i < samples.Size(); ++i)
    {
        int selectedSample = (int)ProbUtil.sample(randomizer.NextDouble(), sampleIndexes, normalizedW);
        newSamples.Add(array[selectedSample].Clone());
    }
    return newSamples;
}
// function WEIGHTED-SAMPLE(bn, e) returns an event and a weight
/**
 * The WEIGHTED-SAMPLE function in Figure 14.15.
 *
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @return <b>x</b>, w - an event with its associated weight.
 */
public Pair<IMap<IRandomVariable, object>, double> weightedSample(IBayesianNetwork bn, AssignmentProposition[] e)
{
    // w <- 1;
    double w = 1.0;
    // <b>x</b> <- an event with n elements initialized from e
    IMap<IRandomVariable, object> x = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();
    foreach (AssignmentProposition ap in e)
    {
        x.Put(ap.getTermVariable(), ap.getValue());
    }

    // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
    foreach (IRandomVariable Xi in bn.GetVariablesInTopologicalOrder())
    {
        // if X<sub>i</sub> is an evidence variable with value x<sub>i</sub> in e
        if (x.ContainsKey(Xi))
        {
            // then w <- w * P(X<sub>i</sub> = x<sub>i</sub> | parents(X<sub>i</sub>))
            w *= bn.GetNode(Xi)
                   .GetCPD()
                   .GetValue(ProbUtil.getEventValuesForXiGivenParents(bn.GetNode(Xi), x));
        }
        else
        {
            // else <b>x</b>[i] <- a random sample from
            // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
            x.Put(Xi, ProbUtil.randomSample(bn.GetNode(Xi), x, randomizer));
        }
    }
    // return <b>x</b>, w
    return new Pair<IMap<IRandomVariable, object>, double>(x, w);
}
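// A minimal, self-contained sketch (added for illustration, independent of
// the library types above) of what WEIGHTED-SAMPLE does on a hypothetical
// two-variable chain Rain -> WetGrass with WetGrass = true observed: sample
// the unobserved variable from its prior, then weight by the likelihood of
// the evidence given that sample. All numbers are made up.
private static (bool rain, double weight) WeightedSampleSketch(Random rng)
{
    const double pRain = 0.2;                          // P(Rain = true), made up
    double PWetGivenRain(bool r) => r ? 0.9 : 0.1;     // P(WetGrass = true | Rain), made up

    bool rain = rng.NextDouble() < pRain;              // Rain is unobserved: sample it
    double w = PWetGivenRain(rain);                    // WetGrass = true is observed: weight
    return (rain, w);
}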
// function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)
/**
 * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
 * evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param Nsamples
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public ICategoricalDistribution gibbsAsk(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int Nsamples)
{
    // local variables: <b>N</b>, a vector of counts for each value of X,
    // initially zero
    double[] N = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];
    // Z, the nonevidence variables in bn
    ISet<IRandomVariable> Z = CollectionFactory.CreateSet<IRandomVariable>(bn.GetVariablesInTopologicalOrder());
    foreach (AssignmentProposition ap in e)
    {
        Z.Remove(ap.getTermVariable());
    }
    // <b>x</b>, the current state of the network, initially copied from e
    IMap<IRandomVariable, object> x = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();
    foreach (AssignmentProposition ap in e)
    {
        x.Put(ap.getTermVariable(), ap.getValue());
    }

    // initialize <b>x</b> with random values for the variables in Z
    foreach (IRandomVariable Zi in Z)
    {
        x.Put(Zi, ProbUtil.randomSample(bn.GetNode(Zi), x, randomizer));
    }

    // for j = 1 to N do
    for (int j = 0; j < Nsamples; j++)
    {
        // for each Z<sub>i</sub> in Z do
        foreach (IRandomVariable Zi in Z)
        {
            // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
            // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>))
            x.Put(Zi, ProbUtil.mbRandomSample(bn.GetNode(Zi), x, randomizer));
        }
        // Note: the count update is moved outside of the previous loop
        // (Figure 14.16 places it inside), as the in-loop placement only
        // works correctly in the case of a single query variable X. With
        // multiple query variables, rare events would get weighted
        // incorrectly if counted inside the loop. In the single-variable
        // case this does not happen, as each possible value gets counted
        // |Z| times inside the loop, ending up with the same ratios when
        // normalized (i.e. it is still more efficient to place it outside
        // the loop).
        //
        // <b>N</b>[x] <- <b>N</b>[x] + 1
        // where x is the value of X in <b>x</b>
        N[ProbUtil.indexOf(X, x)] += 1.0;
    }
    // return NORMALIZE(<b>N</b>)
    return new ProbabilityTable(N, X).normalize();
}
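// A minimal, self-contained sketch (added for illustration, independent of
// the library) of the Markov-blanket resampling step that gibbsAsk performs,
// on a hypothetical chain Cloudy -> Rain with Rain = true observed. Since
// Rain has no other parents or children here, mb(Cloudy) = {Rain} and
// P(Cloudy | Rain = true) is proportional to P(Cloudy) * P(Rain = true | Cloudy).
// All numbers are made up.
private static bool MbResampleCloudySketch(Random rng)
{
    const double pCloudy = 0.5;                        // P(Cloudy), made up
    double PRainGivenCloudy(bool c) => c ? 0.8 : 0.2;  // P(Rain = true | Cloudy), made up

    // Unnormalized posterior weights for Cloudy = true / false.
    double wTrue = pCloudy * PRainGivenCloudy(true);
    double wFalse = (1 - pCloudy) * PRainGivenCloudy(false);

    // Sample Cloudy in proportion to the weights.
    return rng.NextDouble() < wTrue / (wTrue + wFalse);
}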
public ICategoricalDistribution posteriorDistribution(IProposition phi, params IProposition[] evidence)
{
    IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

    // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
    ICategoricalDistribution dAandB = jointDistribution(phi, conjEvidence);
    ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

    return dAandB.divideBy(dEvidence);
}
public void test_indexesOfValue()
{
    RandVar X = new RandVar("X", new BooleanDomain());
    RandVar Y = new RandVar("Y", new ArbitraryTokenDomain("A", "B", "C"));
    RandVar Z = new RandVar("Z", new BooleanDomain());

    // An ordered X,Y,Z enumeration of values should look like:
    // 00: true, A, true
    // 01: true, A, false
    // 02: true, B, true
    // 03: true, B, false
    // 04: true, C, true
    // 05: true, C, false
    // 06: false, A, true
    // 07: false, A, false
    // 08: false, B, true
    // 09: false, B, false
    // 10: false, C, true
    // 11: false, C, false
    IRandomVariable[] vars = new IRandomVariable[] { X, Y, Z };
    IMap<IRandomVariable, object> even = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();

    even.Put(X, true);
    CollectionAssert.AreEqual(new int[] { 0, 1, 2, 3, 4, 5 }, ProbUtil.indexesOfValue(vars, 0, even));
    even.Put(X, false);
    CollectionAssert.AreEqual(new int[] { 6, 7, 8, 9, 10, 11 }, ProbUtil.indexesOfValue(vars, 0, even));

    even.Put(Y, "A");
    CollectionAssert.AreEqual(new int[] { 0, 1, 6, 7 }, ProbUtil.indexesOfValue(vars, 1, even));
    even.Put(Y, "B");
    CollectionAssert.AreEqual(new int[] { 2, 3, 8, 9 }, ProbUtil.indexesOfValue(vars, 1, even));
    even.Put(Y, "C");
    CollectionAssert.AreEqual(new int[] { 4, 5, 10, 11 }, ProbUtil.indexesOfValue(vars, 1, even));

    even.Put(Z, true);
    CollectionAssert.AreEqual(new int[] { 0, 2, 4, 6, 8, 10 }, ProbUtil.indexesOfValue(vars, 2, even));
    even.Put(Z, false);
    CollectionAssert.AreEqual(new int[] { 1, 3, 5, 7, 9, 11 }, ProbUtil.indexesOfValue(vars, 2, even));
}
// function PRIOR-SAMPLE(bn) returns an event sampled from the prior
// specified by bn
/**
 * The PRIOR-SAMPLE algorithm in Figure 14.13. A sampling algorithm that
 * generates events from a Bayesian network. Each variable is sampled
 * according to the conditional distribution given the values already
 * sampled for the variable's parents.
 *
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @return an event sampled from the prior specified by bn
 */
public IMap<IRandomVariable, object> priorSample(IBayesianNetwork bn)
{
    // x <- an event with n elements
    IMap<IRandomVariable, object> x = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();
    // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
    foreach (IRandomVariable Xi in bn.GetVariablesInTopologicalOrder())
    {
        // x[i] <- a random sample from
        // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
        x.Put(Xi, ProbUtil.randomSample(bn.GetNode(Xi), x, randomizer));
    }
    // return x
    return x;
}
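// A minimal, self-contained sketch (added for illustration, independent of
// the library) of prior sampling on a hypothetical two-node network
// Cloudy -> Rain with made-up probabilities: each variable is sampled in
// topological order, conditioning on the parent value already drawn.
private static (bool cloudy, bool rain) PriorSampleSketch(Random rng)
{
    bool cloudy = rng.NextDouble() < 0.5;          // P(Cloudy) = 0.5, made up
    double pRainGivenCloudy = cloudy ? 0.8 : 0.2;  // P(Rain | Cloudy), made up
    bool rain = rng.NextDouble() < pRainGivenCloudy;
    return (cloudy, rain);
}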
public virtual double posterior(IProposition phi, params IProposition[] evidence)
{
    IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

    // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
    IProposition aAndB = new ConjunctiveProposition(phi, conjEvidence);
    double probabilityOfEvidence = prior(conjEvidence);
    if (0 != probabilityOfEvidence)
    {
        return prior(aAndB) / probabilityOfEvidence;
    }
    return 0;
}
// function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
// <b>P</b>(X|e)
/**
 * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering
 * queries given evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network specifying joint distribution
 *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
 * @param N
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public ICategoricalDistribution likelihoodWeighting(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int N)
{
    // local variables: W, a vector of weighted counts for each value of X,
    // initially zero
    double[] W = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

    // for j = 1 to N do
    for (int j = 0; j < N; j++)
    {
        // <b>x</b>,w <- WEIGHTED-SAMPLE(bn,e)
        Pair<IMap<IRandomVariable, object>, double> x_w = weightedSample(bn, e);
        // W[x] <- W[x] + w where x is the value of X in <b>x</b>
        W[ProbUtil.indexOf(X, x_w.GetFirst())] += x_w.GetSecond();
    }
    // return NORMALIZE(W)
    return new ProbabilityTable(W, X).normalize();
}
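// Illustrative usage sketch (added; not part of the original source), with
// hypothetical variable names 'rain' and 'wetGrass' and a network 'bn'
// constructed elsewhere:
//
//     // Estimate P(Rain | WetGrass = true) from 1000 weighted samples.
//     ICategoricalDistribution estimate = likelihoodWeighting(
//         new IRandomVariable[] { rain },
//         new AssignmentProposition[] { new AssignmentProposition(wetGrass, true) },
//         bn,
//         1000);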
public void test_randomVariableName()
{
    string[] names = new[]
    {
        "B\ta\t\nf", "B___\n?", null, "a ", " b", "_A1",
        "Aa \tb c d e", "12asb", "33", "-A\t\b", "-_-"
    };
    foreach (string name in names)
    {
        try
        {
            ProbUtil.checkValidRandomVariableName(name);
            Assert.Fail("Invalid name string not caught!");
        }
        catch (Exception)
        {
            // expected: invalid names must throw
        }
    }

    ProbUtil.checkValidRandomVariableName("A");
    ProbUtil.checkValidRandomVariableName("A1");
    ProbUtil.checkValidRandomVariableName("A1_2");
    ProbUtil.checkValidRandomVariableName("A_a");
}
public virtual double prior(params IProposition[] phi)
{
    // Calculating the prior, therefore no relevant evidence;
    // just query over the scope of proposition phi in order
    // to get a joint distribution for these.
    IProposition conjunct = ProbUtil.constructConjunction(phi);
    IRandomVariable[] X = conjunct.getScope().ToArray();
    ICategoricalDistribution d = bayesInference.Ask(X, new AssignmentProposition[0], bayesNet);

    // Then calculate the probability of the propositions phi
    // by seeing where they hold.
    double[] probSum = new double[1];
    CategoricalDistributionIterator di = new CategoricalDistributionIteraorPrior(conjunct, probSum);
    d.iterateOver(di);

    return probSum[0];
}
/**
 * The population is re-sampled to generate a new population of N samples.
 * Each new sample is selected from the current population; the probability
 * that a particular sample is selected is proportional to its weight. The
 * new samples are un-weighted.
 *
 * @param N
 *            the number of samples
 * @param S
 *            a vector of samples of size N, where each sample is a vector
 *            of assignment propositions for the X_1 state variables, which
 *            is intended to represent the sample for time t
 * @param W
 *            a vector of weights of size N
 *
 * @return a new vector of samples of size N sampled from S based on W
 */
private AssignmentProposition[][] weightedSampleWithReplacement(int N, AssignmentProposition[][] S, double[] W)
{
    AssignmentProposition[][] newS = new AssignmentProposition[N][];

    double[] normalizedW = Util.normalize(W);

    for (int i = 0; i < N; ++i)
    {
        newS[i] = new AssignmentProposition[this.dbn.GetX_0().Size()];
        int sample = (int)ProbUtil.sample(randomizer.NextDouble(), sampleIndexes, normalizedW);
        for (int idx = 0; idx < S_tp1[sample].Length; idx++)
        {
            AssignmentProposition ap = S_tp1[sample][idx];
            newS[i][idx] = new AssignmentProposition(ap.getTermVariable(), ap.getValue());
        }
    }
    return newS;
}
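// A minimal, self-contained sketch (added for illustration, independent of
// the library) of weighted resampling with replacement on plain arrays:
// treat the weights as an unnormalized cumulative distribution and draw
// each new sample index by inverting it with a uniform random number.
private static int[] ResampleIndicesSketch(double[] w, int n, Random rng)
{
    double total = 0;
    foreach (double wi in w) total += wi;

    int[] selected = new int[n];
    for (int i = 0; i < n; i++)
    {
        double u = rng.NextDouble() * total;  // a draw on the unnormalized CDF
        double cumulative = 0;
        int pick = w.Length - 1;              // fallback guards against round-off
        for (int j = 0; j < w.Length; j++)
        {
            cumulative += w[j];
            if (u <= cumulative) { pick = j; break; }
        }
        selected[i] = pick;
    }
    return selected;
}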
public ICategoricalDistribution forward(ICategoricalDistribution f1_t, ICollection<AssignmentProposition> e_tp1)
{
    ICategoricalDistribution s1 = new ProbabilityTable(f1_t.getFor());

    // Set up required working variables
    IProposition[] props = new IProposition[s1.getFor().Size()];
    int i = 0;
    foreach (IRandomVariable rv in s1.getFor())
    {
        props[i] = new RandVar(rv.getName(), rv.getDomain());
        ++i;
    }
    IProposition Xtp1 = ProbUtil.constructConjunction(props);

    AssignmentProposition[] xt = new AssignmentProposition[tToTm1StateVarMap.Size()];
    IMap<IRandomVariable, AssignmentProposition> xtVarAssignMap = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, AssignmentProposition>();
    i = 0;
    foreach (IRandomVariable rv in tToTm1StateVarMap.GetKeys())
    {
        xt[i] = new AssignmentProposition(tToTm1StateVarMap.Get(rv), "<Dummy Value>");
        xtVarAssignMap.Put(rv, xt[i]);
        ++i;
    }

    // Step 1: Calculate the 1 time step prediction
    // ∑<sub>x<sub>t</sub></sub>
    CategoricalDistributionIterator if1_t = new CategoricalDistributionIteratorImpl(transitionModel, xtVarAssignMap, s1, Xtp1, xt);
    f1_t.iterateOver(if1_t);

    // Step 2: Multiply by the probability of the evidence and normalize
    // <b>P</b>(e<sub>t+1</sub> | X<sub>t+1</sub>)
    ICategoricalDistribution s2 = sensorModel.posteriorDistribution(ProbUtil.constructConjunction(e_tp1.ToArray()), Xtp1);

    return s2.multiplyBy(s1).normalize();
}
// function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
// <b>P</b>(X|e)
/**
 * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering queries
 * given evidence in a Bayesian Network.
 *
 * @param X
 *            the query variables
 * @param e
 *            observed values for variables E
 * @param bn
 *            a Bayesian network
 * @param Nsamples
 *            the total number of samples to be generated
 * @return an estimate of <b>P</b>(X|e)
 */
public ICategoricalDistribution rejectionSampling(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int Nsamples)
{
    // local variables: <b>N</b>, a vector of counts for each value of X,
    // initially zero
    double[] N = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

    // for j = 1 to N do
    for (int j = 0; j < Nsamples; j++)
    {
        // <b>x</b> <- PRIOR-SAMPLE(bn)
        IMap<IRandomVariable, object> x = ps.priorSample(bn);
        // if <b>x</b> is consistent with e then
        if (isConsistent(x, e))
        {
            // <b>N</b>[x] <- <b>N</b>[x] + 1
            // where x is the value of X in <b>x</b>
            N[ProbUtil.indexOf(X, x)] += 1.0;
        }
    }
    // return NORMALIZE(<b>N</b>)
    return new ProbabilityTable(N, X).normalize();
}
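// A minimal, self-contained sketch (added for illustration, independent of
// the library) of rejection sampling on the hypothetical Cloudy -> Rain
// chain used in the earlier sketches: draw prior samples, keep only those
// consistent with the evidence Rain = true, and estimate
// P(Cloudy = true | Rain = true) from the kept samples.
private static double RejectionSamplingSketch(int nSamples, Random rng)
{
    int kept = 0, cloudyAndKept = 0;
    for (int j = 0; j < nSamples; j++)
    {
        bool cloudy = rng.NextDouble() < 0.5;                 // P(Cloudy) = 0.5, made up
        bool rain = rng.NextDouble() < (cloudy ? 0.8 : 0.2);  // P(Rain | Cloudy), made up
        if (rain)                                             // reject samples inconsistent with e
        {
            kept++;
            if (cloudy) cloudyAndKept++;
        }
    }
    return kept > 0 ? (double)cloudyAndKept / kept : 0.0;
}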
public ICategoricalDistribution backward(ICategoricalDistribution b_kp2t, ICollection<AssignmentProposition> e_kp1)
{
    ICategoricalDistribution b_kp1t = new ProbabilityTable(b_kp2t.getFor());

    // Set up required working variables
    IProposition[] props = new IProposition[b_kp1t.getFor().Size()];
    int i = 0;
    foreach (IRandomVariable rv in b_kp1t.getFor())
    {
        IRandomVariable prv = tToTm1StateVarMap.Get(rv);
        props[i] = new RandVar(prv.getName(), prv.getDomain());
        ++i;
    }
    IProposition Xk = ProbUtil.constructConjunction(props);

    AssignmentProposition[] ax_kp1 = new AssignmentProposition[tToTm1StateVarMap.Size()];
    IMap<IRandomVariable, AssignmentProposition> x_kp1VarAssignMap = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, AssignmentProposition>();
    i = 0;
    foreach (IRandomVariable rv in b_kp1t.getFor())
    {
        ax_kp1[i] = new AssignmentProposition(rv, "<Dummy Value>");
        x_kp1VarAssignMap.Put(rv, ax_kp1[i]);
        ++i;
    }
    IProposition x_kp1 = ProbUtil.constructConjunction(ax_kp1);

    props = e_kp1.ToArray();
    IProposition pe_kp1 = ProbUtil.constructConjunction(props);

    // ∑<sub>x<sub>k+1</sub></sub>
    CategoricalDistributionIterator ib_kp2t = new CategoricalDistributionIteratorImpl2(x_kp1VarAssignMap, sensorModel, transitionModel, b_kp1t, pe_kp1, Xk, x_kp1);
    b_kp2t.iterateOver(ib_kp2t);

    return b_kp1t;
}
/**
 * The particle filtering algorithm implemented as a recursive update
 * operation with state (the set of samples).
 *
 * @param e
 *            <b>e</b>, the new incoming evidence
 * @return a vector of samples of size N, where each sample is a vector of
 *         assignment propositions for the X_1 state variables, which is
 *         intended to represent the generated sample for time t.
 */
public AssignmentProposition[][] particleFiltering(AssignmentProposition[] e)
{
    // local variables: W, a vector of weights of size N
    double[] W = new double[N];

    // for i = 1 to N do
    for (int i = 0; i < N; ++i)
    {
        /* step 1 */
        // S[i] <- sample from <b>P</b>(<b>X</b><sub>1</sub> |
        // <b>X</b><sub>0</sub> = S[i])
        sampleFromTransitionModel(i);
        /* step 2 */
        // W[i] <- <b>P</b>(<b>e</b> | <b>X</b><sub>1</sub> = S[i])
        W[i] = sensorModel.posterior(ProbUtil.constructConjunction(e), S_tp1[i]);
    }
    /* step 3 */
    // S <- WEIGHTED-SAMPLE-WITH-REPLACEMENT(N, S, W)
    S = weightedSampleWithReplacement(N, S, W);

    // return S
    return S;
}
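// Illustrative usage sketch (added; not part of the original source), with
// a hypothetical evidence variable 'umbrella' observed at each time step:
//
//     foreach (bool observed in observations)
//     {
//         AssignmentProposition[] e = { new AssignmentProposition(umbrella, observed) };
//         AssignmentProposition[][] samples = particleFiltering(e);
//         // 'samples' now approximates P(X_t | e_1:t) by the fraction of
//         // particles in each state.
//     }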
public object getSample(double probabilityChoice, params object[] parentValues)
{
    return ProbUtil.sample(probabilityChoice, on, getConditioningCase(parentValues).getValues());
}
public void test_indexOf()
{
    RandVar X = new RandVar("X", new BooleanDomain());
    RandVar Y = new RandVar("Y", new ArbitraryTokenDomain("A", "B", "C"));
    RandVar Z = new RandVar("Z", new BooleanDomain());

    // An ordered X,Y,Z enumeration of values should look like:
    // 00: true, A, true
    // 01: true, A, false
    // 02: true, B, true
    // 03: true, B, false
    // 04: true, C, true
    // 05: true, C, false
    // 06: false, A, true
    // 07: false, A, false
    // 08: false, B, true
    // 09: false, B, false
    // 10: false, C, true
    // 11: false, C, false
    IRandomVariable[] vars = new IRandomVariable[] { X, Y, Z };
    IMap<IRandomVariable, object> even = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();

    even.Put(X, true);
    even.Put(Y, "A");
    even.Put(Z, true);
    Assert.AreEqual(0, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(1, ProbUtil.indexOf(vars, even));
    even.Put(Y, "B");
    even.Put(Z, true);
    Assert.AreEqual(2, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(3, ProbUtil.indexOf(vars, even));
    even.Put(Y, "C");
    even.Put(Z, true);
    Assert.AreEqual(4, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(5, ProbUtil.indexOf(vars, even));

    even.Put(X, false);
    even.Put(Y, "A");
    even.Put(Z, true);
    Assert.AreEqual(6, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(7, ProbUtil.indexOf(vars, even));
    even.Put(Y, "B");
    even.Put(Z, true);
    Assert.AreEqual(8, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(9, ProbUtil.indexOf(vars, even));
    even.Put(Y, "C");
    even.Put(Z, true);
    Assert.AreEqual(10, ProbUtil.indexOf(vars, even));
    even.Put(Z, false);
    Assert.AreEqual(11, ProbUtil.indexOf(vars, even));
}
public virtual object GetSample(double probabilityChoice, params AssignmentProposition[] parentValues)
{
    return ProbUtil.sample(probabilityChoice, on, GetConditioningCase(parentValues).getValues());
}
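// A minimal, self-contained sketch (added for illustration) of the
// inverse-CDF selection that ProbUtil.sample(...) performs in the two
// methods above: walk the values' cumulative probabilities until the
// uniform draw falls inside a bucket. Names here are local to the sketch,
// not library API.
private static object SampleSketch(double probabilityChoice, object[] values, double[] distribution)
{
    double total = 0;
    for (int i = 0; i < values.Length; i++)
    {
        total += distribution[i];
        if (probabilityChoice <= total)
        {
            return values[i];
        }
    }
    // Guard against floating point round-off in the running sum.
    return values[values.Length - 1];
}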
public double prior(params IProposition[] phi)
{
    return probabilityOf(ProbUtil.constructConjunction(phi));
}