Example #1
        // function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)

        /**
         * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
         * evidence in a Bayesian Network.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution gibbsAsk(RandomVariable[] X,
                                                AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];
            // Z, the nonevidence variables in bn
            Set<RandomVariable> Z = new Set<RandomVariable>(bn.getVariablesInTopologicalOrder());

            foreach (AssignmentProposition ap in e)
            {
                Z.Remove(ap.getTermVariable());
            }
            // <b>x</b>, the current state of the network, initially copied from e
            Map<RandomVariable, Object> x = new LinkedHashMap<RandomVariable, Object>();

            foreach (AssignmentProposition ap in e)
            {
                x.put(ap.getTermVariable(), ap.getValue());
            }

            // initialize <b>x</b> with random values for the variables in Z
            foreach (RandomVariable Zi in Z)
            {
                x.put(Zi, ProbUtil.randomSample(bn.getNode(Zi), x, randomizer));
            }

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // for each Z<sub>i</sub> in Z do
                foreach (RandomVariable Zi in Z)
                {
                    // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
                    // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>))
                    x.put(Zi,
                          ProbUtil.mbRandomSample(bn.getNode(Zi), x, randomizer));
                }
                // Note: the count increment below is placed outside the
                // preceding loop over Z, whereas Figure 14.16 shows it
                // inside that loop. Incrementing inside the loop is only
                // correct when there is a single query variable X; with
                // multiple query variables, rare events would be counted
                // incorrectly. In the single-variable case each possible
                // value is simply counted |Z| times, which yields the same
                // ratios after normalization, so keeping the increment
                // outside the loop is also more efficient.
                //
                // <b>N</b>[x] <- <b>N</b>[x] + 1
                // where x is the value of X in <b>x</b>
                N[ProbUtil.indexOf(X, x)] += 1.0;
            }
            // return NORMALIZE(<b>N</b>)
            return(new ProbabilityTable(N, X).normalize());
        }
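
The loop above leans on the library's ProbUtil.mbRandomSample to draw each Zi from P(Zi | mb(Zi)). As a hedged illustration of what that step does, here is a minimal, self-contained C# sketch of the same Gibbs scheme on the classic Rain/Sprinkler/GrassWet network, estimating P(Rain | GrassWet = true). The network, its CPT numbers, and every name below (GibbsSketch, WeightRain, and so on) are illustrative assumptions, not part of the AIMA library.

using System;

static class GibbsSketch
{
    // CPTs of the illustrative network: Rain -> Sprinkler, Rain -> GrassWet, Sprinkler -> GrassWet.
    static double PRain(bool r) => r ? 0.2 : 0.8;
    static double PSprinkler(bool s, bool r) => r ? (s ? 0.01 : 0.99) : (s ? 0.4 : 0.6);
    static double PWet(bool w, bool s, bool r)
    {
        double p = (s && r) ? 0.99 : s ? 0.9 : r ? 0.8 : 0.0;
        return w ? p : 1.0 - p;
    }

    // Unnormalized P(Zi | mb(Zi)): the variable's own CPT term times the CPT of each child,
    // which is what mbRandomSample normalizes and samples from.
    static double WeightRain(bool r, bool s, bool w) => PRain(r) * PSprinkler(s, r) * PWet(w, s, r);
    static double WeightSprinkler(bool s, bool r, bool w) => PSprinkler(s, r) * PWet(w, s, r);

    static void Main()
    {
        var rng = new Random(0);
        const bool wet = true;                  // evidence e: GrassWet = true
        bool rain = rng.NextDouble() < 0.5;     // x initialized with random values for Z
        bool sprinkler = rng.NextDouble() < 0.5;
        double[] counts = new double[2];        // N: counts for Rain = true / false

        for (int j = 0; j < 100000; j++)
        {
            // Resample Rain from P(Rain | mb(Rain)).
            double wt = WeightRain(true, sprinkler, wet);
            double wf = WeightRain(false, sprinkler, wet);
            rain = rng.NextDouble() < wt / (wt + wf);

            // Resample Sprinkler from P(Sprinkler | mb(Sprinkler)).
            double st = WeightSprinkler(true, rain, wet);
            double sf = WeightSprinkler(false, rain, wet);
            sprinkler = rng.NextDouble() < st / (st + sf);

            counts[rain ? 0 : 1] += 1.0;        // N[x] <- N[x] + 1
        }
        Console.WriteLine("P(Rain | GrassWet = true) ~= " + counts[0] / (counts[0] + counts[1]));
    }
}

With enough iterations the printed estimate should settle near the exact posterior; the point is only to show the Markov-blanket weights being normalized and sampled in place, mirroring the role of mbRandomSample in the method above.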
Example #2
        // function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)

        /**
         * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering queries
         * given evidence in a Bayesian Network.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param N
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */
        public ICategoricalDistribution likelihoodWeighting(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int N)
        {
            // local variables: W, a vector of weighted counts for each value of X,
            // initially zero
            double[] W = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < N; j++)
            {
                // <b>x</b>,w <- WEIGHTED-SAMPLE(bn,e)
                Pair<IMap<IRandomVariable, object>, double> x_w = weightedSample(bn, e);
                // W[x] <- W[x] + w where x is the value of X in <b>x</b>
                W[ProbUtil.indexOf(X, x_w.GetFirst())] += x_w.GetSecond();
            }
            // return NORMALIZE(W)
            return(new ProbabilityTable(W, X).normalize());
        }
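
likelihoodWeighting delegates the actual sampling to weightedSample(bn, e), which is not shown here. The sketch below illustrates the idea on the same illustrative network used above: nonevidence variables are sampled from P(Xi | parents(Xi)) in topological order, while each evidence variable multiplies the sample's weight by the probability of its observed value given its parents. Names and numbers are again assumptions for illustration, not the library's weightedSample.

using System;

static class LikelihoodWeightingSketch
{
    // Same illustrative CPTs as in the Gibbs sketch above.
    static double PRain(bool r) => r ? 0.2 : 0.8;
    static double PSprinkler(bool s, bool r) => r ? (s ? 0.01 : 0.99) : (s ? 0.4 : 0.6);
    static double PWet(bool w, bool s, bool r)
    {
        double p = (s && r) ? 0.99 : s ? 0.9 : r ? 0.8 : 0.0;
        return w ? p : 1.0 - p;
    }

    // WEIGHTED-SAMPLE: walk the variables in topological order; sample the
    // nonevidence ones, and fold P(observed value | parents) into the weight
    // for the evidence ones (here GrassWet = true).
    static (bool rain, double weight) WeightedSample(Random rng)
    {
        double w = 1.0;
        bool rain = rng.NextDouble() < PRain(true);
        bool sprinkler = rng.NextDouble() < PSprinkler(true, rain);
        w *= PWet(true, sprinkler, rain);       // evidence variable: weight it, don't sample it
        return (rain, w);
    }

    static void Main()
    {
        var rng = new Random(0);
        double[] W = new double[2];             // weighted counts for Rain = true / false
        for (int j = 0; j < 100000; j++)
        {
            var (rain, w) = WeightedSample(rng);
            W[rain ? 0 : 1] += w;               // W[x] <- W[x] + w
        }
        Console.WriteLine("P(Rain | GrassWet = true) ~= " + W[0] / (W[0] + W[1]));
    }
}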
Example #3
        // function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)

        /**
         * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering queries
         * given evidence in a Bayesian Network.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */
        public ICategoricalDistribution rejectionSampling(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // <b>x</b> <- PRIOR-SAMPLE(bn)
                IMap<IRandomVariable, object> x = ps.priorSample(bn);
                // if <b>x</b> is consistent with e then
                if (isConsistent(x, e))
                {
                    // <b>N</b>[x] <- <b>N</b>[x] + 1
                    // where x is the value of X in <b>x</b>
                    N[ProbUtil.indexOf(X, x)] += 1.0;
                }
            }
            // return NORMALIZE(<b>N</b>)
            return(new ProbabilityTable(N, X).normalize());
        }
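
The isConsistent(x, e) helper is referenced but not shown. One plausible shape, sketched against the interfaces visible in these examples, is to compare the sampled value of every evidence variable with its observed value and reject the sample on the first mismatch. The IMap Get accessor used here is an assumption, not a confirmed part of the library API.

        // Sketch only: assumes IMap exposes a Get(key) accessor; the library's
        // actual isConsistent may be implemented differently.
        private bool isConsistent(IMap<IRandomVariable, object> x, AssignmentProposition[] e)
        {
            foreach (AssignmentProposition ap in e)
            {
                // A sample is consistent with e only if every evidence variable
                // was sampled to exactly its observed value.
                object sampled = x.Get(ap.getTermVariable());
                if (!ap.getValue().Equals(sampled))
                {
                    return false;
                }
            }
            return true;
        }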
Example #4
        public void test_indexOf()
        {
            RandVar X = new RandVar("X", new BooleanDomain());
            RandVar Y = new RandVar("Y", new ArbitraryTokenDomain("A", "B", "C"));
            RandVar Z = new RandVar("Z", new BooleanDomain());

            // An ordered X,Y,Z enumeration of values should look like:
            // 00: true, A, true
            // 01: true, A, false
            // 02: true, B, true
            // 03: true, B, false
            // 04: true, C, true
            // 05: true, C, false
            // 06: false, A, true
            // 07: false, A, false
            // 08: false, B, true
            // 09: false, B, false
            // 10: false, C, true
            // 11: false, C, false
            IRandomVariable[] vars = new IRandomVariable[] { X, Y, Z };
            IMap<IRandomVariable, object> even = CollectionFactory.CreateInsertionOrderedMap<IRandomVariable, object>();

            even.Put(X, true);
            even.Put(Y, "A");
            even.Put(Z, true);
            Assert.AreEqual(0, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(1, ProbUtil.indexOf(vars, even));

            even.Put(Y, "B");
            even.Put(Z, true);
            Assert.AreEqual(2, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(3, ProbUtil.indexOf(vars, even));

            even.Put(Y, "C");
            even.Put(Z, true);
            Assert.AreEqual(4, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(5, ProbUtil.indexOf(vars, even));

            even.Put(X, false);
            even.Put(Y, "A");
            even.Put(Z, true);
            Assert.AreEqual(6, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(7, ProbUtil.indexOf(vars, even));

            even.Put(Y, "B");
            even.Put(Z, true);
            Assert.AreEqual(8, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(9, ProbUtil.indexOf(vars, even));

            even.Put(Y, "C");
            even.Put(Z, true);
            Assert.AreEqual(10, ProbUtil.indexOf(vars, even));
            even.Put(Z, false);
            Assert.AreEqual(11, ProbUtil.indexOf(vars, even));
        }
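
The expected indices in this test follow a mixed-radix scheme over the ordered domains of X, Y and Z, most significant variable first, exactly as enumerated in the comment above. A small sketch of that arithmetic, using a hypothetical helper rather than ProbUtil.indexOf itself:

// Hypothetical helper, not ProbUtil.indexOf.
static class IndexOfSketch
{
    // domainSizes[i] is the size of variable i's ordered domain (e.g. 2 for {true,false},
    // 3 for {A,B,C}); positions[i] is the index of the assigned value within that domain.
    static int IndexOf(int[] domainSizes, int[] positions)
    {
        int index = 0;
        for (int i = 0; i < domainSizes.Length; i++)
        {
            // Most significant variable first.
            index = index * domainSizes[i] + positions[i];
        }
        return index;
    }

    static void Main()
    {
        // X = false (position 1 in {true,false}), Y = "C" (position 2 in {A,B,C}),
        // Z = true (position 0 in {true,false}):
        //   ((1 * 3) + 2) * 2 + 0 = 10, matching Assert.AreEqual(10, ...) above.
        System.Console.WriteLine(IndexOf(new[] { 2, 3, 2 }, new[] { 1, 2, 0 }));
    }
}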