Example #1
        // function PRIOR-SAMPLE(bn) returns an event sampled from the prior
        // specified by bn

        /**
         * The PRIOR-SAMPLE algorithm in Figure 14.13. A sampling algorithm that
         * generates events from a Bayesian network. Each variable is sampled
         * according to the conditional distribution given the values already
         * sampled for the variable's parents.
         *
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @return an event sampled from the prior specified by bn
         */
        public IMap <IRandomVariable, object> priorSample(IBayesianNetwork bn)
        {
            // x <- an event with n elements
            IMap <IRandomVariable, object> x = CollectionFactory.CreateInsertionOrderedMap <IRandomVariable, object>();

            // foreach variable X<sub>i</sub> in X<sub>1</sub>,...,X<sub>n</sub> do
            foreach (IRandomVariable Xi in bn.GetVariablesInTopologicalOrder())
            {
                // x[i] <- a random sample from
                // <b>P</b>(X<sub>i</sub> | parents(X<sub>i</sub>))
                x.Put(Xi, ProbUtil.randomSample(bn.GetNode(Xi), x, randomizer));
            }
            // return x
            return(x);
        }
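
A minimal usage sketch (not part of the snippet above), assuming the enclosing class is PriorSample as in Example #6: drawing a single event from the prior of the Cloudy/Sprinkler/Rain/WetGrass example network. MockRandomizer is used here only to make the draw deterministic; any IRandom implementation could be supplied instead.

        public void priorSampleUsageSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
            IRandom r = new MockRandomizer(new double[] { 0.5, 0.5, 0.5, 0.5 });

            PriorSample ps = new PriorSample(r);
            IMap<IRandomVariable, object> sample = ps.priorSample(bn);

            // Every network variable now has a sampled value, e.g. with the
            // mock values above Cloudy is sampled as true (see Example #6).
            object cloudyValue = sample.Get(ExampleRV.CLOUDY_RV);
        }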
Example #2
        // function ENUMERATION-ASK(X, e, bn) returns a distribution over X

        /**
         * The ENUMERATION-ASK algorithm in Figure 14.9 evaluates expression trees
         * (Figure 14.8) using depth-first recursion.
         *
         * @param X
         *            the query variables.
         * @param observedEvidence
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are the
         *            hidden variables.
         * @return a distribution over the query variables.
         */
        public ICategoricalDistribution enumerationAsk(IRandomVariable[] X,
                                                       AssignmentProposition[] observedEvidence,
                                                       IBayesianNetwork bn)
        {
            // Q(X) <- a distribution over X, initially empty
            ProbabilityTable Q = new ProbabilityTable(X);
            ObservedEvidence e = new ObservedEvidence(X, observedEvidence, bn);

            // for each value x<sub>i</sub> of X do
            ProbabilityTable.ProbabilityTableIterator di = new ProbabilityTableIteratorImpl(bn, Q, e, X, this);
            Q.iterateOverTable(di);

            // return NORMALIZE(Q(X))
            return(Q.normalize());
        }
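
A minimal usage sketch, assuming the enclosing class is EnumerationAsk with the Ask wrapper shown in Examples #12 and #13: querying P(Cavity | toothache) on the Toothache/Cavity/Catch example network. The expected figures are those asserted in Example #5 (AIMA3e pg. 493).

        public void enumerationAskUsageSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructToothacheCavityCatchNetwork();

            ICategoricalDistribution d = new EnumerationAsk().Ask(
                new IRandomVariable[] { ExampleRV.CAVITY_RV },
                new AssignmentProposition[] { new AssignmentProposition(ExampleRV.TOOTHACHE_RV, true) },
                bn);

            // d.getValues() is approximately { 0.6, 0.4 }
        }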
Example #3
        public void testLikelihoodWeighting_basic()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();

            AssignmentProposition[] e = new AssignmentProposition[] { new AssignmentProposition(ExampleRV.SPRINKLER_RV, true) };
            MockRandomizer          r = new MockRandomizer(new double[] { 0.5, 0.5, 0.5, 0.5 });

            LikelihoodWeighting lw = new LikelihoodWeighting(r);

            double[] estimate = lw.likelihoodWeighting(
                new IRandomVariable[] { ExampleRV.RAIN_RV }, e, bn, 1000)
                                .getValues();

            assertArrayEquals(new double[] { 1.0, 0.0 }, estimate, DELTA_THRESHOLD);
        }
Example #4
        /**
         * <b>Note:</b> Override this method for a more efficient implementation as
         * outlined in AIMA3e pgs. 527-28. The default implementation does not
         * perform any of these optimizations.<br>
         *
         * @param bn
         *            the Bayesian Network over which the query is being made. Note,
         *            it is necessary to provide this in order to be able to determine
         *            the dependencies between variables.
         * @param vars
         *            a subset of the RandomVariables making up the Bayesian
         *            Network, with any irrelevant hidden variables already removed.
         * @return a possibly optimal ordering for the random variables to be
         *         iterated over by the algorithm. For example, one fairly effective
         *         ordering is a greedy one: eliminate whichever variable minimizes
         *         the size of the next factor to be constructed.
         */
        protected virtual ICollection <IRandomVariable> order(IBayesianNetwork bn,
                                                               ICollection <IRandomVariable> vars)
        {
            // Note: Trivial approach:
            // For simplicity just return the variables in the reverse of the
            // order received, i.e. the received order will be the default
            // topological order for the Bayesian Network, and we want to
            // iterate the network from the bottom up so that, when a hidden
            // variable is encountered, all the factors that depend on it
            // have already been seen.
            ICollection <IRandomVariable> order = CollectionFactory.CreateQueue <IRandomVariable>(vars);

            order.Reverse();

            return(order);
        }
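
A hedged sketch of the override point described above, assuming the enclosing class is EliminationAsk (as suggested by Example #14) and that order is declared overridable. The greedy "smallest next factor" heuristic needs domain-size information that is not shown in these snippets, so the hypothetical subclass below only keeps the received topological order instead of reversing it; it marks where a smarter ordering would plug in rather than implementing the heuristic itself.

        public class TopologicalOrderEliminationAsk : EliminationAsk
        {
            protected override ICollection<IRandomVariable> order(IBayesianNetwork bn,
                                                                  ICollection<IRandomVariable> vars)
            {
                // Keep the received (topological) order. A real implementation
                // would instead pick, at each step, the variable that minimizes
                // the size of the next factor to be constructed (AIMA3e pgs. 527-28).
                return CollectionFactory.CreateQueue<IRandomVariable>(vars);
            }
        }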
Example #5
        public void testInferenceOnToothacheCavityCatchNetwork()
        {
            IBayesianNetwork bn = BayesNetExampleFactory
                                  .constructToothacheCavityCatchNetwork();

            ICategoricalDistribution d = bayesInference.Ask(
                new IRandomVariable[] { ExampleRV.CAVITY_RV },
                new AssignmentProposition[] { }, bn);

            // System.Console.WriteLine("P(Cavity)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.2, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.8, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 493
            // P(Cavity | toothache) = <0.6, 0.4>
            d = bayesInference.Ask(new IRandomVariable[] { ExampleRV.CAVITY_RV },
                                   new AssignmentProposition[] { new AssignmentProposition(
                                                                     ExampleRV.TOOTHACHE_RV, true) }, bn);

            // System.Console.WriteLine("P(Cavity | toothache)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.6, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.4, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 497
            // P(Cavity | toothache AND catch) = <0.871, 0.129>
            d = bayesInference
                .Ask(new IRandomVariable[] { ExampleRV.CAVITY_RV },
                     new AssignmentProposition[] {
                new AssignmentProposition(
                    ExampleRV.TOOTHACHE_RV, true),
                new AssignmentProposition(ExampleRV.CATCH_RV,
                                          true)
            }, bn);

            // System.Console.WriteLine("P(Cavity | toothache, catch)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.8709677419354839, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.12903225806451615, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
        }
Example #6
        public void testPriorSample_basic()
        {
            // AIMA3e pg. 530
            IBayesianNetwork bn = BayesNetExampleFactory
                                  .constructCloudySprinklerRainWetGrassNetwork();
            IRandom r = new MockRandomizer(
                new double[] { 0.5, 0.5, 0.5, 0.5 });

            PriorSample ps = new PriorSample(r);
            IMap <IRandomVariable, object> even = ps.priorSample(bn);

            Assert.AreEqual(4, even.GetKeys().Size());
            Assert.AreEqual(true, even.Get(ExampleRV.CLOUDY_RV));
            Assert.AreEqual(false, even.Get(ExampleRV.SPRINKLER_RV));
            Assert.AreEqual(true, even.Get(ExampleRV.RAIN_RV));
            Assert.AreEqual(true, even.Get(ExampleRV.WET_GRASS_RV));
        }
Example #7
        // END-BayesInference
        //

        //
        // PROTECTED METHODS
        //

        /**
         * <b>Note:</b> Override this method for a more efficient implementation as
         * outlined in AIMA3e pgs. 527-28. Calculates the hidden variables from the
         * Bayesian Network. The default implementation does not perform any of
         * these optimizations.<br>
         * <br>
         * Two calculations are to be performed here in order to optimize iteration over
         * the Bayesian Network:<br>
         * 1. Calculate the hidden variables to be enumerated over. An optimization
         * (AIMA3e pg. 528) is to remove 'every variable that is not an ancestor of
         * a query variable or evidence variable as it is irrelevant to the query'
         * (i.e. it sums to 1).<br>
         * 2. Calculate the subset of variables from the Bayesian Network to
         * be retained after irrelevant hidden variables have been removed.
         *
         * @param X
         *            the query variables.
         * @param e
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are the
         *            hidden variables.
         * @param hidden
         *            to be populated with the relevant hidden variables Y.
         * @param bnVARS
         *            to be populated with the subset of the random variables
         *            comprising the Bayesian Network with any irrelevant hidden
         *            variables removed.
         */
        protected virtual void calculateVariables(IRandomVariable[] X,
                                                  AssignmentProposition[] e, IBayesianNetwork bn,
                                                  ISet <IRandomVariable> hidden, ICollection <IRandomVariable> bnVARS)
        {
            bnVARS.AddAll(bn.GetVariablesInTopologicalOrder());
            hidden.AddAll(bnVARS);

            foreach (IRandomVariable x in X)
            {
                hidden.Remove(x);
            }
            foreach (AssignmentProposition ap in e)
            {
                hidden.RemoveAll(ap.getScope());
            }

            return;
        }
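
A concrete illustration of the default behaviour (a sketch; the method is protected, so assume this helper lives in the same class or a subclass): for the query P(Rain | Sprinkler = true) on the Cloudy/Sprinkler/Rain/WetGrass network, the default implementation keeps every network variable and simply subtracts the query and evidence variables to obtain the hidden set. With the AIMA3e pg. 528 optimization, WetGrass could additionally be dropped, since it is not an ancestor of a query or evidence variable.

        public void calculateVariablesSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
            ISet<IRandomVariable> hidden = CollectionFactory.CreateSet<IRandomVariable>();
            ICollection<IRandomVariable> bnVARS = CollectionFactory.CreateQueue<IRandomVariable>();

            calculateVariables(
                new IRandomVariable[] { ExampleRV.RAIN_RV },
                new AssignmentProposition[] { new AssignmentProposition(ExampleRV.SPRINKLER_RV, true) },
                bn, hidden, bnVARS);

            // bnVARS now holds all four network variables in topological order;
            // hidden holds { Cloudy, WetGrass } (everything except query and evidence).
        }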
Example #8
        //
        // PRIVATE METHODS
        //
        private IFactor makeFactor(IRandomVariable var, AssignmentProposition[] e, IBayesianNetwork bn)
        {
            INode n = bn.GetNode(var);

            if (!(n is IFiniteNode))
            {
                throw new IllegalArgumentException("Elimination-Ask only works with finite Nodes.");
            }
            IFiniteNode fn = (IFiniteNode)n;
            ICollection <AssignmentProposition> evidence = CollectionFactory.CreateQueue <AssignmentProposition>();

            foreach (AssignmentProposition ap in e)
            {
                if (fn.GetCPT().Contains(ap.getTermVariable()))
                {
                    evidence.Add(ap);
                }
            }

            return(fn.GetCPT().GetFactorFor(evidence.ToArray()));
        }
Example #9
            public ObservedEvidence(IRandomVariable[] queryVariables,
                                    AssignmentProposition[] e, IBayesianNetwork bn)
            {
                this.bn = bn;

                int maxSize = bn.GetVariablesInTopologicalOrder().Size();

                extendedValues = new object[maxSize];
                var            = new IRandomVariable[maxSize];
                // query variables go first
                int idx = 0;

                for (int i = 0; i < queryVariables.Length; ++i)
                {
                    var[idx] = queryVariables[i];
                    varIdxs.Put(var[idx], idx);
                    idx++;
                }
                // initial evidence variables go next
                for (int i = 0; i < e.Length; ++i)
                {
                    var[idx] = e[i].getTermVariable();
                    varIdxs.Put(var[idx], idx);
                    extendedValues[idx] = e[i].getValue();
                    idx++;
                }
                extendedIdx = idx - 1;
                hiddenStart = idx;
                // the remaining slots are left open for the hidden variables
                foreach (IRandomVariable rv in bn.GetVariablesInTopologicalOrder())
                {
                    if (!varIdxs.ContainsKey(rv))
                    {
                        var[idx] = rv;
                        varIdxs.Put(var[idx], idx);
                        idx++;
                    }
                }
            }
Example #10
 public ICategoricalDistribution Ask(IRandomVariable[] X,
                                     AssignmentProposition[] observedEvidence,
                                     IBayesianNetwork bn, int N)
 {
     return(rejectionSampling(X, observedEvidence, bn, N));
 }
Example #11
        // function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)

        /**
         * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering queries
         * given evidence in a Bayesian Network.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */
        public ICategoricalDistribution rejectionSampling(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // <b>x</b> <- PRIOR-SAMPLE(bn)
                IMap <IRandomVariable, object> x = ps.priorSample(bn);
                // if <b>x</b> is consistent with e then
                if (isConsistent(x, e))
                {
                    // <b>N</b>[x] <- <b>N</b>[x] + 1
                    // where x is the value of X in <b>x</b>
                    N[ProbUtil.indexOf(X, x)] += 1.0;
                }
            }
            // return NORMALIZE(<b>N</b>)
            return(new ProbabilityTable(N, X).normalize());
        }
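
A hedged usage sketch, assuming the enclosing class is RejectionSampling (Example #10 shows its Ask wrapper) and that a parameterless constructor with a default sampler is available; how the underlying PriorSample and randomness source are wired up is not shown in these snippets. It estimates P(Rain | Sprinkler = true) from 1000 samples.

        public void rejectionSamplingUsageSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
            AssignmentProposition[] e = { new AssignmentProposition(ExampleRV.SPRINKLER_RV, true) };

            // Assumed: RejectionSampling() wires up a default PriorSample/randomizer.
            RejectionSampling rs = new RejectionSampling();
            ICategoricalDistribution estimate = rs.rejectionSampling(
                new IRandomVariable[] { ExampleRV.RAIN_RV }, e, bn, 1000);

            // estimate.getValues() approximates <P(rain | sprinkler), P(not rain | sprinkler)>.
        }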
Example #12
 //
 // START-BayesInference
 public ICategoricalDistribution Ask(IRandomVariable[] X,
                                     AssignmentProposition[] observedEvidence,
                                     IBayesianNetwork bn)
 {
     return(this.enumerationAsk(X, observedEvidence, bn));
 }
Example #13
 public FiniteBayesModel(IBayesianNetwork bn)
     : this(bn, new EnumerationAsk())
 {
 }
Example #14
        // function ELIMINATION-ASK(X, e, bn) returns a distribution over X

        /**
         * The ELIMINATION-ASK algorithm in Figure 14.11.
         *
         * @param X
         *            the query variables.
         * @param e
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} &cup; E &cup; Y, where Y are the
         *            hidden variables.
         * @return a distribution over the query variables.
         */
        public ICategoricalDistribution eliminationAsk(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn)
        {
            ISet <IRandomVariable>        hidden = CollectionFactory.CreateSet <IRandomVariable>();
            ICollection <IRandomVariable> VARS   = CollectionFactory.CreateQueue <IRandomVariable>();

            calculateVariables(X, e, bn, hidden, VARS);

            // factors <- []
            ICollection <IFactor> factors = CollectionFactory.CreateQueue <IFactor>();

            // for each var in ORDER(bn.VARS) do
            foreach (IRandomVariable var in order(bn, VARS))
            {
                // factors <- [MAKE-FACTOR(var, e) | factors]
                factors.Insert(0, makeFactor(var, e, bn));
                // if var is hidden variable then factors <- SUM-OUT(var, factors)
                if (hidden.Contains(var))
                {
                    factors = sumOut(var, factors, bn);
                }
            }
            // return NORMALIZE(POINTWISE-PRODUCT(factors))
            IFactor product = pointwiseProduct(factors);

            // Note: Want to ensure the order of the product matches the
            // query variables
            return(((ProbabilityTable)product.pointwiseProductPOS(_identity, X)).normalize());
        }
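
A minimal usage sketch, assuming the enclosing class is named EliminationAsk and has a parameterless constructor (only the method itself is shown above, so both are assumptions). Being exact inference, the result matches the ENUMERATION-ASK figures asserted in Example #5.

        public void eliminationAskUsageSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructToothacheCavityCatchNetwork();

            ICategoricalDistribution d = new EliminationAsk().eliminationAsk(
                new IRandomVariable[] { ExampleRV.CAVITY_RV },
                new AssignmentProposition[] { new AssignmentProposition(ExampleRV.TOOTHACHE_RV, true) },
                bn);

            // d.getValues() is approximately { 0.6, 0.4 }  (AIMA3e pg. 493)
        }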
Example #15
        private ICollection <IFactor> sumOut(IRandomVariable var, ICollection <IFactor> factors, IBayesianNetwork bn)
        {
            ICollection <IFactor> summedOutFactors = CollectionFactory.CreateQueue <IFactor>();
            ICollection <IFactor> toMultiply       = CollectionFactory.CreateQueue <IFactor>();

            foreach (IFactor f in factors)
            {
                if (f.contains(var))
                {
                    toMultiply.Add(f);
                }
                else
                {
                    // This factor does not contain the variable
                    // so no need to sum out - see AIMA3e pg. 527.
                    summedOutFactors.Add(f);
                }
            }

            summedOutFactors.Add(pointwiseProduct(toMultiply).sumOut(var));

            return(summedOutFactors);
        }
Example #16
        // function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)

        /**
         * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering queries
         * given evidence in a Bayesian Network.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param N
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */
        public ICategoricalDistribution likelihoodWeighting(IRandomVariable[] X, AssignmentProposition[] e, IBayesianNetwork bn, int N)
        {
            // local variables: W, a vector of weighted counts for each value of X,
            // initially zero
            double[] W = new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < N; j++)
            {
                // <b>x</b>,w <- WEIGHTED-SAMPLE(bn,e)
                Pair <IMap <IRandomVariable, object>, double> x_w = weightedSample(bn, e);
                // W[x] <- W[x] + w where x is the value of X in <b>x</b>
                W[ProbUtil.indexOf(X, x_w.GetFirst())] += x_w.getSecond();
            }
            // return NORMALIZE(W)
            return(new ProbabilityTable(W, X).normalize());
        }
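
A minimal usage sketch mirroring the test in Example #3: estimating P(Rain | Sprinkler = true) from 1000 weighted samples. MockRandomizer is used only to make the run deterministic; any IRandom implementation could be passed to the LikelihoodWeighting constructor instead.

        public void likelihoodWeightingUsageSketch()
        {
            IBayesianNetwork bn = BayesNetExampleFactory.constructCloudySprinklerRainWetGrassNetwork();
            LikelihoodWeighting lw = new LikelihoodWeighting(
                new MockRandomizer(new double[] { 0.5, 0.5, 0.5, 0.5 }));

            ICategoricalDistribution estimate = lw.likelihoodWeighting(
                new IRandomVariable[] { ExampleRV.RAIN_RV },
                new AssignmentProposition[] { new AssignmentProposition(ExampleRV.SPRINKLER_RV, true) },
                bn, 1000);

            // With this mock randomizer the estimate collapses to { 1.0, 0.0 }, as
            // asserted in Example #3; with a real random source it approximates P(Rain | sprinkler).
        }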