Example #1
        public virtual ICategoricalDistribution jointDistribution(params IProposition[] propositions)
        {
            ProbabilityTable       d        = null;
            IProposition           conjProp = ProbUtil.constructConjunction(propositions);
            ISet <IRandomVariable> vars     = CollectionFactory.CreateSet <IRandomVariable>(conjProp.getUnboundScope());

            if (vars.Size() > 0)
            {
                IRandomVariable[] distVars = new IRandomVariable[vars.Size()];
                int i = 0;
                foreach (IRandomVariable rv in vars)
                {
                    distVars[i] = rv;
                    ++i;
                }

                ProbabilityTable ud     = new ProbabilityTable(distVars);
                object[]         values = new object[vars.Size()];

                // Accumulate into ud (the table over the unbound variables) the
                // probability of every possible world in which conjProp holds.
                CategoricalDistributionIterator di = new CategoricalDistributionIteratorJointDistribution(conjProp, vars, ud, values);

                IRandomVariable[] X = conjProp.getScope().ToArray();
                bayesInference.Ask(X, new AssignmentProposition[0], bayesNet).iterateOver(di);

                d = ud;
            }
            else
            {
                // No Unbound Variables, therefore just return
                // the singular probability related to the proposition.
                d = new ProbabilityTable();
                d.setValue(0, prior(propositions));
            }
            return(d);
        }
Example #2
        public ICategoricalDistribution jointDistribution(params IProposition[] propositions)
        {
            ProbabilityTable       d        = null;
            IProposition           conjProp = ProbUtil.constructConjunction(propositions);
            ISet <IRandomVariable> vars     = CollectionFactory.CreateSet <IRandomVariable>(conjProp.getUnboundScope());

            if (vars.Size() > 0)
            {
                IRandomVariable[] distVars = vars.ToArray();

                ProbabilityTable ud     = new ProbabilityTable(distVars);
                object[]         values = new object[vars.Size()];

                ProbabilityTable.ProbabilityTableIterator di = new ProbabilityTableIterator(conjProp, ud, values, vars);

                distribution.iterateOverTable(di);

                d = ud;
            }
            else
            {
                // No Unbound Variables, therefore just return
                // the singular probability related to the proposition.
                d = new ProbabilityTable();
                d.setValue(0, prior(propositions));
            }
            return(d);
        }
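
Both overloads above resolve the unbound variables of the given propositions and sum the underlying distribution into a new ProbabilityTable over just those variables. A minimal usage sketch (the model instance and the ToothacheRv/CavityRv variables are assumptions, not taken from the snippets; RandVar instances can be passed directly because RandVar is usable as an IProposition, as the forward() example further down shows):

        // P(Toothache, Cavity): both variables are unbound, so the result is a
        // ProbabilityTable over the two of them.
        ICategoricalDistribution joint = model.jointDistribution(ToothacheRv, CavityRv);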
Example #3
        public virtual ICategoricalDistribution posteriorDistribution(IProposition phi, params IProposition[] evidence)
        {
            IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

            // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
            ICategoricalDistribution dAandB    = jointDistribution(phi, conjEvidence);
            ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

            ICategoricalDistribution rVal = dAandB.divideBy(dEvidence);
            // Note: we need to ensure normalize() is called
            // in order to handle the case where an approximate
            // algorithm is used (i.e. it won't divide evenly,
            // as the values will have been calculated on separate
            // approximate runs). However, this should only be done
            // if all of the evidence's scope is bound (if not,
            // you are in essence returning a set of conditional
            // distributions, which you do not want normalized).
            bool unboundEvidence = false;

            foreach (IProposition e in evidence)
            {
                if (e.getUnboundScope().Size() > 0)
                {
                    unboundEvidence = true;
                    break;
                }
            }
            if (!unboundEvidence)
            {
                rVal.normalize();
            }

            return(rVal);
        }
Example #4
        public ICategoricalDistribution posteriorDistribution(IProposition phi,
                                                              params IProposition[] evidence)
        {
            IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

            // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
            ICategoricalDistribution dAandB    = jointDistribution(phi, conjEvidence);
            ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

            return(dAandB.divideBy(dEvidence));
        }
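
A minimal usage sketch of posteriorDistribution (again with assumed names), computing the distribution P(Cavity | Toothache = true) as P(Cavity AND toothache) / P(toothache) per (13.3 AIMA3e):

        ICategoricalDistribution posterior = model.posteriorDistribution(
            CavityRv,
            new AssignmentProposition(ToothacheRv, true));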
Example #5
        public virtual double posterior(IProposition phi, params IProposition[] evidence)
        {
            IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

            // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
            IProposition aAndB = new ConjunctiveProposition(phi, conjEvidence);
            double       probabilityOfEvidence = prior(conjEvidence);

            if (0 != probabilityOfEvidence)
            {
                return(prior(aAndB) / probabilityOfEvidence);
            }

            return(0);
        }
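
A minimal usage sketch of posterior() (assumed names), returning the single number P(cavity | toothache) = P(cavity AND toothache) / P(toothache); with the standard AIMA3e toothache/cavity/catch full joint distribution this evaluates to 0.12 / 0.2 = 0.6:

        double pCavityGivenToothache = model.posterior(
            new AssignmentProposition(CavityRv, true),
            new AssignmentProposition(ToothacheRv, true));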
Example #6
        public CategoricalDistribution jointDistribution(
            params IProposition[] propositions)
        {
            ProbabilityTable d        = null;
            IProposition     conjProp = ProbUtil
                                        .constructConjunction(propositions);
            LinkedHashSet <RandomVariable> vars = new LinkedHashSet <RandomVariable>(
                conjProp.getUnboundScope());

            if (vars.Count > 0)
            {
                RandomVariable[] distVars = new RandomVariable[vars.Count];
                vars.CopyTo(distVars);

                ProbabilityTable ud     = new ProbabilityTable(distVars);
                Object[]         values = new Object[vars.Count];

                //ProbabilityTable.Iterator di = new ProbabilityTable.Iterator() {

                //    public void iterate(Map<RandomVariable, Object> possibleWorld,
                //            double probability) {
                //        if (conjProp.holds(possibleWorld)) {
                //            int i = 0;
                //            for (RandomVariable rv : vars) {
                //                values[i] = possibleWorld.get(rv);
                //                i++;
                //            }
                //            int dIdx = ud.getIndex(values);
                //            ud.setValue(dIdx, ud.getValues()[dIdx] + probability);
                //        }
                //    }
                //};

                //distribution.iterateOverTable(di);
                // TODO:
                d = ud;
            }
            else
            {
                // No Unbound Variables, therefore just return
                // the singular probability related to the proposition.
                d = new ProbabilityTable();
                d.setValue(0, prior(propositions));
            }
            return(d);
        }
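
The commented-out block above is the original Java anonymous iterator that has not been ported yet (hence the TODO). One possible completion, shown only as a sketch (the ProbabilityTable.Iterator interface name and the iterate() signature are inferred from the Java comment, not verified against this port), mirrors what Example #2's ProbabilityTableIterator does: accumulate into ud the probability of every possible world in which conjProp holds.

        class JointDistributionIterator : ProbabilityTable.Iterator
        {
            private readonly IProposition conjProp;
            private readonly RandomVariable[] vars;
            private readonly ProbabilityTable ud;
            private readonly object[] values;

            public JointDistributionIterator(IProposition conjProp, RandomVariable[] vars,
                                             ProbabilityTable ud, object[] values)
            {
                this.conjProp = conjProp;
                this.vars = vars;
                this.ud = ud;
                this.values = values;
            }

            // For each possible world in which the conjunctive proposition holds,
            // add its probability to the matching cell of the unbound-variable table.
            // NOTE: the type of possibleWorld is an assumption; adjust it to whatever
            // map type this port's iterator interface actually declares.
            public void iterate(IDictionary<RandomVariable, object> possibleWorld, double probability)
            {
                if (conjProp.holds(possibleWorld))
                {
                    int i = 0;
                    foreach (RandomVariable rv in vars)
                    {
                        values[i] = possibleWorld[rv];
                        ++i;
                    }
                    int dIdx = ud.getIndex(values);
                    ud.setValue(dIdx, ud.getValues()[dIdx] + probability);
                }
            }
        }

With something like this in place, the TODO above reduces to constructing the iterator with (conjProp, distVars, ud, values) and calling distribution.iterateOverTable(di), as Example #2 does.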
Example #7
        public virtual double prior(params IProposition[] phi)
        {
            // Calculating the prior, therefore no relevant evidence
            // just query over the scope of proposition phi in order
            // to get a joint distribution for these
            IProposition conjunct = ProbUtil.constructConjunction(phi);

            IRandomVariable[]        X = conjunct.getScope().ToArray();
            ICategoricalDistribution d = bayesInference.Ask(X, new AssignmentProposition[0], bayesNet);

            // Then calculate the probability of the propositions phi
            // by seeing where they hold.
            double[] probSum = new double[1];
            CategoricalDistributionIterator di = new CategoricalDistributionIteraorPrior(conjunct, probSum);

            d.iterateOver(di);

            return(probSum[0]);
        }
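
A minimal usage sketch of prior() (assumed names): the unconditional probability of a proposition, obtained by asking the inference procedure for the joint distribution over the proposition's scope and summing the entries in which the proposition holds.

        double pToothache = model.prior(new AssignmentProposition(ToothacheRv, true));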
Example #8
        public ICategoricalDistribution forward(ICategoricalDistribution f1_t, ICollection <AssignmentProposition> e_tp1)
        {
            ICategoricalDistribution s1 = new ProbabilityTable(f1_t.getFor());

            // Set up required working variables
            IProposition[] props = new IProposition[s1.getFor().Size()];
            int            i     = 0;

            foreach (IRandomVariable rv in s1.getFor())
            {
                props[i] = new RandVar(rv.getName(), rv.getDomain());
                ++i;
            }
            IProposition Xtp1 = ProbUtil.constructConjunction(props);

            AssignmentProposition[] xt = new AssignmentProposition[tToTm1StateVarMap.Size()];
            IMap <IRandomVariable, AssignmentProposition> xtVarAssignMap = CollectionFactory.CreateInsertionOrderedMap <IRandomVariable, AssignmentProposition>();

            i = 0;
            foreach (IRandomVariable rv in tToTm1StateVarMap.GetKeys())
            {
                xt[i] = new AssignmentProposition(tToTm1StateVarMap.Get(rv), "<Dummy Value>");
                xtVarAssignMap.Put(rv, xt[i]);
                ++i;
            }

            // Step 1: Calculate the 1 time step prediction
            // &sum;<sub>x<sub>t</sub></sub>
            CategoricalDistributionIterator if1_t = new CategoricalDistributionIteratorImpl(transitionModel,
                                                                                            xtVarAssignMap, s1, Xtp1, xt);

            f1_t.iterateOver(if1_t);

            // Step 2: multiply by the probability of the evidence
            // and normalize
            // <b>P</b>(e<sub>t+1</sub> | X<sub>t+1</sub>)
            ICategoricalDistribution s2 = sensorModel.posteriorDistribution(ProbUtil
                                                                            .constructConjunction(e_tp1.ToArray()), Xtp1);

            return(s2.multiplyBy(s1).normalize());
        }
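
For reference, forward() implements one step of the filtering recursion (15.5 AIMA3e), with f1_t playing the role of the previous forward message f_1:t:

        f_1:t+1 = alpha * P(e_t+1 | X_t+1) * SUM over x_t of [ P(X_t+1 | x_t) * f_1:t(x_t) ]

Step 1 computes the summation (the one-step prediction), and Step 2 multiplies in the evidence likelihood, with normalize() supplying the alpha.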
Example #9
        public ICategoricalDistribution backward(ICategoricalDistribution b_kp2t, ICollection <AssignmentProposition> e_kp1)
        {
            ICategoricalDistribution b_kp1t = new ProbabilityTable(b_kp2t.getFor());

            // Set up required working variables
            IProposition[] props = new IProposition[b_kp1t.getFor().Size()];
            int            i     = 0;

            foreach (IRandomVariable rv in b_kp1t.getFor())
            {
                IRandomVariable prv = tToTm1StateVarMap.Get(rv);
                props[i] = new RandVar(prv.getName(), prv.getDomain());
                ++i;
            }
            IProposition Xk = ProbUtil.constructConjunction(props);

            AssignmentProposition[] ax_kp1 = new AssignmentProposition[tToTm1StateVarMap.Size()];
            IMap <IRandomVariable, AssignmentProposition> x_kp1VarAssignMap = CollectionFactory.CreateInsertionOrderedMap <IRandomVariable, AssignmentProposition>();

            i = 0;
            foreach (IRandomVariable rv in b_kp1t.getFor())
            {
                ax_kp1[i] = new AssignmentProposition(rv, "<Dummy Value>");
                x_kp1VarAssignMap.Put(rv, ax_kp1[i]);
                ++i;
            }
            IProposition x_kp1 = ProbUtil.constructConjunction(ax_kp1);

            props = e_kp1.ToArray();
            IProposition pe_kp1 = ProbUtil.constructConjunction(props);

            // &sum;<sub>x<sub>k+1</sub></sub>
            CategoricalDistributionIterator ib_kp2t = new CategoricalDistributionIteratorImpl2(x_kp1VarAssignMap,
                                                                                               sensorModel, transitionModel, b_kp1t, pe_kp1, Xk, x_kp1);

            b_kp2t.iterateOver(ib_kp2t);

            return(b_kp1t);
        }
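
Similarly, backward() implements one step of the backward recursion (15.9 AIMA3e), where b_kp2t is the incoming message b_k+2:t and the returned b_kp1t is b_k+1:t:

        b_k+1:t = SUM over x_k+1 of [ P(e_k+1 | x_k+1) * P(x_k+1 | X_k) * b_k+2:t(x_k+1) ]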
Example #10
        /**
         * The particle filtering algorithm implemented as a recursive update
         * operation with state (the set of samples).
         *
         * @param e
         *            <b>e</b>, the new incoming evidence
         * @return a vector of samples of size N, where each sample is a vector of
         *         assignment propositions for the X_1 state variables, which is
         *         intended to represent the generated sample for time t.
         */
        public AssignmentProposition[][] particleFiltering(AssignmentProposition[] e)
        {
            // local variables: W, a vector of weights of size N
            double[] W = new double[N];

            // for i = 1 to N do
            for (int i = 0; i < N; ++i)
            {
                /* step 1 */
                // S[i] <- sample from <b>P</b>(<b>X</b><sub>1</sub> |
                // <b>X</b><sub>0</sub> = S[i])
                sampleFromTransitionModel(i);
                /* step 2 */
                // W[i] <- <b>P</b>(<b>e</b> | <b>X</b><sub>1</sub> = S[i])
                W[i] = sensorModel.posterior(ProbUtil.constructConjunction(e), S_tp1[i]);
            }
            /* step 3 */
            // S <- WEIGHTED-SAMPLE-WITH-REPLACEMENT(N, S, W)
            S = weightedSampleWithReplacement(N, S, W);

            // return S
            return(S);
        }
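
A minimal usage sketch of particleFiltering() (the filter instance and the evidence variable are assumptions): each call advances the filter by one time step and returns the N resampled state samples.

        AssignmentProposition[] e = { new AssignmentProposition(UmbrellaTp1Rv, true) };
        AssignmentProposition[][] samples = particleFilter.particleFiltering(e);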
        public double prior(params IProposition[] phi)
        {
            return(probabilityOf(ProbUtil.constructConjunction(phi)));
        }