//
        // START-ForwardBackwardInference

        public virtual ICollection <ICategoricalDistribution> forwardBackward(ICollection <ICollection <AssignmentProposition> > ev,
                                                                              ICategoricalDistribution prior)
        {
            // local variables: fv, a vector of forward messages for steps 0,...,t
            ICollection <Matrix> fv = CollectionFactory.CreateQueue <Matrix>();
            // b, a representation of the backward message, initially all 1s
            Matrix b = hmm.createUnitMessage();
            // sv, a vector of smoothed estimates for steps 1,...,t
            ICollection <Matrix> sv = CollectionFactory.CreateQueue <Matrix>();

            // fv[0] <- prior
            fv.Add(hmm.convert(prior));
            // for i = 1 to t do
            for (int i = 0; i < ev.Size(); ++i)
            {
                // fv[i] <- FORWARD(fv[i-1], ev[i])
                fv.Add(forward(fv.Get(i), hmm.getEvidence(ev.Get(i))));
            }
            // for i = t downto 1 do
            for (int i = ev.Size() - 1; i >= 0; i--)
            {
                // sv[i] <- NORMALIZE(fv[i] * b)
                sv.Insert(0, hmm.normalize(fv.Get(i + 1).ArrayTimes(b)));
                // b <- BACKWARD(b, ev[i])
                b = backward(b, hmm.getEvidence(ev.Get(i)));
            }

            // return sv
            return(hmm.convert(sv));
        }
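
A minimal usage sketch for the matrix-based FORWARD-BACKWARD above (not part of the original source). The wrapper class name HMMForwardBackward and the nested CreateQueue call are assumptions; any class exposing the forwardBackward signature above, built from HMMExampleFactory.getUmbrellaWorldModel(), would be driven the same way.

        static void forwardBackwardUmbrellaSketch()
        {
            // Assumption: a concrete wrapper named HMMForwardBackward exists for the method above.
            var fb = new HMMForwardBackward(HMMExampleFactory.getUmbrellaWorldModel());

            // Prior beliefs: P(R<sub>0</sub>) = <0.5, 0.5>
            ICategoricalDistribution prior = new ProbabilityTable(new double[] { 0.5, 0.5 }, ExampleRV.RAIN_t_RV);

            // Evidence sequence: the umbrella is observed on days 1 and 2.
            ICollection <ICollection <AssignmentProposition> > ev = CollectionFactory.CreateQueue <ICollection <AssignmentProposition> >();
            for (int day = 1; day <= 2; day++)
            {
                ICollection <AssignmentProposition> e = CollectionFactory.CreateQueue <AssignmentProposition>();
                e.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
                ev.Add(e);
            }

            // Smoothed estimates for days 1..2; AIMA3e pg. 576 gives
            // P(R<sub>1</sub> | u<sub>1</sub>, u<sub>2</sub>) &asymp; <0.883, 0.117>
            ICollection <ICategoricalDistribution> sv = fb.forwardBackward(ev, prior);
            System.Console.WriteLine("smoothed day 1 = " + sv.Get(0));
        }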
Example #2
        public virtual ICategoricalDistribution posteriorDistribution(IProposition phi, params IProposition[] evidence)
        {
            IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

            // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
            ICategoricalDistribution dAandB    = jointDistribution(phi, conjEvidence);
            ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

            ICategoricalDistribution rVal = dAandB.divideBy(dEvidence);
            // Note: normalize() needs to be called in order to handle
            // the case where an approximate algorithm is used (i.e. the
            // division will not be exact, since the numerator and
            // denominator are calculated on separate approximate runs).
            // However, this should only be done if all of the evidence's
            // scope is bound (if not, you are in essence returning a set
            // of conditional distributions, which you do not want
            // normalized).
            bool unboundEvidence = false;

            foreach (IProposition e in evidence)
            {
                if (e.getUnboundScope().Size() > 0)
                {
                    unboundEvidence = true;
                    break;
                }
            }
            if (!unboundEvidence)
            {
                rVal.normalize();
            }

            return(rVal);
        }
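
A test-style sketch of the posterior computation above (not part of the original source; the method name is hypothetical). It checks the textbook value P(Cavity | toothache) = <0.6, 0.4> from AIMA3e pg. 493 against any IFiniteProbabilityModel covering the toothache/cavity/catch variables, exactly as the network-based tests later in this listing do.

        protected void test_PosteriorCavityGivenToothache(IFiniteProbabilityModel model)
        {
            // AIMA3e pg. 493
            // P<>(Cavity | toothache) = <0.6, 0.4>
            AssignmentProposition atoothache = new AssignmentProposition(ExampleRV.TOOTHACHE_RV, true);

            ICategoricalDistribution dCavityGivenToothache = model.posteriorDistribution(ExampleRV.CAVITY_RV, atoothache);

            Assert.AreEqual(2, dCavityGivenToothache.getValues().Length);
            assertArrayEquals(new double[] { 0.6, 0.4 }, dCavityGivenToothache.getValues(), DELTA_THRESHOLD);
        }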
Example #3
        protected void testForwardStep_UmbrellaWorld(IForwardStepInference uw)
        {
            // AIMA3e pg. 572
            // Day 0: no observations, only the security guard's prior beliefs
            // P(R<sub>0</sub>) = <0.5, 0.5>
            ICategoricalDistribution prior = new ProbabilityTable(new double[] { 0.5, 0.5 }, ExampleRV.RAIN_t_RV);

            // Day 1, the umbrella appears, so U<sub>1</sub> = true.
            // &asymp; <0.818, 0.182>
            ICollection <AssignmentProposition> e1 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e1.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
            ICategoricalDistribution f1 = uw.forward(prior, e1);

            assertArrayEquals(new double[] { 0.818, 0.182 }, f1.getValues(), DELTA_THRESHOLD);

            // Day 2, the umbrella appears, so U<sub>2</sub> = true.
            // &asymp; <0.883, 0.117>
            ICollection <AssignmentProposition> e2 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
            ICategoricalDistribution f2 = uw.forward(f1, e2);

            assertArrayEquals(new double[] { 0.883, 0.117 }, f2.getValues(),
                              DELTA_THRESHOLD);
        }
Example #4
        public void testFixedLagSmoothing_lag_2_UmbrellaWorld()
        {
            FixedLagSmoothing uw = new FixedLagSmoothing(HMMExampleFactory.getUmbrellaWorldModel(), 2);

            // Day 1 - Lag 2
            ICollection <AssignmentProposition> e1 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e1.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            ICategoricalDistribution smoothed = uw.fixedLagSmoothing(e1);

            Assert.IsNull(smoothed);

            // Day 2 - Lag 2
            ICollection <AssignmentProposition> e2 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            smoothed = uw.fixedLagSmoothing(e2);
            Assert.IsNull(smoothed);

            // Day 3 - Lag 2
            ICollection <AssignmentProposition> e3 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e3.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false));

            smoothed = uw.fixedLagSmoothing(e3);

            Assert.IsNotNull(smoothed);
            assertArrayEquals(new double[] { 0.861, 0.138 }, smoothed.getValues(), DELTA_THRESHOLD);
        }
        // function FORWARD-BACKWARD(ev, prior) returns a vector of probability distributions
        public ICollection <ICategoricalDistribution> forwardBackward(
            ICollection <ICollection <AssignmentProposition> > ev, ICategoricalDistribution prior)
        {
            // local variables: fv, a vector of forward messages for steps 0,...,t
            ICollection <ICategoricalDistribution> fv = CollectionFactory.CreateQueue <ICategoricalDistribution>();
            // b, a representation of the backward message, initially all 1s
            ICategoricalDistribution b = initBackwardMessage();
            // sv, a vector of smoothed estimates for steps 1,...,t
            ICollection <ICategoricalDistribution> sv = CollectionFactory.CreateQueue <ICategoricalDistribution>();

            // fv[0] <- prior
            fv.Add(prior);
            // for i = 1 to t do
            for (int i = 0; i < ev.Size(); ++i)
            {
                // fv[i] <- FORWARD(fv[i-1], ev[i])
                fv.Add(forward(fv.Get(i), ev.Get(i)));
            }
            // for i = t downto 1 do
            for (int i = ev.Size() - 1; i >= 0; i--)
            {
                // sv[i] <- NORMALIZE(fv[i] * b)
                sv.Insert(0, fv.Get(i + 1).multiplyBy(b).normalize());
                // b <- BACKWARD(b, ev[i])
                b = backward(b, ev.Get(i));
            }

            // return sv
            return(sv);
        }
 public CategoricalDistributionIteratorImpl(IFiniteProbabilityModel transitionModel, IMap <IRandomVariable, AssignmentProposition> xtVarAssignMap, ICategoricalDistribution s1, IProposition xtp1, AssignmentProposition[] xt)
 {
     this.transitionModel = transitionModel;
     this.xtVarAssignMap  = xtVarAssignMap;
     this.s1   = s1;
     this.xtp1 = xtp1;
     this.xt   = xt;
 }
Example #7
        public void testInferenceOnBurglaryAlarmNetwork()
        {
            IBayesianNetwork bn = BayesNetExampleFactory
                                  .constructBurglaryAlarmNetwork();

            // AIMA3e. pg. 514
            ICategoricalDistribution d = bayesInference
                                         .Ask(new IRandomVariable[] { ExampleRV.ALARM_RV },
                                              new AssignmentProposition[] {
                new AssignmentProposition(
                    ExampleRV.BURGLARY_RV, false),
                new AssignmentProposition(
                    ExampleRV.EARTHQUAKE_RV, false),
                new AssignmentProposition(
                    ExampleRV.JOHN_CALLS_RV, true),
                new AssignmentProposition(
                    ExampleRV.MARY_CALLS_RV, true)
            }, bn);

            // System.Console.WriteLine("P(Alarm | ~b, ~e, j, m)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.5577689243027888, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.44223107569721115, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 523
            // P(Burglary | JohnCalls = true, MaryCalls = true) = <0.284, 0.716>
            d = bayesInference
                .Ask(new IRandomVariable[] { ExampleRV.BURGLARY_RV },
                     new AssignmentProposition[] {
                new AssignmentProposition(
                    ExampleRV.JOHN_CALLS_RV, true),
                new AssignmentProposition(
                    ExampleRV.MARY_CALLS_RV, true)
            }, bn);

            // System.Console.WriteLine("P(Burglary | j, m)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.2841718353643929, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.7158281646356071, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 528
            // P(JohnCalls | Burglary = true)
            d = bayesInference.Ask(
                new IRandomVariable[] { ExampleRV.JOHN_CALLS_RV },
                new AssignmentProposition[] { new AssignmentProposition(
                                                  ExampleRV.BURGLARY_RV, true) }, bn);
            // System.Console.WriteLine("P(JohnCalls | b)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.8490169999999999, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.15098299999999998, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
        }
 public CategoricalDistributionIteratorImpl2(IMap <IRandomVariable, AssignmentProposition> x_kp1VarAssignMap, IFiniteProbabilityModel sensorModel, IFiniteProbabilityModel transitionModel, ICategoricalDistribution b_kp1t, IProposition pe_kp1, IProposition xk, IProposition x_kp1)
 {
     this.x_kp1VarAssignMap = x_kp1VarAssignMap;
     this.sensorModel       = sensorModel;
     this.transitionModel   = transitionModel;
     this.b_kp1t            = b_kp1t;
     this.pe_kp1            = pe_kp1;
     this.xk    = xk;
     this.x_kp1 = x_kp1;
 }
        public ICategoricalDistribution posteriorDistribution(IProposition phi,
                                                              params IProposition[] evidence)
        {
            IProposition conjEvidence = ProbUtil.constructConjunction(evidence);

            // P(A | B) = P(A AND B)/P(B) - (13.3 AIMA3e)
            ICategoricalDistribution dAandB    = jointDistribution(phi, conjEvidence);
            ICategoricalDistribution dEvidence = jointDistribution(conjEvidence);

            return(dAandB.divideBy(dEvidence));
        }
Example #10
        protected void testBackwardStep_UmbrellaWorld(IBackwardStepInference uw)
        {
            // AIMA3e pg. 575
            ICategoricalDistribution            b_kp2t = new ProbabilityTable(new double[] { 1.0, 1.0 }, ExampleRV.RAIN_t_RV);
            ICollection <AssignmentProposition> e2     = CollectionFactory.CreateQueue <AssignmentProposition>();

            e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));
            ICategoricalDistribution b1 = uw.backward(b_kp2t, e2);

            assertArrayEquals(new double[] { 0.69, 0.41 }, b1.getValues(), DELTA_THRESHOLD);
        }
Example #11
        public void testInferenceOnToothacheCavityCatchNetwork()
        {
            IBayesianNetwork bn = BayesNetExampleFactory
                                  .constructToothacheCavityCatchNetwork();

            ICategoricalDistribution d = bayesInference.Ask(
                new IRandomVariable[] { ExampleRV.CAVITY_RV },
                new AssignmentProposition[] { }, bn);

            // System.Console.WriteLine("P(Cavity)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.2, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.8, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 493
            // P(Cavity | toothache) = <0.6, 0.4>
            d = bayesInference.Ask(new IRandomVariable[] { ExampleRV.CAVITY_RV },
                                   new AssignmentProposition[] { new AssignmentProposition(
                                                                     ExampleRV.TOOTHACHE_RV, true) }, bn);

            // System.Console.WriteLine("P(Cavity | toothache)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.6, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.4, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);

            // AIMA3e pg. 497
            // P(Cavity | toothache AND catch) = <0.871, 0.129>
            d = bayesInference
                .Ask(new IRandomVariable[] { ExampleRV.CAVITY_RV },
                     new AssignmentProposition[] {
                new AssignmentProposition(
                    ExampleRV.TOOTHACHE_RV, true),
                new AssignmentProposition(ExampleRV.CATCH_RV,
                                          true)
            }, bn);

            // System.Console.WriteLine("P(Cavity | toothache, catch)=" + d);
            Assert.AreEqual(2, d.getValues().Length);
            Assert.AreEqual(0.8709677419354839, d.getValues()[0],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
            Assert.AreEqual(0.12903225806451615, d.getValues()[1],
                            ProbabilityModelImpl.DEFAULT_ROUNDING_THRESHOLD);
        }
Example #12
        // AIMA3e pg. 496
        protected void test_MeningitisStiffNeckModel_Distributions(
            IFiniteProbabilityModel model)
        {
            AssignmentProposition astiffNeck = new AssignmentProposition(
                ExampleRV.STIFF_NECK_RV, true);

            // AIMA3e pg. 497
            // P<>(Meningitis | stiffneck) = &alpha;<P(s | m)P(m), P(s | ~m)P(~m)>
            ICategoricalDistribution dMeningitisGivenStiffNeck = model
                                                                 .posteriorDistribution(ExampleRV.MENINGITIS_RV, astiffNeck);

            Assert.AreEqual(2, dMeningitisGivenStiffNeck.getValues().Length);
            Assert.AreEqual(0.0014, dMeningitisGivenStiffNeck.getValues()[0],
                            DELTA_THRESHOLD);
            Assert.AreEqual(0.9986, dMeningitisGivenStiffNeck.getValues()[1],
                            DELTA_THRESHOLD);
        }
Example #13
        public virtual double prior(params IProposition[] phi)
        {
            // Calculating the prior, therefore there is no relevant evidence;
            // just query over the scope of proposition phi in order
            // to get a joint distribution for these variables
            IProposition conjunct = ProbUtil.constructConjunction(phi);

            IRandomVariable[]        X = conjunct.getScope().ToArray();
            ICategoricalDistribution d = bayesInference.Ask(X, new AssignmentProposition[0], bayesNet);

            // Then calculate the probability of the propositions phi
            // by seeing where they hold.
            double[] probSum = new double[1];
            CategoricalDistributionIterator di = new CategoricalDistributionIteraorPrior(conjunct, probSum);

            d.iterateOver(di);

            return(probSum[0]);
        }
Example #14
        /**
         * Algorithm for smoothing with a fixed time lag of d steps, implemented as
         * an online algorithm that outputs the new smoothed estimate given the
         * observation for a new time step.
         *
         * @param et
         *            the current evidence from time step t
         * @return a distribution over <b>X</b><sub>t-d</sub>
         */
        public ICategoricalDistribution fixedLagSmoothing(ICollection <AssignmentProposition> et)
        {
            // local variables: <b>O</b><sub>t-d</sub>, <b>O</b><sub>t</sub>,
            // diagonal matrices containing the sensor model information
            Matrix O_tmd, O_t;

            // add e<sub>t</sub> to the end of e<sub>t-d:t</sub>
            e_tmd_to_t.Add(hmm.getEvidence(et));
            // <b>O</b><sub>t</sub> <- diagonal matrix containing
            // <b>P</b>(e<sub>t</sub> | X<sub>t</sub>)
            O_t = e_tmd_to_t.Get(e_tmd_to_t.Size() - 1);
            // if t > d then
            if (t > d)
            {
                // remove e<sub>t-d-1</sub> from the beginning of e<sub>t-d:t</sub>
                e_tmd_to_t.RemoveAt(0);
                // <b>O</b><sub>t-d</sub> <- diagonal matrix containing
                // <b>P</b>(e<sub>t-d</sub> | X<sub>t-d</sub>)
                O_tmd = e_tmd_to_t.Get(0);
                // <b>f</b> <- FORWARD(<b>f</b>, e<sub>t-d</sub>)
                f = forward(f, O_tmd);
                // <b>B</b> <-
                // <b>O</b><sup>-1</sup><sub>t-d</sub><b>B</b><b>T</b><b>O</b><sub>t</sub>
                B = O_tmd.Inverse().Times(hmm.getTransitionModel().Inverse()).Times(B).Times(hmm.getTransitionModel()).Times(O_t);
            }
            else
            {
                // else <b>B</b> <- <b>BTO</b><sub>t</sub>
                B = B.Times(hmm.getTransitionModel()).Times(O_t);
            }

            // if t > d then return NORMALIZE(<b>f</b> * <b>B1</b>) else return null
            ICategoricalDistribution rVal = null;

            if (t > d)
            {
                rVal = hmm.convert(hmm.normalize(f.ArrayTimes(B.Times(unitMessage))));
            }
            // t <- t + 1
            t = t + 1;
            return(rVal);
        }
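
For reference, the updates that fixedLagSmoothing's HTML-entity comments encode are, in standard notation (T is the transition matrix, O_i the diagonal sensor matrix for the evidence at step i, 1 a column of ones, alpha normalization, and the element-wise product is the code's ArrayTimes):

$$
\mathbf{B} \leftarrow
\begin{cases}
\mathbf{O}_{t-d}^{-1}\,\mathbf{T}^{-1}\,\mathbf{B}\,\mathbf{T}\,\mathbf{O}_{t}, & t > d \\
\mathbf{B}\,\mathbf{T}\,\mathbf{O}_{t}, & t \le d
\end{cases}
\qquad
\mathbf{f} \leftarrow \mathrm{FORWARD}(\mathbf{f}, \mathbf{e}_{t-d}) \text{ when } t > d,
$$

and the method returns $\alpha(\mathbf{f} \times \mathbf{B}\mathbf{1})$ once $t > d$, null otherwise.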
        public ICategoricalDistribution forward(ICategoricalDistribution f1_t, ICollection <AssignmentProposition> e_tp1)
        {
            ICategoricalDistribution s1 = new ProbabilityTable(f1_t.getFor());

            // Set up required working variables
            IProposition[] props = new IProposition[s1.getFor().Size()];
            int            i     = 0;

            foreach (IRandomVariable rv in s1.getFor())
            {
                props[i] = new RandVar(rv.getName(), rv.getDomain());
                ++i;
            }
            IProposition Xtp1 = ProbUtil.constructConjunction(props);

            AssignmentProposition[] xt = new AssignmentProposition[tToTm1StateVarMap.Size()];
            IMap <IRandomVariable, AssignmentProposition> xtVarAssignMap = CollectionFactory.CreateInsertionOrderedMap <IRandomVariable, AssignmentProposition>();

            i = 0;
            foreach (IRandomVariable rv in tToTm1StateVarMap.GetKeys())
            {
                xt[i] = new AssignmentProposition(tToTm1StateVarMap.Get(rv), "<Dummy Value>");
                xtVarAssignMap.Put(rv, xt[i]);
                ++i;
            }

            // Step 1: Calculate the one-step prediction
            // &sum;<sub>x<sub>t</sub></sub>
            CategoricalDistributionIterator if1_t = new CategoricalDistributionIteratorImpl(transitionModel,
                                                                                            xtVarAssignMap, s1, Xtp1, xt);

            f1_t.iterateOver(if1_t);

            // Step 2: multiply by the probability of the evidence
            // and normalize
            // <b>P</b>(e<sub>t+1</sub> | X<sub>t+1</sub>)
            ICategoricalDistribution s2 = sensorModel.posteriorDistribution(ProbUtil
                                                                            .constructConjunction(e_tp1.ToArray()), Xtp1);

            return(s2.multiplyBy(s1).normalize());
        }
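
Steps 1 and 2 above implement the standard filtering recursion from AIMA3e Section 15.2, shown here for reference (Step 1 computes the summation held in s1; Step 2 supplies the sensor term s2 and normalizes):

$$
\mathbf{P}(X_{t+1} \mid \mathbf{e}_{1:t+1}) = \alpha\,\mathbf{P}(\mathbf{e}_{t+1} \mid X_{t+1}) \sum_{x_t} \mathbf{P}(X_{t+1} \mid x_t)\,P(x_t \mid \mathbf{e}_{1:t}).
$$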
        public ICategoricalDistribution backward(ICategoricalDistribution b_kp2t, ICollection <AssignmentProposition> e_kp1)
        {
            ICategoricalDistribution b_kp1t = new ProbabilityTable(b_kp2t.getFor());

            // Set up required working variables
            IProposition[] props = new IProposition[b_kp1t.getFor().Size()];
            int            i     = 0;

            foreach (IRandomVariable rv in b_kp1t.getFor())
            {
                IRandomVariable prv = tToTm1StateVarMap.Get(rv);
                props[i] = new RandVar(prv.getName(), prv.getDomain());
                ++i;
            }
            IProposition Xk = ProbUtil.constructConjunction(props);

            AssignmentProposition[] ax_kp1 = new AssignmentProposition[tToTm1StateVarMap.Size()];
            IMap <IRandomVariable, AssignmentProposition> x_kp1VarAssignMap = CollectionFactory.CreateInsertionOrderedMap <IRandomVariable, AssignmentProposition>();

            i = 0;
            foreach (IRandomVariable rv in b_kp1t.getFor())
            {
                ax_kp1[i] = new AssignmentProposition(rv, "<Dummy Value>");
                x_kp1VarAssignMap.Put(rv, ax_kp1[i]);
                ++i;
            }
            IProposition x_kp1 = ProbUtil.constructConjunction(ax_kp1);

            props = e_kp1.ToArray();
            IProposition pe_kp1 = ProbUtil.constructConjunction(props);

            // &sum;<sub>x<sub>k+1</sub></sub>
            CategoricalDistributionIterator ib_kp2t = new CategoricalDistributionIteratorImpl2(x_kp1VarAssignMap,
                                                                                               sensorModel, transitionModel, b_kp1t, pe_kp1, Xk, x_kp1);

            b_kp2t.iterateOver(ib_kp2t);

            return(b_kp1t);
        }
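
Likewise, the summation driven by CategoricalDistributionIteratorImpl2 accumulates the standard backward message from AIMA3e Section 15.2 (b_kp2t supplies b_{k+2:t}, and the result is returned in b_kp1t):

$$
\mathbf{b}_{k+1:t} = \sum_{x_{k+1}} P(\mathbf{e}_{k+1} \mid x_{k+1})\,\mathbf{P}(x_{k+1} \mid X_k)\,\mathbf{b}_{k+2:t}(x_{k+1}).
$$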
        public override ICollection <ICategoricalDistribution> forwardBackward(ICollection <ICollection <AssignmentProposition> > ev, ICategoricalDistribution prior)
        {
            // local variables: f, the forward message <- prior
            Matrix f = hmm.convert(prior);
            // b, a representation of the backward message, initially all 1s
            Matrix b = hmm.createUnitMessage();
            // sv, a vector of smoothed estimates for steps 1,...,t
            ICollection <Matrix> sv = CollectionFactory.CreateQueue <Matrix>();

            // for i = 1 to t do
            for (int i = 0; i < ev.Size(); ++i)
            {
                // f <- FORWARD(f, ev[i])
                f = forward(f, hmm.getEvidence(ev.Get(i)));
            }
            // for i = t downto 1 do
            for (int i = ev.Size() - 1; i >= 0; i--)
            {
                // sv[i] <- NORMALIZE(f * b)
                sv.Insert(0, hmm.normalize(f.ArrayTimes(b)));
                Matrix e = hmm.getEvidence(ev.Get(i));
                // b <- BACKWARD(b, ev[i])
                b = backward(b, e);
                // f1:t <-
                // NORMALIZE((T<sup>T</sup>)<sup>-1</sup>O<sup>-1</sup><sub>t+1</sub>f<sub>1:t+1</sub>)
                f = forwardRecover(e, f);
            }

            // return sv
            return(hmm.convert(sv));
        }
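
In the constant-space variant above, forwardRecover undoes one filtering step so that only the single message f (rather than the whole vector fv) needs to be stored; the inline comment corresponds to

$$
\mathbf{f}_{1:t} = \alpha\,(\mathbf{T}^{\top})^{-1}\,\mathbf{O}_{t+1}^{-1}\,\mathbf{f}_{1:t+1}.
$$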
 public ICategoricalDistribution divideBy(ICategoricalDistribution divisor)
 {
     return(divideBy((ProbabilityTable)divisor));
 }
 public virtual ICategoricalDistribution forward(ICategoricalDistribution f1_t, ICollection <AssignmentProposition> e_tp1)
 {
     return(hmm.convert(forward(hmm.convert(f1_t), hmm.getEvidence(e_tp1))));
 }
 public virtual ICategoricalDistribution backward(ICategoricalDistribution b_kp2t, ICollection <AssignmentProposition> e_kp1)
 {
     return(hmm.convert(backward(hmm.convert(b_kp2t), hmm.getEvidence(e_kp1))));
 }
Example #21
        static void fixedLagSmoothingDemo()
        {
            System.Console.WriteLine("DEMO: Fixed-Lag-Smoothing");
            System.Console.WriteLine("=========================");
            System.Console.WriteLine("Lag = 1");
            System.Console.WriteLine("-------");
            FixedLagSmoothing uw = new FixedLagSmoothing(HMMExampleFactory.getUmbrellaWorldModel(), 1);

            // Day 1 - Lag 1
            ICollection <AssignmentProposition> e1 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e1.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            ICategoricalDistribution smoothed = uw.fixedLagSmoothing(e1);

            System.Console.WriteLine("Day 1 (Umbrella_t=true) smoothed:\nday 1=" + smoothed);

            // Day 2 - Lag 1
            ICollection <AssignmentProposition> e2 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            smoothed = uw.fixedLagSmoothing(e2);

            System.Console.WriteLine("Day 2 (Umbrella_t=true) smoothed:\nday 1=" + smoothed);

            // Day 3 - Lag 1
            ICollection <AssignmentProposition> e3 = CollectionFactory.CreateQueue <AssignmentProposition>();

            e3.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false));

            smoothed = uw.fixedLagSmoothing(e3);

            System.Console.WriteLine("Day 3 (Umbrella_t=false) smoothed:\nday 2=" + smoothed);

            System.Console.WriteLine("-------");
            System.Console.WriteLine("Lag = 2");
            System.Console.WriteLine("-------");

            uw = new FixedLagSmoothing(HMMExampleFactory.getUmbrellaWorldModel(), 2);

            // Day 1 - Lag 2
            e1 = CollectionFactory.CreateQueue <AssignmentProposition>();
            e1.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            smoothed = uw.fixedLagSmoothing(e1);

            System.Console.WriteLine("Day 1 (Umbrella_t=true) smoothed:\nday 1=" + smoothed);

            // Day 2 - Lag 2
            e2 = CollectionFactory.CreateQueue <AssignmentProposition>();
            e2.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, true));

            smoothed = uw.fixedLagSmoothing(e2);

            System.Console.WriteLine("Day 2 (Umbrella_t=true) smoothed:\nday 1=" + smoothed);

            // Day 3 - Lag 2
            e3 = CollectionFactory.CreateQueue <AssignmentProposition>();
            e3.Add(new AssignmentProposition(ExampleRV.UMBREALLA_t_RV, false));

            smoothed = uw.fixedLagSmoothing(e3);

            System.Console.WriteLine("Day 3 (Umbrella_t=false) smoothed:\nday 1=" + smoothed);

            System.Console.WriteLine("=========================");
        }
 public ICategoricalDistribution multiplyBy(ICategoricalDistribution multiplier)
 {
     return(pointwiseProduct((ProbabilityTable)multiplier));
 }
 public virtual Matrix convert(ICategoricalDistribution fromCD)
 {
     double[] values = fromCD.getValues();
     return(new Matrix(values, values.Length));
 }
 public ICategoricalDistribution multiplyByPOS(ICategoricalDistribution multiplier, params IRandomVariable[] prodVarOrder)
 {
     return(pointwiseProductPOS((ProbabilityTable)multiplier, prodVarOrder));
 }
Example #25
        //
        // PROTECTED
        //
        protected void test_RollingPairFairDiceModel_Distributions(IFiniteProbabilityModel model)
        {
            AssignmentProposition    ad1_1 = new AssignmentProposition(ExampleRV.DICE_1_RV, 1);
            ICategoricalDistribution dD1_1 = model.priorDistribution(ad1_1);

            assertArrayEquals(new double[] { 1.0 / 6.0 }, dD1_1.getValues(), DELTA_THRESHOLD);

            ICategoricalDistribution dPriorDice1 = model.priorDistribution(ExampleRV.DICE_1_RV);

            assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 },
                              dPriorDice1.getValues(), DELTA_THRESHOLD);

            ICategoricalDistribution dPriorDice2 = model.priorDistribution(ExampleRV.DICE_2_RV);

            assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 },
                              dPriorDice2.getValues(), DELTA_THRESHOLD);

            ICategoricalDistribution dJointDice1Dice2 = model.jointDistribution(ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);

            Assert.AreEqual(36, dJointDice1Dice2.getValues().Length);
            for (int i = 0; i < dJointDice1Dice2.getValues().Length; i++)
            {
                Assert.AreEqual(1.0 / 36.0, dJointDice1Dice2.getValues()[i], DELTA_THRESHOLD);
            }

            ICategoricalDistribution dJointDice2Dice1 = model.jointDistribution(ExampleRV.DICE_2_RV, ExampleRV.DICE_1_RV);

            Assert.AreEqual(36, dJointDice2Dice1.getValues().Length);
            for (int i = 0; i < dJointDice2Dice1.getValues().Length; i++)
            {
                Assert.AreEqual(1.0 / 36.0, dJointDice2Dice1.getValues()[i], DELTA_THRESHOLD);
            }

            //
            // Test Sets of events
            IntegerSumProposition total11 = new IntegerSumProposition("Total",
                                                                      new FiniteIntegerDomain(11), ExampleRV.DICE_1_RV,
                                                                      ExampleRV.DICE_2_RV);

            // P<>(Total = 11) = <2.0/36.0>
            assertArrayEquals(new double[] { 2.0 / 36.0 }, model.priorDistribution(total11).getValues(), DELTA_THRESHOLD);

            // P<>(Dice1, Total = 11)
            // = <0.0, 0.0, 0.0, 0.0, 1.0/36.0, 1.0/36.0>
            assertArrayEquals(new double[] { 0, 0, 0, 0, 1.0 / 36.0, 1.0 / 36.0 },
                              model.priorDistribution(ExampleRV.DICE_1_RV, total11)
                              .getValues(), DELTA_THRESHOLD);

            EquivalentProposition doubles = new EquivalentProposition("Doubles",
                                                                      ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);

            // P(Doubles) = <1.0/6.0>
            assertArrayEquals(new double[] { 1.0 / 6.0 }, model
                              .priorDistribution(doubles).getValues(), DELTA_THRESHOLD);

            //
            // Test posterior
            //
            // P<>(Dice1 | Total = 11)
            // = <0.0, 0.0, 0.0, 0.0, 0.5, 0.5>
            assertArrayEquals(new double[] { 0, 0, 0, 0, 0.5, 0.5 }, model
                              .posteriorDistribution(ExampleRV.DICE_1_RV, total11)
                              .getValues(), DELTA_THRESHOLD);

            // P<>(Dice1 | Doubles) = <1/6, 1/6, 1/6, 1/6, 1/6, 1/6>
            assertArrayEquals(new double[] { 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0, 1.0 / 6.0 }, model
                              .posteriorDistribution(ExampleRV.DICE_1_RV, doubles)
                              .getValues(), DELTA_THRESHOLD);

            ICategoricalDistribution dPosteriorDice1GivenDice2 = model
                                                                 .posteriorDistribution(ExampleRV.DICE_1_RV, ExampleRV.DICE_2_RV);

            Assert.AreEqual(36, dPosteriorDice1GivenDice2.getValues().Length);
            for (int i = 0; i < dPosteriorDice1GivenDice2.getValues().Length; i++)
            {
                Assert.AreEqual(1.0 / 6.0, dPosteriorDice1GivenDice2.getValues()[i], DELTA_THRESHOLD);
            }

            ICategoricalDistribution dPosteriorDice2GivenDice1 = model
                                                                 .posteriorDistribution(ExampleRV.DICE_2_RV, ExampleRV.DICE_1_RV);

            Assert.AreEqual(36, dPosteriorDice2GivenDice1.getValues().Length);
            for (int i = 0; i < dPosteriorDice2GivenDice1.getValues().Length; i++)
            {
                Assert.AreEqual(1.0 / 6.0, dPosteriorDice2GivenDice1.getValues()[i], DELTA_THRESHOLD);
            }
        }