Example No. 1
 public HiddenMarkovModel(RandomVariable priorDistribution,
         TransitionModel tm, SensorModel sm)
 {
     this.priorDistribution = priorDistribution;
     this._transitionModel = tm;
     this._sensorModel = sm;
 }
        public void SetDistribution(
            RandomVariable variable, 
            IDictionary<string, string> variableAbbreviations,
            IEnumerable<RandomVariable> variableParents,
            DiscreteDistribution distribution)
        {
            _variable = variable;
            _variableParents = variableParents;
            _variableAbbreviations = variableAbbreviations;

            if (variable != null)
            {
                if (distribution != null)
                {
                    _distributions = new DistributionSet(distribution);
                }
                else
                {
                    _distributions = variable.Distributions;
                }
            }
            else
            {
                _distributions = new DistributionSet();
            }

            RefreshUI();
        }
Example No. 3
 public RandomVariable calculate_next_backward_message(
         RandomVariable forwardBelief,
         RandomVariable present_backward_message, String perception)
 {
     RandomVariable result = present_backward_message.duplicate();
     // System.Console.WriteLine("fb :-calculating new backward message");
     // System.Console.WriteLine("fb :-diagonal matrix from sens model = ");
     Matrix oMatrix = _sensorModel.asMatrix(perception);
     // System.Console.WriteLine(oMatrix);
     Matrix transitionMatrix = _transitionModel.asMatrix(); // action should be passed in here?
     // System.Console.WriteLine("fb :-present backward message = "
     // +present_backward_message);
     Matrix backwardMatrix = transitionMatrix.times(oMatrix
             .times(present_backward_message.asMatrix()));
     Matrix resultMatrix = backwardMatrix.arrayTimes(forwardBelief
             .asMatrix());
     result.updateFrom(resultMatrix);
     result.normalize();
     // System.Console.WriteLine("fb :-normalized new backward message = "
     // +result);
     return result;
 }
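For reference, the computation above is the backward step of forward-backward smoothing (AIMA ch. 15): with T the transition matrix and O the diagonal observation matrix for the given perception,

    b_{k+1:t} = T · O_{k+1} · b_{k+2:t}
    P(X_k | e_{1:t}) = α · f_{1:k} ⊙ b_{k+1:t}        (⊙ = element-wise product, α = normalization)

which matches the transitionMatrix.times(oMatrix.times(...)), arrayTimes(...) and normalize() sequence in the code.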
Example No. 4
        // function ELIMINATION-ASK(X, e, bn) returns a distribution over X
        /**
         * The ELIMINATION-ASK algorithm in Figure 14.11.
         * 
         * @param X
         *            the query variables.
         * @param e
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} ∪ E ∪ Y, where Y are the hidden
         *            variables.
         * @return a distribution over the query variables.
         */

        public CategoricalDistribution eliminationAsk(RandomVariable[] X,
                                                      AssignmentProposition[] e, BayesianNetwork bn)
        {

            Set<RandomVariable> hidden = new Set<RandomVariable>();
            List<RandomVariable> VARS = new List<RandomVariable>();
            calculateVariables(X, e, bn, hidden, VARS);

            // factors <- []
            List<Factor> factors = new List<Factor>();
            // for each var in ORDER(bn.VARS) do
            foreach (RandomVariable var in order(bn, VARS))
            {
                // factors <- [MAKE-FACTOR(var, e) | factors]
                factors.Insert(0, makeFactor(var, e, bn));
                // if var is hidden variable then factors <- SUM-OUT(var, factors)
                if (hidden.Contains(var))
                {
                    factors = sumOut(var, factors, bn);
                }
            }
            // return NORMALIZE(POINTWISE-PRODUCT(factors))
            Factor product = pointwiseProduct(factors);
            // Note: Want to ensure the order of the product matches the
            // query variables
            return ((ProbabilityTable) product.pointwiseProductPOS(_identity, X))
                .normalize();
        }
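A minimal usage sketch for the method above, using only classes shown elsewhere in this collection. The class name EliminationAsk and its parameterless constructor are assumptions; the network, query variable and evidence variable are supplied by the caller.

        // Hedged sketch: `EliminationAsk` (name and parameterless constructor) is
        // an assumption; bn, queryRv and evidenceRv come from the caller.
        public static CategoricalDistribution AskWithSingleEvidence(
            BayesianNetwork bn, RandomVariable queryRv,
            RandomVariable evidenceRv, object observedValue)
        {
            AssignmentProposition[] evidence =
            {
                new AssignmentProposition(evidenceRv, observedValue)
            };
            // Distribution over queryRv given the observed evidence.
            return new EliminationAsk().eliminationAsk(
                new RandomVariable[] { queryRv }, evidence, bn);
        }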
Example No. 5
 public FixedLagSmoothing(HiddenMarkovModel hmm, int timelag)
 {
     this.hmm = hmm;
     this.timelag = timelag;
     this.evidenceFromSmoothedStepToPresent = new List<String>();
     this.time = 1;
     this.forwardMessage = hmm.prior();
     this.B = hmm.transitionModel().unitMatrix();
 }
Example No. 6
        // function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)
        /**
         * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
         * evidence in a Bayesian Network.
         * 
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution gibbsAsk(RandomVariable[] X,
                                                AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil
                .expectedSizeOfCategoricalDistribution(X)];
            // Z, the nonevidence variables in bn
            Set<RandomVariable> Z = new Set<RandomVariable>(
                bn.getVariablesInTopologicalOrder());
            foreach (AssignmentProposition ap in e)
            {
                Z.Remove(ap.getTermVariable());
            }
            // <b>x</b>, the current state of the network, initially copied from e
            Map<RandomVariable, Object> x = new LinkedHashMap<RandomVariable, Object>();
            foreach (AssignmentProposition ap in e)
            {
                x.Add(ap.getTermVariable(), ap.getValue());
            }

            // initialize <b>x</b> with random values for the variables in Z
            foreach (RandomVariable Zi in Z)
            {
                x.put(Zi, ProbUtil.randomSample(bn.getNode(Zi), x, randomizer));
            }

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // for each Z<sub>i</sub> in Z do
                foreach (RandomVariable Zi in Z)
                {
                    // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
                    // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>))
                    x.put(Zi,
                          ProbUtil.mbRandomSample(bn.getNode(Zi), x, randomizer));
                }
                // Note: the count update is moved outside the previous for loop.
                // Updating it inside that loop, as shown in Figure 14.16, only
                // works correctly in the case of a single query variable X.
                // With multiple query variables, rare events would get weighted
                // incorrectly if counted there. For a single variable this does
                // not matter, as each possible value just gets counted |Z| times,
                // leaving the same ratios after normalization (i.e. it is still
                // more efficient to count outside the loop).
                //
                // <b>N</b>[x] <- <b>N</b>[x] + 1
                // where x is the value of X in <b>x</b>
                N[ProbUtil.indexOf(X, x)] += 1.0;
            }
            // return NORMALIZE(<b>N</b>)
            return new ProbabilityTable(N, X).normalize();
        }
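The per-variable sampling step above draws from the Markov-blanket distribution; assuming the usual definition (AIMA §14.5), mbRandomSample realizes

    P(z_i | mb(Z_i)) = α · P(z_i | parents(Z_i)) · Π_{Y_j ∈ children(Z_i)} P(y_j | parents(Y_j))

so each Z_i is resampled conditioned only on its parents, its children, and the children's other parents in the current state x.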
Example No. 7
        public RandomVariable predict(RandomVariable aBelief, String action)
        {
            RandomVariable newBelief = aBelief.duplicate();

            Matrix beliefMatrix = aBelief.asMatrix();
            Matrix transitionMatrix = _transitionModel.asMatrix(action);
            Matrix predicted = transitionMatrix.transpose().times(beliefMatrix);
            newBelief.updateFrom(predicted);
            return newBelief;
        }
Example No. 8
 public AbstractTermProposition(RandomVariable var)
 {
     if (null == var)
     {
         throw new ArgumentException(
             "The Random Variable for the Term must be specified.");
     }
     this.termVariable = var;
     addScope(this.termVariable);
 }
Example No. 9
 public static JObject ToJObject(this RandomVariable rv)
 {
     return(new JObject(
                new JProperty("name", rv.Name),
                new JProperty("space", rv.Space.ToJObject()),
                new JProperty("distributionSet", rv.Distributions.ToJObject()),
                new JProperty("parents",
                              new JArray(rv.Parents)
                              )
                ));
 }
Example No. 10
        public RandomVariable Predict(RandomVariable aBelief, string action)
        {
            RandomVariable newBelief = aBelief.Duplicate();

            Matrix beliefMatrix     = aBelief.AsMatrix();
            Matrix transitionMatrix = this.TransitionModel.AsMatrix(action);
            Matrix predicted        = transitionMatrix.Transpose().Times(beliefMatrix);

            newBelief.UpdateFrom(predicted);
            return(newBelief);
        }
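In matrix form this is the prediction step of the filtering recursion: with f the current belief vector and T the transition matrix for the chosen action (rows assumed to be indexed by the start state),

    f_pred = Tᵀ · f        i.e.  P(X_{t+1} | e_{1:t}, a) = Σ_{x_t} P(X_{t+1} | x_t, a) · P(x_t | e_{1:t})

which is what TransitionModel.AsMatrix(action) followed by Transpose().Times(beliefMatrix) computes.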
Example No. 11
        public RandomVariable predict(RandomVariable aBelief, String action)
        {
            RandomVariable newBelief = aBelief.duplicate();

            Matrix beliefMatrix     = aBelief.asMatrix();
            Matrix transitionMatrix = _transitionModel.asMatrix(action);
            Matrix predicted        = transitionMatrix.transpose().times(beliefMatrix);

            newBelief.updateFrom(predicted);
            return(newBelief);
        }
Example No. 12
        public static Matrix <R> random <R>(RandomVariable <R> rand, int n, int m) where R : FieldLikeObject <R>
        {
            var ent = new R[n, m];

            Parallel.For(0, n, i => {
                Parallel.For(0, m, j => {
                    ent[i, j] = rand.realize();
                });
            });

            return(ent);
        }
Example No. 13
        public void testRoundTripConversion()
        {
            RandomVariable rv = particleSet.toRandomVariable();
            Randomizer     r  = new MockRandomizer(new double[] { 0.1, 0.2, 0.3, 0.4,
                                                                  0.9 });
            ParticleSet ps2 = rv.toParticleSet(rainman, r, 10);

            Assert.AreEqual(8, ps2
                            .numberOfParticlesWithState(HmmConstants.RAINING));
            Assert.AreEqual(2, ps2
                            .numberOfParticlesWithState(HmmConstants.NOT_RAINING));
        }
Example No. 14
 internal void RequestSelectVariable(RandomVariable rv)
 {
     if (rv != null)
     {
         Model.SelectedVariable     = rv.Name;
         Model.SelectedVariableMode = Mode.Inspecting;
     }
     else
     {
         Model.SelectedVariable     = null;
         Model.SelectedVariableMode = Mode.Inspecting;
     }
 }
Example No. 15
        public FullCPTNode(RandomVariable var, double[] values, params Node[] parents)
            : base(var, parents)
        {
            RandomVariable[] conditionedOn = new RandomVariable[getParents().size()];
            int i = 0;

            foreach (Node p in getParents())
            {
                conditionedOn[i++] = p.getRandomVariable();
            }

            cpt = new CPT(var, values, conditionedOn);
        }
Example No. 16
        public RandomVariable smooth(String perception)
        {

            evidenceFromSmoothedStepToPresent.Add(perception);
            Matrix O_t = hmm.sensorModel().asMatrix(perception);
            Matrix transitionMatrix = hmm.transitionModel().asMatrix();
            if (time > timelag)
            {

                forwardMessage = hmm.forward(forwardMessage, perception);
                // This seems WRONG. I think this should be
                // forwardMessage = hmm.forward(forwardMessage,
                //     evidenceFromSmoothedStepToPresent[0]);
                // i.e. the perception at t-d; the book's algorithm
                // uses the latest perception.
                evidenceFromSmoothedStepToPresent.RemoveAt(0);
                Matrix O_t_minus_d = hmm.sensorModel().asMatrix(
                        evidenceFromSmoothedStepToPresent[0]);

                B = O_t_minus_d.inverse().times(
                        transitionMatrix.inverse().times(
                                B.times(transitionMatrix.times(O_t))));

            }
            else
            {

                B = B.times(transitionMatrix.times(O_t));

            }
            time += 1;
            if (time > timelag)
            {

                Matrix one = hmm.prior().createUnitBelief().asMatrix();
                Matrix forwardMatrix = forwardMessage.asMatrix();
                RandomVariable result = hmm.prior().duplicate();
                Matrix backwardMessage = (B.times(one));

                result.updateFrom(forwardMatrix.arrayTimes(backwardMessage));

                result.normalize();
                return result;
            }
            else
            {
                return null;
            }
        }
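For orientation, the two branches above follow the fixed-lag smoothing update of AIMA (Fig. 15.6): while t <= d the backward transform is accumulated as B ← B · T · O_t, and once t > d it is slid forward one step via

    B ← O_{t-d}⁻¹ · T⁻¹ · B · T · O_t

after which the smoothed estimate at t-d is α · f ⊙ (B · 1), which is the arrayTimes/normalize block at the end of the method.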
Example No. 17
        private void AddNode(GraphNode graphNode)
        {
            // Add to visual tree.
            xRoot.Children.Add(graphNode);
            graphNode.SetValue(Canvas.ZIndexProperty, LayerUnimportantNodes);

            // Add to internal list.
            _nodes.Add(graphNode);

            RandomVariable variable = (RandomVariable)graphNode.Tag;

            variable.UserData = graphNode;

            // Ensure canvas is large enough.
            {
                const double padding = 100;
                double       maxX    = _nodes.Max(n => n.Position.X) + padding;
                double       maxY    = _nodes.Max(n => n.Position.Y) + padding;
                xRoot.Width  = Math.Max(xRoot.Width, maxX);
                xRoot.Height = Math.Max(xRoot.Height, maxY);
            }

            // Events.
            graphNode.MouseUp += delegate(object sender, MouseButtonEventArgs e)
            {
                if (graphNode.State == GraphNode.StateEnum.Selecting)
                {
                    App.Current.MainWindow.RequestConfigureVariable(variable);
                }
                else
                {
                    App.Current.MainWindow.RequestSelectVariable(variable);
                }

                e.Handled = true;
            };
            graphNode.SliceChosen += delegate(int sliceIndex, int scenarioIndex)
            {
                Debug.Assert(scenarioIndex == 1 || scenarioIndex == 2);

                if (sliceIndex == -1)
                {
                    App.Current.MainWindow.RequestConfigureVariableWithEvidence(variable, scenarioIndex, null);
                }
                else
                {
                    float evidenceValue = variable.Space.Values.ElementAt(sliceIndex);
                    App.Current.MainWindow.RequestConfigureVariableWithEvidence(variable, scenarioIndex, evidenceValue);
                }
            };
        }
Example No. 18
            public void setExtendedValue(RandomVariable rv, Object value)
            {
                int idx = varIdxs.get(rv);

                extendedValues[idx] = value;
                if (idx >= hiddenStart)
                {
                    extendedIdx = idx;
                }
                else
                {
                    extendedIdx = hiddenStart - 1;
                }
            }
Example No. 19
        public FullCPTNode(RandomVariable var, double[] values, params Node[] parents)
            : base(var, parents)
        {


            RandomVariable[] conditionedOn = new RandomVariable[getParents().size()];
            int i = 0;
            foreach (Node p in getParents())
            {
                conditionedOn[i++] = p.getRandomVariable();
            }

            cpt = new CPT(var, values, conditionedOn);
        }
Example No. 20
    public GameObject getFish()
    {
        int rndVal;

        for (int i = fishIdx.Length - 1; i >= 0; i--)
        {
            rndVal = RandomVariable.getRandomValue(0, i * 3);
            if (rndVal == 0)
            {
                return(getFish(i));
            }
        }

        return(getFish(0));
    }
Example No. 21
        public static HiddenMarkovModel createRainmanHMM()
        {
            List <String> states = new List <String> {
                HmmConstants.RAINING, HmmConstants.NOT_RAINING
            };
            // no actions, because the observer has no way of changing the hidden
            // state and is passive
            List <String> perceptions = new List <String> {
                HmmConstants.SEE_UMBRELLA, HmmConstants.SEE_NO_UMBRELLA
            };

            RandomVariable prior = new RandomVariable(states);

            TransitionModel tm = new TransitionModel(states);

            // tm.setTransitionModelValue(start_state, action, end_state,
            // probability);
            // given a start state and an action the probability of the end state is
            // probability
            tm.setTransitionProbability(HmmConstants.RAINING, HmmConstants.RAINING,
                                        0.7);
            tm.setTransitionProbability(HmmConstants.RAINING,
                                        HmmConstants.NOT_RAINING, 0.3);
            tm.setTransitionProbability(HmmConstants.NOT_RAINING,
                                        HmmConstants.RAINING, 0.3);
            tm.setTransitionProbability(HmmConstants.NOT_RAINING,
                                        HmmConstants.NOT_RAINING, 0.7);

            SensorModel sm = new SensorModel(states, perceptions);

            // sm.setSensingProbability(state, perception, p); given a state the
            // probability of a perception is p
            sm.setSensingProbability(HmmConstants.RAINING,
                                     HmmConstants.SEE_UMBRELLA, 0.9);
            sm.setSensingProbability(HmmConstants.RAINING,
                                     HmmConstants.SEE_NO_UMBRELLA, 0.1);
            sm.setSensingProbability(HmmConstants.NOT_RAINING,
                                     HmmConstants.SEE_UMBRELLA, 0.2);
            sm.setSensingProbability(HmmConstants.NOT_RAINING,
                                     HmmConstants.SEE_NO_UMBRELLA, 0.8);

            HiddenMarkovModel hmm = new HiddenMarkovModel(prior, tm, sm);

            // hmm.setSensorModelValue(state,perception,p); given a state the
            // probability of a perception is p

            return(hmm);
        }
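Written out, this is the umbrella world: the transition matrix and the diagonal sensor matrix for the SEE_UMBRELLA perception implied by the probabilities above are

    T = | 0.7  0.3 |        O_umbrella = | 0.9  0.0 |
        | 0.3  0.7 |                     | 0.0  0.2 |

with state order (RAINING, NOT_RAINING); sensorModel().asMatrix(perception) elsewhere in these examples builds exactly this kind of diagonal matrix of P(perception | state).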
Example No. 22
        public void SetSelectedVariable(string variableName)
        {
            foreach (var edge in _edges)
            {
                edge.Opacity = 1.0;
                edge.SetValue(Canvas.ZIndexProperty, LayerUnimportantEdges);
            }

            foreach (var node in _nodes)
            {
                node.Opacity = 1.0;

                RandomVariable nodeRV = (RandomVariable)node.Tag;
                if (nodeRV.Name == variableName)
                {
                    node.State = GraphNode.StateEnum.Selecting;
                    node.SetValue(Canvas.ZIndexProperty, LayerSelectedNodes);

                    foreach (var edge in
                             _edges.Where(e => e.To == node || e.From == node))
                    {
                        edge.SetValue(Canvas.ZIndexProperty, LayerSelectedNodesEdges);
                    }
                }
                else
                {
                    node.SetValue(Canvas.ZIndexProperty, LayerUnimportantNodes);
                    if (_interestVariables != null)
                    {
                        if (_interestVariables.Contains(nodeRV.Name))
                        {
                            node.State = GraphNode.StateEnum.Idling;
                        }
                        else
                        {
                            node.State = GraphNode.StateEnum.Minimized;
                        }
                    }
                    else
                    {
                        node.State = GraphNode.StateEnum.Idling;
                    }
                }
            }

            _selectedVariableName = variableName;
        }
Example No. 23
        public CategoricalDistribution jointDistribution(
            params IProposition[] propositions)
        {
            ProbabilityTable d        = null;
            IProposition     conjProp = ProbUtil
                                        .constructConjunction(propositions);
            LinkedHashSet <RandomVariable> vars = new LinkedHashSet <RandomVariable>(
                conjProp.getUnboundScope());

            if (vars.Count > 0)
            {
                RandomVariable[] distVars = new RandomVariable[vars.Count];
                vars.CopyTo(distVars);

                ProbabilityTable ud     = new ProbabilityTable(distVars);
                Object[]         values = new Object[vars.Count];

                //ProbabilityTable.Iterator di = new ProbabilityTable.Iterator() {

                //    public void iterate(Map<RandomVariable, Object> possibleWorld,
                //            double probability) {
                //        if (conjProp.holds(possibleWorld)) {
                //            int i = 0;
                //            for (RandomVariable rv : vars) {
                //                values[i] = possibleWorld.get(rv);
                //                i++;
                //            }
                //            int dIdx = ud.getIndex(values);
                //            ud.setValue(dIdx, ud.getValues()[dIdx] + probability);
                //        }
                //    }
                //};

                //distribution.iterateOverTable(di);
                // TODO:
                d = ud;
            }
            else
            {
                // No Unbound Variables, therefore just return
                // the singular probability related to the proposition.
                d = new ProbabilityTable();
                d.setValue(0, prior(propositions));
            }
            return(d);
        }
Example No. 24
        public RandomVariable smooth(String perception)
        {
            evidenceFromSmoothedStepToPresent.Add(perception);
            Matrix O_t = hmm.sensorModel().asMatrix(perception);
            Matrix transitionMatrix = hmm.transitionModel().asMatrix();

            if (time > timelag)
            {
                forwardMessage = hmm.forward(forwardMessage, perception);
                // This seems WRONG. I think this should be
                // forwardMessage = hmm.forward(forwardMessage,
                //     evidenceFromSmoothedStepToPresent[0]);
                // i.e. the perception at t-d; the book's algorithm
                // uses the latest perception.
                evidenceFromSmoothedStepToPresent.RemoveAt(0);
                Matrix O_t_minus_d = hmm.sensorModel().asMatrix(
                    evidenceFromSmoothedStepToPresent[0]);

                B = O_t_minus_d.inverse().times(
                    transitionMatrix.inverse().times(
                        B.times(transitionMatrix.times(O_t))));
            }
            else
            {
                B = B.times(transitionMatrix.times(O_t));
            }
            time += 1;
            if (time > timelag)
            {
                Matrix         one             = hmm.prior().createUnitBelief().asMatrix();
                Matrix         forwardMatrix   = forwardMessage.asMatrix();
                RandomVariable result          = hmm.prior().duplicate();
                Matrix         backwardMessage = (B.times(one));

                result.updateFrom(forwardMatrix.arrayTimes(backwardMessage));

                result.normalize();
                return(result);
            }
            else
            {
                return(null);
            }
        }
Example No. 25
	public CPT(RandomVariable on, double[] values,
			params RandomVariable [] conditionedOn) {
		this.on = on;
		if (null == conditionedOn) {
			conditionedOn = new RandomVariable[0];
		}
		RandomVariable[] tableVars = new RandomVariable[conditionedOn.Length + 1];
		for (int i = 0; i < conditionedOn.Length; i++) {
			tableVars[i] = conditionedOn[i];
			parents.add(conditionedOn[i]);
		}
		tableVars[conditionedOn.Length] = on;
		table = new ProbabilityTable(values, tableVars);
		onDomain.AddRange(((FiniteDomain) on.getDomain()).getPossibleValues());

		checkEachRowTotalsOne();
	}
Example No. 26
        // END-TermProposition
        //

        public bool equals(Object o)
        {
            if (this == o)
            {
                return(true);
            }
            if (!(o is RandomVariable))
            {
                return(false);
            }

            // The name (not the name:domain combination) uniquely identifies a
            // Random Variable
            RandomVariable other = (RandomVariable)o;

            return(this.name.Equals(other.getName()));
        }
Example No. 27
        /// <summary>
        /// Initializes the request source node
        /// </summary>
        /// <param name="r">Intervals between request arrivals</param>
        /// <param name="RouteRow">Row used to route requests</param>
        /// <param name="ID">Node identifier</param>
        public SourceNode(int ID, Random r, RandomVariable ArrivalInterval, Node[] Nodes, InfoNode Info, double[] RouteRow)
        {
            // Store the constructor parameters
            this.ID = ID;
            this.r  = r;
            this.ArrivalInterval = ArrivalInterval;
            this.Nodes           = Nodes;
            this.Info            = Info;
            this.RouteRow = RouteRow;

            // The first arrival occurs at time zero
            this.NextEventTime = 0;
            FragmentCounter    = 0;
            // For collecting statistics
            ResponseTimes = new List <double>();
        }
Example No. 28
        public RandomVariable Smooth(String perception)
        {
            evidenceFromSmoothedStepToPresent.Add(perception);
            Matrix oT = hmm.SensorModel.AsMatrix(perception);
            Matrix transitionMatrix = hmm.TransitionModel.AsMatrix();

            if (time > timelag)
            {
                forwardMessage = hmm.Forward(forwardMessage, perception);
                // This seems WRONG. I think this should be
                // forwardMessage = hmm.Forward(forwardMessage,
                //     evidenceFromSmoothedStepToPresent[0]);
                // i.e. the perception at t-d; the book's algorithm
                // uses the latest perception.
                evidenceFromSmoothedStepToPresent.RemoveAt(0);
                Matrix oTMinusD = hmm.SensorModel.AsMatrix(
                    evidenceFromSmoothedStepToPresent[0]);

                B = oTMinusD.Inverse().Times(
                    transitionMatrix.Inverse().Times(
                        B.Times(transitionMatrix.Times(oT))));
            }
            else
            {
                B = B.Times(transitionMatrix.Times(oT));
            }
            time += 1;
            if (time > timelag)
            {
                Matrix         one             = hmm.Prior().CreateUnitBelief().AsMatrix();
                Matrix         forwardMatrix   = forwardMessage.AsMatrix();
                RandomVariable result          = hmm.Prior().Duplicate();
                Matrix         backwardMessage = (B.Times(one));

                result.UpdateFrom(forwardMatrix.ArrayTimes(backwardMessage));

                result.Normalize();
                return(result);
            }
            else
            {
                return(null);
            }
        }
Example No. 29
        public void SetData(
            RandomVariable variable,
            IDictionary<string,string> variableAbbreviations,
            FDiscreteDistribution distribution,
            FObservation conditionedOn,
            IEnumerable<RandomVariable> parents)
        {
            _variable = variable;
            _distribution = distribution;
            _conditionedOn = conditionedOn;
            _variableAbbreviations = variableAbbreviations;
            _parents
                = parents
                .Where(p => conditionedOn.Any(kvp => kvp.Key == p.Name))
                .ToArray();

            RefreshUI();
        }
Example No. 30
        public void testRecursiveBackwardMessageCalculationIsCorrect()
        {
            RandomVariable afterOneStep = rainmanHmm.forward(rainmanHmm.prior(),
                                                             HmmConstants.DO_NOTHING, HmmConstants.SEE_UMBRELLA);
            RandomVariable afterTwoSteps = rainmanHmm.forward(afterOneStep,
                                                              HmmConstants.DO_NOTHING, HmmConstants.SEE_UMBRELLA);

            RandomVariable postSequence = afterTwoSteps.duplicate()
                                          .createUnitBelief();

            RandomVariable smoothed = rainmanHmm.calculate_next_backward_message(
                afterOneStep, postSequence, HmmConstants.SEE_UMBRELLA);

            Assert.AreEqual(0.883, smoothed
                            .getProbabilityOf(HmmConstants.RAINING), TOLERANCE);
            Assert.AreEqual(0.117, smoothed
                            .getProbabilityOf(HmmConstants.NOT_RAINING), TOLERANCE);
        }
Example No. 31
        public Model GetModel(RandomVariable randomVariable, IStochasticDomainMapper domainMapper, int iteration)
        {
            var m = new Model();

            m.NodesDictionary.Add(0, new Node(id: 0, x: 0, y:  0, z: 0));
            m.NodesDictionary.Add(1, new Node(id: 1, x: 1, y:  0, z: 0));
            m.ElementsDictionary.Add(1, new Element()
            {
                ID          = 1,
                ElementType = new EulerBeam3D(randomVariable.Realize(iteration, domainMapper, null), 0.3)
            });
            m.Loads.Add(new Load()
            {
                Amount = 10, DOF = StructuralDof.TranslationX, Node = m.NodesDictionary[1]
            });

            return(m);
        }
Example No. 32
        public void testForwardMessagingWorksForFiltering()
        {
            RandomVariable afterOneStep = robotHmm.forward(robotHmm.prior(),
                                                           HmmConstants.DO_NOTHING, HmmConstants.SEE_DOOR_OPEN);

            Assert.AreEqual(0.75, afterOneStep
                            .getProbabilityOf(HmmConstants.DOOR_OPEN), TOLERANCE);
            Assert.AreEqual(0.25, afterOneStep
                            .getProbabilityOf(HmmConstants.DOOR_CLOSED), TOLERANCE);

            RandomVariable afterTwoSteps = robotHmm.forward(afterOneStep,
                                                            HmmConstants.PUSH_DOOR, HmmConstants.SEE_DOOR_OPEN);

            Assert.AreEqual(0.983, afterTwoSteps
                            .getProbabilityOf(HmmConstants.DOOR_OPEN), TOLERANCE);
            Assert.AreEqual(0.017, afterTwoSteps
                            .getProbabilityOf(HmmConstants.DOOR_CLOSED), TOLERANCE);
        }
Example No. 33
        public void SetData(
            RandomVariable variable,
            IDictionary <string, string> variableAbbreviations,
            FDiscreteDistribution distribution,
            FObservation conditionedOn,
            IEnumerable <RandomVariable> parents)
        {
            _variable              = variable;
            _distribution          = distribution;
            _conditionedOn         = conditionedOn;
            _variableAbbreviations = variableAbbreviations;
            _parents
                = parents
                  .Where(p => conditionedOn.Any(kvp => kvp.Key == p.Name))
                  .ToArray();

            RefreshUI();
        }
Example No. 34
        public void testOneStepFixedLagSmoothingOnRainManHmmWithDifferingEvidence()
        {
            FixedLagSmoothing fls = new FixedLagSmoothing(rainmanHmm, 1);

            // see umbrella on day one
            RandomVariable smoothedDayZero = fls.smooth(HmmConstants.SEE_UMBRELLA);

            Assert.AreEqual(0.627, smoothedDayZero
                            .getProbabilityOf(HmmConstants.RAINING), TOLERANCE);

            // no umbrella on day two
            RandomVariable smoothedDayOne = fls
                                            .smooth(HmmConstants.SEE_NO_UMBRELLA);

            Assert.AreEqual(0.702, smoothedDayOne
                            .getProbabilityOf(HmmConstants.RAINING), TOLERANCE);
            Assert.AreEqual(0.297, smoothedDayOne
                            .getProbabilityOf(HmmConstants.NOT_RAINING), TOLERANCE);
        }
Example No. 35
    public GameObject getTrap(Transform trans)
    {
        int val;

        if (RandomVariable.getRandomTrap() != 0)
        {
            return(null);
        }

        do
        {
            val = RandomVariable.getRandomValue(0, trapPrefab.Length);
            if (val >= DEFAULT_ENABLETRAP.GetLength(1))
            {
                return(null);
            }
        } while (!DEFAULT_ENABLETRAP[GameAttribute.Instance.level, val] && val < DEFAULT_ENABLETRAP.GetLength(1));

        return(Instantiate(trapPrefab[val], trans.position, Quaternion.identity) as GameObject);
    }
Example No. 36
        public RandomVariable perceptionUpdate(RandomVariable aBelief,
                String perception)
        {
            RandomVariable newBelief = aBelief.duplicate();

            // one way - use matrices
            Matrix beliefMatrix = aBelief.asMatrix();
            Matrix o_matrix = _sensorModel.asMatrix(perception);
            Matrix updated = o_matrix.times(beliefMatrix);
            newBelief.updateFrom(updated);
            newBelief.normalize();
            return newBelief;

            // alternate way of doing this. clearer in intent.
            // for (String state : aBelief.states()){
            // double probabilityOfPerception= sensorModel.get(state,perception);
            // newBelief.setProbabilityOf(state,probabilityOfPerception *
            // aBelief.getProbabilityOf(state));
            // }
        }
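The matrix route used above is the observation step of the forward (filtering) update:

    f' = α · O_e · f

where O_e = diag(P(e | state)) is the sensor matrix for the given perception and α the normalization applied by newBelief.normalize(); the commented-out loop computes the same product state by state.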
Example No. 37
        public AbstractNode(RandomVariable var, params Node[] parents)
        {
            if (null == var)
            {
                throw new ArgumentException(
                          "Random Variable for Node must be specified.");
            }
            this.variable = var;
            this.parents  = new Set <Node>();
            if (null != parents)
            {
                foreach (Node p in parents)
                {
                    ((AbstractNode)p).addChild(this);
                    this.parents.add(p);
                }
            }

            this.children = new Set <Node>();
        }
Example No. 38
        public CPT(RandomVariable on, double[] values,
                   params RandomVariable [] conditionedOn)
        {
            this.on = on;
            if (null == conditionedOn)
            {
                conditionedOn = new RandomVariable[0];
            }
            RandomVariable[] tableVars = new RandomVariable[conditionedOn.Length + 1];
            for (int i = 0; i < conditionedOn.Length; i++)
            {
                tableVars[i] = conditionedOn[i];
                parents.add(conditionedOn[i]);
            }
            tableVars[conditionedOn.Length] = on;
            table = new ProbabilityTable(values, tableVars);
            onDomain.AddRange(((FiniteDomain)on.getDomain()).getPossibleValues());

            checkEachRowTotalsOne();
        }
Example No. 39
        void OnBayesianNetworkStructureChanged(object sender, BayesianNetwork args)
        {
            Dispatcher.Invoke(delegate
            {
                // If structure changed, random variable instances were shed.
                // Find the latest instances.
                foreach (GraphNode node in _nodes)
                {
                    RandomVariable oldVariable = (RandomVariable)node.Tag;
                    if (_network.HasVariable(oldVariable.Name))
                    {
                        RandomVariable newVariable = _network.GetVariable(oldVariable.Name);
                        node.Tag = newVariable;
                    }
                }

                // Update edges.
                UpdateEdges();
            });
        }
Example No. 40
        // function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)
        /**
         * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering queries
         * given evidence in a Bayesian Network.
         * 
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param N
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution likelihoodWeighting(RandomVariable[] X,
                                                           AssignmentProposition[] e, BayesianNetwork bn, int N)
        {
            // local variables: W, a vector of weighted counts for each value of X,
            // initially zero
            double[] W = new double[ProbUtil
                .expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < N; j++)
            {
                // <b>x</b>,w <- WEIGHTED-SAMPLE(bn,e)
                Pair<Map<RandomVariable, Object>, Double> x_w = weightedSample(bn,
                                                                               e);
                // W[x] <- W[x] + w where x is the value of X in <b>x</b>
                W[ProbUtil.indexOf(X, x_w.getFirst())] += x_w.getSecond();
            }
            // return NORMALIZE(W)
            return new ProbabilityTable(W, X).normalize();
        }
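Each sample's weight returned by weightedSample is, in the standard formulation, the likelihood of the evidence under that sample:

    w = Π_i P(e_i | parents(E_i))

so W accumulates evidence-weighted counts per value of X and the final normalize() turns them into an estimate of P(X | e). (weightedSample itself is not shown in this collection; the formula is the textbook definition, not read from this code.)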
Example No. 41
        public AbstractNode(RandomVariable var, params Node[] parents)
        {
            if (null == var)
            {
                throw new ArgumentException(
                    "Random Variable for Node must be specified.");
            }
            this.variable = var;
            this.parents = new Set<Node>();
            if (null != parents)
            {
                foreach (Node p in parents)
                {
                    ((AbstractNode) p).addChild(this);
                    this.parents.add(p);
                }
            }

            this.children = new Set<Node>();
        }
Example No. 42
        public RandomVariable perceptionUpdate(RandomVariable aBelief,
                                               String perception)
        {
            RandomVariable newBelief = aBelief.duplicate();

            // one way - use matrices
            Matrix beliefMatrix = aBelief.asMatrix();
            Matrix o_matrix     = _sensorModel.asMatrix(perception);
            Matrix updated      = o_matrix.times(beliefMatrix);

            newBelief.updateFrom(updated);
            newBelief.normalize();
            return(newBelief);

            // alternate way of doing this. clearer in intent.
            // for (String state : aBelief.states()){
            // double probabilityOfPerception= sensorModel.get(state,perception);
            // newBelief.setProbabilityOf(state,probabilityOfPerception *
            // aBelief.getProbabilityOf(state));
            // }
        }
Example No. 43
        /// <summary>
        /// Implementation of the activity
        /// </summary>
        /// <param name="context">The context used to schedule</param>
        protected override HiddenMarkovModel Execute(CodeActivityContext context)
        {
            List <String>   statesList = States.ToList <String>();
            RandomVariable  prior      = new RandomVariable(statesList);
            TransitionModel tm         = new TransitionModel(States.ToList <String>());

            foreach (Triplet <string, string, double> transitionProbability in TransitionProbabilities)
            {
                tm.setTransitionProbability(transitionProbability.First, transitionProbability.Second, transitionProbability.Third);
            }

            SensorModel sm = new SensorModel(States.ToList <String>(), Perceptions.ToList <String>());

            foreach (Triplet <string, string, double> sensingProbability in SensingProbabilities)
            {
                sm.setSensingProbability(sensingProbability.First, sensingProbability.Second, sensingProbability.Third);
            }
            HiddenMarkovModel result = new HiddenMarkovModel(prior, tm, sm);

            return(result);
        }
Example No. 44
        public static float GetRandomFloat(this RandomVariable variable)
        {
            var min = variable.Low;
            var max = variable.High;

            if (min == max)
            {
                return(min);
            }

            // This is how the original engine behaves.
            if (max < min)
            {
                return(max);
            }

            return(variable.DistributionType switch
            {
                DistributionType.Uniform => min + ((float)Random.NextDouble() * (max - min)),
                _ => throw new NotSupportedException(),
            });
        }
Example No. 45
        /**
         *
         * @param probabilityChoice
         *            a probability choice for the sample
         * @param Xi
         *            a Random Variable with a finite domain from which a random
         *            sample is to be chosen based on the probability choice.
         * @param distribution
         *            Xi's distribution.
         * @return a Random Sample from Xi's domain.
         */

        public static Object sample(double probabilityChoice, RandomVariable Xi,
                                    double[] distribution)
        {
            FiniteDomain fd = (FiniteDomain)Xi.getDomain();

            if (fd.size() != distribution.Length)
            {
                throw new ArgumentException("Size of domain Xi " + fd.size()
                                            + " is not equal to the size of the distribution "
                                            + distribution.Length);
            }
            int    i     = 0;
            double total = distribution[0];

            while (probabilityChoice > total)
            {
                i++;
                total += distribution[i];
            }
            return(fd.getValueAt(i));
        }
Example No. 46
        public RandomVariable toRandomVariable() {
		List<String> states = new List<String>();
		Dictionary<String, int> stateCount = new Dictionary<String, int>();
		foreach (Particle p in particles) {
			String state = p.getState();
			if (!(states.Contains(state))) {
				states.Add(state);
				stateCount.Add(state, 0);
			}

			stateCount[state]++;

		}

		RandomVariable result = new RandomVariable(states);
		foreach (String state in stateCount.Keys) {
			result.setProbabilityOf(state,
					((double) stateCount[state] / particles.Count));
		}
		return result;
	}
Example No. 47
        // function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)
        /**
         * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering queries
         * given evidence in a Bayesian Network.
         * 
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution rejectionSampling(RandomVariable[] X,
                                                         AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil
                .expectedSizeOfCategoricalDistribution(X)];

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // <b>x</b> <- PRIOR-SAMPLE(bn)
                Map<RandomVariable, Object> x = ps.priorSample(bn);
                // if <b>x</b> is consistent with e then
                if (isConsistent(x, e))
                {
                    // <b>N</b>[x] <- <b>N</b>[x] + 1
                    // where x is the value of X in <b>x</b>
                    N[ProbUtil.indexOf(X, x)] += 1.0;
                }
            }
            // return NORMALIZE(<b>N</b>)
            return new ProbabilityTable(N, X).normalize();
        }
Example No. 48
        //
        // START-BayesSampleInference

        public CategoricalDistribution ask(RandomVariable[] X,
                                           AssignmentProposition[] observedEvidence,
                                           BayesianNetwork bn, int N)
        {
            return likelihoodWeighting(X, observedEvidence, bn, N);
        }
Example No. 49
 public Node getNode(RandomVariable rv)
 {
     return varToNodeMap.get(rv);
 }
Example No. 50
        private List<Factor> sumOut(RandomVariable var, List<Factor> factors,
                                    BayesianNetwork bn)
        {
            List<Factor> summedOutFactors = new List<Factor>();
            List<Factor> toMultiply = new List<Factor>();
            foreach (Factor f in factors)
            {
                if (f.contains(var))
                {
                    toMultiply.Add(f);
                }
                else
                {
                    // This factor does not contain the variable
                    // so no need to sum out - see AIMA3e pg. 527.
                    summedOutFactors.Add(f);
                }
            }

            summedOutFactors.Add(pointwiseProduct(toMultiply).sumOut(var));

            return summedOutFactors;
        }
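Conceptually, the returned list replaces every factor that mentions var with a single new factor: if f_1, ..., f_k are the factors containing var, the factor added at the end is

    g(Y) = Σ_{v ∈ dom(var)} Π_{i=1..k} f_i(var = v, Y)

i.e. pointwiseProduct(toMultiply).sumOut(var), while factors not mentioning var pass through unchanged.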
Example No. 51
        public AbstractNode(RandomVariable var)
            : this(var, null)
        {

        }
Example No. 52
        public AssignmentProposition(RandomVariable forVariable, Object value)
            : base(forVariable)
        {

            setValue(value);
        }
Example No. 53
 //
 // START-BayesInference
 public CategoricalDistribution ask(RandomVariable[] X,
                                    AssignmentProposition[] observedEvidence,
                                    BayesianNetwork bn)
 {
     return this.eliminationAsk(X, observedEvidence, bn);
 }
Example No. 54
        // END-Proposition
        //

        //
        // Protected Methods
        //
        protected void addScope(RandomVariable var)
        {
            scope.Add(var);
        }
Example No. 55
 protected void addUnboundScope(RandomVariable var)
 {
     unboundScope.add(var);
 }
Example No. 56
	public bool contains(RandomVariable rv) {
		return table.contains(rv);
	}
Example No. 57
  /**
   * 
   * @param rv
   *            the Random Variable to be checked.
   * @return true if this Factor contains the passed in Random Variable, false
   *         otherwise.
   */
 abstract public bool contains(RandomVariable rv);
Example No. 58
        /**
         * Calculate the indexes for X[i] into a vector representing the enumeration
         * of the value assignments for the variables X and their corresponding
         * assignment in x. For example the Random Variables:<br>
         * Q::{true, false}, R::{'A', 'B','C'}, and T::{true, false}, would be
         * enumerated in a Vector as follows:
         * 
         * <pre>
         * Index  Q      R  T
         * -----  -      -  -
         * 00:    true,  A, true
         * 01:    true,  A, false
         * 02:    true,  B, true
         * 03:    true,  B, false
         * 04:    true,  C, true
         * 05:    true,  C, false
         * 06:    false, A, true
         * 07:    false, A, false
         * 08:    false, B, true
         * 09:    false, B, false
         * 10:    false, C, true
         * 11:    false, C, false
         * </pre>
         * 
         * if X[i] = R and x = {..., R='C', ...} then the indexes returned would be
         * [4, 5, 10, 11].
         * 
         * @param X
         *            a list of the Random Variables that would comprise the vector.
         * @param idx
         *            the index into X for the Random Variable whose assignment we
         *            wish to retrieve its indexes for.
         * @param x
         *            an assignment for the Random Variables in X.
         * @return the indexes into a vector that would represent the enumeration of
         *         the values for X[i] in x.
         */

        public static int[] indexesOfValue(RandomVariable[] X, int idx,
                                           Map<RandomVariable, Object> x)
        {
            int csize = ProbUtil.expectedSizeOfCategoricalDistribution(X);

            FiniteDomain fd = (FiniteDomain) X[idx].getDomain();
            int vdoffset = fd.getOffset(x.get(X[idx]));
            int vdosize = fd.size();
            int[] indexes = new int[csize/vdosize];

            int blocksize = csize;
            for (int i = 0; i < X.Length; i++)
            {
                blocksize = blocksize/X[i].getDomain().size();
                if (i == idx)
                {
                    break;
                }
            }

            for (int i = 0; i < indexes.Length; i += blocksize)
            {
                int offset = ((i/blocksize)*vdosize*blocksize)
                             + (blocksize*vdoffset);
                for (int b = 0; b < blocksize; b++)
                {
                    indexes[i + b] = offset + b;
                }
            }

            return indexes;
        }
Example No. 59
        /**
         * Calculate the index into a vector representing the enumeration of the
         * value assignments for the variables X and their corresponding assignment
         * in x. For example the Random Variables:<br>
         * Q::{true, false}, R::{'A', 'B','C'}, and T::{true, false}, would be
         * enumerated in a Vector as follows:
         * 
         * <pre>
         * Index  Q      R  T
         * -----  -      -  -
         * 00:    true,  A, true
         * 01:    true,  A, false
         * 02:    true,  B, true
         * 03:    true,  B, false
         * 04:    true,  C, true
         * 05:    true,  C, false
         * 06:    false, A, true
         * 07:    false, A, false
         * 08:    false, B, true
         * 09:    false, B, false
         * 10:    false, C, true
         * 11:    false, C, false
         * </pre>
         * 
         * if x = {Q=true, R='C', T=false} the index returned would be 5.
         * 
         * @param X
         *            a list of the Random Variables that would comprise the vector.
         * @param x
         *            an assignment for the Random Variables in X.
         * @return an index into a vector that would represent the enumeration of
         *         the values for X.
         */

        public static int indexOf(RandomVariable[] X, Map<RandomVariable, Object> x)
        {
            if (1 == X.Length)
            {
                return ((FiniteDomain) X[0].getDomain()).getOffset(x.get(X[0]));
            }
            // X.Length > 1, so calculate using a mixed radix number
            //
            // Note: Create radices in reverse order so that the enumeration
            // through the distributions is of the following
            // order using a MixedRadixNumber, e.g. for two Booleans:
            // X Y
            // true true
            // true false
            // false true
            // false false
            // which corresponds with how displayed in book.
            int[] radixValues = new int[X.Length];
            int[] radices = new int[X.Length];
            int j = X.Length - 1;
            for (int i = 0; i < X.Length; i++)
            {
                FiniteDomain fd = (FiniteDomain) X[i].getDomain();
                radixValues[j] = fd.getOffset(x.get(X[i]));
                radices[j] = fd.size();
                j--;
            }

            return new MixedRadixNumber(radixValues, radices).intValue();
        }
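As a quick check against the example in the comment above: for X = (Q, R, T) with domain sizes (2, 3, 2) and x = {Q=true, R='C', T=false}, the offsets are (0, 2, 1) and the mixed-radix value is

    index = 0·(3·2) + 2·(2) + 1·(1) = 5

which matches row 05 (true, C, false) in the enumeration table.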
Example No. 60
        /**
         * 
         * @param probabilityChoice
         *            a probability choice for the sample
         * @param Xi
         *            a Random Variable with a finite domain from which a random
         *            sample is to be chosen based on the probability choice.
         * @param distribution
         *            Xi's distribution.
         * @return a Random Sample from Xi's domain.
         */

        public static Object sample(double probabilityChoice, RandomVariable Xi,
                                    double[] distribution)
        {
            FiniteDomain fd = (FiniteDomain) Xi.getDomain();
            if (fd.size() != distribution.Length)
            {
                throw new ArgumentException("Size of domain Xi " + fd.size()
                                            + " is not equal to the size of the distribution "
                                            + distribution.Length);
            }
            int i = 0;
            double total = distribution[0];
            while (probabilityChoice > total)
            {
                i++;
                total += distribution[i];
            }
            return fd.getValueAt(i);
        }
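A short trace of the cumulative-sum loop: with distribution = {0.3, 0.7} (a two-value domain) and probabilityChoice = 0.5, total starts at 0.3; since 0.5 > 0.3, i advances to 1 and total to 1.0; the loop then exits and fd.getValueAt(1), the second domain value, is returned, as expected for a draw landing in the upper 70% of the unit interval.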