예제 #1
0
        /// <summary>
        /// Initializes an empty workbench and starts the three background
        /// worker threads: "Inference" (scenarios), "Learning" (learning
        /// tasks), and "Layout" (network layout).
        /// </summary>
        public Workbench()
        {
            // Start from a blank model: an empty network and no variable abbreviations.
            this._bayesianNetwork = new BayesianNetwork("Empty");
            this._bayesianNetworkVariableAbbreviations = new Dictionary<string, string>();

            // Scenarios: public observable collection mirrored by an internal
            // record list that the inference thread consumes.
            this._scenarios = new ObservableCollection<IScenario>();
            this._scenarios.CollectionChanged += ScenariosChanged;
            this._scenariosInternal = new List<ScenarioRecord>();

            this._scenariosThreadCancel = false;
            this._scenariosThread = new Thread(ThreadMainScenariosInference);
            this._scenariosThread.Name = "Inference";
            this._scenariosThread.Start();

            // Learning tasks: same observable/internal pairing as scenarios.
            this._learningTasks = new ObservableCollection<ILearningTask>();
            this._learningTasks.CollectionChanged += LearningTasksChanged;
            this._learningTasksInternal = new List<LearningTaskRecord>();

            this._learningTasksThreadCancel = false;
            this._learningTasksThread = new Thread(ThreadMainLearningTasks);
            this._learningTasksThread.Name = "Learning";
            this._learningTasksThread.Start();

            // Layout: options and current layout must exist before the layout
            // record (which references both) is constructed.
            this._networkLayoutOptions = new NetworkLayoutOptions();
            this._networkLayout = new NetworkLayout();
            this._networkLayoutInternal = new NetworkLayoutRecord(_bayesianNetwork, _networkLayout, this.NetworkLayoutOptions);

            this._networkLayoutThreadCancel = false;
            this._networkLayoutThread = new Thread(ThreadMainNetworkLayout);
            this._networkLayoutThread.Name = "Layout";
            this._networkLayoutThread.Start();

            this.ComparisonMetric = Model.ComparisonMetric.SymmetricKLDivergence;
        }
 /// <summary>
 /// Construct a Bayesian trainer. Uses K2 to search and the SimpleEstimator
 /// to estimate probability. Initializes the network as Naive Bayes.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theData">The data to train with.</param>
 /// <param name="theMaximumParents">The maximum number of parents per event.</param>
 public TrainBayesian(BayesianNetwork theNetwork, IMLDataSet theData,
                      int theMaximumParents)
     : this(theNetwork, theData, theMaximumParents,
            BayesianInit.InitNaiveBayes, new SearchK2(),
            new SimpleEstimator())
 {
 }
예제 #3
0
        // function ELIMINATION-ASK(X, e, bn) returns a distribution over X

        /**
         * The ELIMINATION-ASK algorithm (variable elimination) in Figure 14.11.
         *
         * @param X
         *            the query variables.
         * @param e
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} &cup; E &cup; Y, where Y
         *            are the hidden variables.
         * @return a distribution over the query variables.
         */

        public CategoricalDistribution eliminationAsk(RandomVariable[] X,
                                                      AssignmentProposition[] e, BayesianNetwork bn)
        {
            Set <RandomVariable>  hidden = new Set <RandomVariable>();
            List <RandomVariable> VARS   = new List <RandomVariable>();

            // Partition bn's variables into the hidden set and the full list VARS.
            calculateVariables(X, e, bn, hidden, VARS);

            // factors <- []
            List <Factor> factors = new List <Factor>();

            // for each var in ORDER(bn.VARS) do
            foreach (RandomVariable var in order(bn, VARS))
            {
                // factors <- [MAKE-FACTOR(var, e) | factors]
                // NOTE(review): Add(0, ...) prepends — assumes a custom list type
                // with an index-taking Add overload; confirm (BCL List<T>.Add has none).
                factors.Add(0, makeFactor(var, e, bn));
                // if var is hidden variable then factors <- SUM-OUT(var, factors)
                if (hidden.Contains(var))
                {
                    factors = sumOut(var, factors, bn);
                }
            }
            // return NORMALIZE(POINTWISE-PRODUCT(factors))
            Factor product = pointwiseProduct(factors);

            // Note: Want to ensure the order of the product matches the
            // query variables
            return(((ProbabilityTable)product.pointwiseProductPOS(_identity, X))
                   .normalize());
        }
예제 #4
0
        /// <summary>
        /// Verifies that K2 search, starting from an empty structure, learns
        /// the chain x1 -> x2 -> x3 and the expected conditional probability.
        /// </summary>
        public void TestK2Structure()
        {
            String[] labels = { "available", "not" };

            IMLDataSet      data    = new BasicMLDataSet(DATA, null);
            BayesianNetwork network = new BayesianNetwork();
            BayesianEvent   x1      = network.CreateEvent("x1", labels);
            BayesianEvent   x2      = network.CreateEvent("x2", labels);
            BayesianEvent   x3      = network.CreateEvent("x3", labels);

            network.FinalizeStructure();
            TrainBayesian train = new TrainBayesian(network, data, 10);

            // Force K2 to discover the structure from scratch.
            train.InitNetwork = BayesianInit.InitEmpty;
            while (!train.TrainingDone)
            {
                train.Iteration();
            }
            // NOTE(review): extra Iteration() after TrainingDone — presumably
            // finalizes parameter estimates; confirm it is intentional.
            train.Iteration();
            // Expected learned structure: x1 is a root, x2's parent is x1,
            // x3's parent is x2.
            Assert.IsTrue(x1.Parents.Count == 0);
            Assert.IsTrue(x2.Parents.Count == 1);
            Assert.IsTrue(x3.Parents.Count == 1);
            Assert.IsTrue(x2.Parents.Contains(x1));
            Assert.IsTrue(x3.Parents.Contains(x2));
            Assert.AreEqual(0.714, network.GetEvent("x2").Table.FindLine(1, new int[] { 1 }).Probability, 0.001);
        }
예제 #5
0
        /// <summary>
        /// Deserializes a Bayesian network from its JSON form: the "name"
        /// property names the network, each entry under "variables" becomes a
        /// random variable, and each variable's "parents" array defines edges.
        /// </summary>
        public static BayesianNetwork ToBayesianNetwork(this JObject json)
        {
            var bn = new BayesianNetwork(json["name"].Value<string>());

            // Pass 1: create every variable first and remember its declared
            // parents, since a parent may appear later in the JSON than its child.
            var orderedNames = new List<string>();
            var parentNamesByVariable = new Dictionary<string, string[]>();
            foreach (var jsonVariable in json.Property("variables").Value)
            {
                var randomVariable = ToRandomVariable(jsonVariable);
                bn.AddVariable(randomVariable);

                orderedNames.Add(randomVariable.Name);
                parentNamesByVariable.Add(randomVariable.Name, jsonVariable["parents"].Values<string>().ToArray());
            }

            // Pass 2: wire parent -> child edges now that all variables exist.
            foreach (var childName in orderedNames)
            {
                foreach (var parentName in parentNamesByVariable[childName])
                {
                    bn.ConnectVariables(parentName, childName);
                }
            }

            return bn;
        }
예제 #6
0
        /// <summary>
        /// Calculate G, the K2 scoring term for one event: the product over
        /// every parent-value combination of p1 * p2 (see loop comments).
        /// </summary>
        /// <param name="network">The network to calculate for.</param>
        /// <param name="e">The event to calculate for.</param>
        /// <param name="parents">The parents.</param>
        /// <returns>The value for G.</returns>
        public double CalculateG(BayesianNetwork network,
                                 BayesianEvent e, IList<BayesianEvent> parents)
        {
            double result = 1.0;
            // r = number of discrete states this event can take.
            int r = e.Choices.Count;

            // args enumerates every combination of parent values, starting at all zeros.
            var args = new int[parents.Count];

            do
            {
                // p1 = (r - 1)! / (N + r - 1)!, where N counts training cases
                // matching the current parent instantiation.
                double n = EncogMath.Factorial(r - 1);
                double d = EncogMath.Factorial(CalculateN(network, e,
                                                          parents, args) + r - 1);
                double p1 = n/d;

                // p2 = product over each event value k of N_k!, where N_k counts
                // matching cases in which the event takes value k.
                double p2 = 1;
                for (int k = 0; k < e.Choices.Count; k++)
                {
                    p2 *= EncogMath.Factorial(CalculateN(network, e, parents, args, k));
                }

                result *= p1*p2;
            } while (EnumerationQuery.Roll(parents, args)); // advance to the next parent combination

            return result;
        }
    private List <int> unlockableDenominators;//Denominators that can be unlocked

    /// <summary>
    /// Constructor. Builds an empty learner model, resolves the owning user
    /// from Unity PlayerPrefs, computes the per-user save paths, seeds the
    /// unlockable denominators, and loads any previously saved model.
    /// </summary>
    public LearnerModelingComponent()
    {
        this.bayesNet               = new BayesianNetwork();
        this.denominatorMastery     = new List <DenominatorMastery> ();
        this.unlockableDenominators = new List <int> ();

        // The model is keyed per user; fall back to "default" when no
        // username has been stored yet.
        if (PlayerPrefs.HasKey("Username"))
        {
            this.owner = PlayerPrefs.GetString("Username");
        }
        else
        {
            this.owner = "default";
        }

        // Per-user save location under Unity's persistent data path.
        folderDataPath       = Application.persistentDataPath + "/Game Data/user_" + this.owner;
        learnerModelDataPath = folderDataPath + "/" + this.owner + "_LearnerModel.json";

        //Unlockable denominators
        //NOTE: SINCE WE'LL BE THE ONES SUPPLYING THE DENOMINATORS, I DID NOT MAKE A HANDLE FOR DUPLICATES
        this.unlockableDenominators.Add(4);
        this.unlockableDenominators.Add(2);
        this.unlockableDenominators.Add(3);
        this.unlockableDenominators.Add(6);
        this.unlockableDenominators.Add(8);
        this.unlockableDenominators.Add(9);

        LoadLearnerModel();
    }
예제 #8
0
 /// <summary>
 /// Construct a scenario over the given network with the supplied evidence.
 /// Posterior marginals start empty and are filled in by inference later.
 /// </summary>
 public Scenario(string id, FObservation evidence, BayesianNetwork network)
 {
     _posteriorMarginals = new Dictionary<string, FDiscreteDistribution>();
     _bayesianNetwork = network;
     Id = id;
     Evidence = evidence;
 }
        /// <summary>
        /// Builds the simulator: parses the graph file, constructs the
        /// Bayesian network over it, registers the user-facing commands and
        /// reasoning queries, then starts the interactive loop.
        /// </summary>
        /// <param name="path">Path to the graph description file.</param>
        public Simulator(string path)
        {
            graph     = new FileParser().ParseFile(path);
            network   = new BayesianNetwork(graph);
            evidences = new List <Evidence>();

            // Top-level menu actions.
            commandsMapper = new[]
            {
                UserAction.Of("Reset", Reset),
                UserAction.Of("Add Evidence", AddEvidence),
                UserAction.Of("Probabilistic Reasoning", ProbabilisticReasoning),
                UserAction.Of("Quit", Quit)
            };

            // Reasoning sub-menu. The "All" entry's lambda reads the
            // probabilisticReasoning field at invocation time, so referencing
            // entries [0]..[2] inside the initializer is safe.
            probabilisticReasoning = new[]
            {
                UserFunction.Of("What is the probability that each of the vertices contains evacuees?", () => QueryNodes(network.EvacueeNodes)),
                UserFunction.Of("What is the probability that each of the vertices is flooded?", () => QueryNodes(network.FloodingNodes)),
                UserFunction.Of("What is the probability that each of the edges is blocked?", () => QueryNodes(network.BlockageNodes)),
                UserFunction.Of("What is the probability that a certain path is free from blockages?", IsPathFree),
                UserFunction.Of(
                    "What is the path from a given location to a goal that has the highest probability of being free from blockages?",
                    BestPath),
                UserFunction.Of("All", () =>
                                probabilisticReasoning[0].Action()
                                .Concat(probabilisticReasoning[1].Action()).ToList()
                                .Concat(probabilisticReasoning[2].Action()).ToList())
            };

            Start();
        }
        /// <summary>
        /// Blue-taxi example: builds a two-event network (blue_taxi ->
        /// saw_blue), fills its truth tables, and runs an enumeration query
        /// for P(blue_taxi=false | saw_blue=false).
        /// </summary>
        /// <param name="app">The example interface host (unused here).</param>
        public void Execute(IExampleInterface app)
        {
            // build the bayesian network structure
            BayesianNetwork network        = new BayesianNetwork();
            BayesianEvent   BlueTaxi       = network.CreateEvent("blue_taxi");
            BayesianEvent   WitnessSawBlue = network.CreateEvent("saw_blue");

            network.CreateDependency(BlueTaxi, WitnessSawBlue);
            network.FinalizeStructure();
            // build the truth tables
            BlueTaxi.Table.AddLine(0.85, true);              // P(blue_taxi) = 0.85
            WitnessSawBlue.Table.AddLine(0.80, true, true);  // P(saw_blue | blue_taxi) = 0.80
            WitnessSawBlue.Table.AddLine(0.20, true, false); // P(saw_blue | ~blue_taxi) = 0.20

            // validate the network
            network.Validate();
            // display basic stats
            Console.WriteLine(network.ToString());
            Console.WriteLine("Parameter count: " + network.CalculateParameterCount());
            EnumerationQuery query = new EnumerationQuery(network);

            //SamplingQuery query = new SamplingQuery(network);
            query.DefineEventType(WitnessSawBlue, EventType.Evidence);
            query.DefineEventType(BlueTaxi, EventType.Outcome);
            query.SetEventValue(WitnessSawBlue, false);
            query.SetEventValue(BlueTaxi, false);
            query.Execute();
            Console.WriteLine(query.ToString());
        }
예제 #11
0
            /// <summary>
            /// Snapshot of everything the layout thread needs: the network,
            /// the current layout, the options, and a freshly seeded layout
            /// algorithm that reuses any existing vertex positions.
            /// </summary>
            /// <param name="network">The network to lay out.</param>
            /// <param name="layout">The current layout (may carry existing positions).</param>
            /// <param name="options">Layout options; must not be null.</param>
            public NetworkLayoutRecord(
                BayesianNetwork network,
                NetworkLayout layout,
                NetworkLayoutOptions options)
            {
                Debug.Assert(options != null, "Layout options cannot be null.");

                this.Network       = network;
                this.NetworkLayout = layout;
                this.Options       = options;

                // Manually specify sizes: every vertex gets the normal size.
                Dictionary <string, float> sizes = new Dictionary <string, float>();

                foreach (var v in network.Variables)
                {
                    sizes[v.Key] = Workbench.NetworkLayoutVertexSizeNormal;
                }

                // Instantiate algorithm on a clone so the layout thread never
                // shares the live network instance.
                AlgorithmState = new LayoutAlgorithm(network.Clone(), sizes, options);

                // Copy existing positions over so layout resumes rather than
                // restarting from random placement.
                if (layout != null && layout.Positions != null && layout.Positions.Count > 0)
                {
                    foreach (var kvp in layout.Positions)
                    {
                        AlgorithmState.Positions[kvp.Key] = kvp.Value;
                    }
                }
            }
예제 #12
0
        /// <summary>
        /// Calculate the value N, which is the number of cases, from the training data, where the
        /// desiredValue matches the training data.  Only cases where the parents match the specified
        /// parent instance are considered.
        /// </summary>
        /// <param name="network">The network to calculate for.</param>
        /// <param name="e">The event we are calculating for. (variable i)</param>
        /// <param name="parents">The parents of the specified event we are considering.</param>
        /// <param name="parentInstance">The parent instance we are looking for.</param>
        /// <returns>The value N. </returns>
        public int CalculateN(BayesianNetwork network, BayesianEvent e,
                              IList <BayesianEvent> parents, int[] parentInstance)
        {
            int result = 0;

            foreach (IMLDataPair pair in _data)
            {
                // Map this training row to a class index per network event.
                int[] d = _network.DetermineClasses(pair.Input);

                bool reject = false;

                // Reject the row unless every parent takes the requested value.
                for (int i = 0; i < parentInstance.Length; i++)
                {
                    BayesianEvent parentEvent = parents[i];
                    int           parentIndex = network.GetEventIndex(parentEvent);
                    if (parentInstance[i] != (d[parentIndex]))
                    {
                        reject = true;
                        break;
                    }
                }

                if (!reject)
                {
                    result++;
                }
            }
            return(result);
        }
예제 #13
0
        /// <summary>
        /// Loads a Bayesian network file on a background task and, on success,
        /// installs it as the active network on the UI thread.
        /// </summary>
        /// <param name="uri">Path to the network file to load.</param>
        internal void RequestLoadBayesianNetwork(string uri)
        {
            // Load off the UI thread; only the final UI mutation is marshaled back.
            Task.Factory.StartNew(delegate
            {
                WriteMessage("loading network file...");

                BayesianNetwork network = LoadNetwork(uri);

                if (network == null)
                {
                    WriteMessage("error loading network file");
                }
                else
                {
                    // UI state may only be touched on the dispatcher thread.
                    Dispatcher.BeginInvoke(new Action(delegate
                    {
                        SetBayesianNetwork(network, uri);
                        xRightTabs.SelectedItem = xTabVariables;
                    }));
                }
            });
        }
예제 #14
0
        /// <summary>
        /// Builds a BayesianNetwork from its JSON representation. The "name"
        /// property names the network; each entry under "variables" becomes a
        /// random variable whose "parents" array defines incoming edges.
        /// </summary>
        public static BayesianNetwork ToBayesianNetwork(this JObject json)
        {
            string networkName = json["name"].Value<string>();
            var network = new BayesianNetwork(networkName);

            // Pass 1: materialize every variable before any edges are added,
            // recording the declared parent names for the second pass.
            var names = new List<string>();
            var pendingParents = new Dictionary<string, string[]>();

            foreach (var variableJson in json.Property("variables").Value)
            {
                var variable = ToRandomVariable(variableJson);
                network.AddVariable(variable);

                names.Add(variable.Name);
                pendingParents.Add(variable.Name, variableJson["parents"].Values<string>().ToArray());
            }

            // Pass 2: connect each child to all of its declared parents.
            foreach (var childName in names)
            {
                foreach (var parentName in pendingParents[childName])
                {
                    network.ConnectVariables(parentName, childName);
                }
            }

            return network;
        }
예제 #15
0
        /// <summary>
        /// Calculate G, the K2 scoring term for one event: the product over
        /// every parent-value combination of p1 * p2 (see loop comments).
        /// </summary>
        /// <param name="network">The network to calculate for.</param>
        /// <param name="e">The event to calculate for.</param>
        /// <param name="parents">The parents.</param>
        /// <returns>The value for G.</returns>
        public double CalculateG(BayesianNetwork network,
                                 BayesianEvent e, IList <BayesianEvent> parents)
        {
            double result = 1.0;
            // r = number of discrete states this event can take.
            int    r      = e.Choices.Count;

            // args enumerates every combination of parent values, starting at all zeros.
            var args = new int[parents.Count];

            do
            {
                // p1 = (r - 1)! / (N + r - 1)!, where N counts training cases
                // matching the current parent instantiation.
                double n = EncogMath.Factorial(r - 1);
                double d = EncogMath.Factorial(CalculateN(network, e,
                                                          parents, args) + r - 1);
                double p1 = n / d;

                // p2 = product over each event value k of N_k!, where N_k counts
                // matching cases in which the event takes value k.
                double p2 = 1;
                for (int k = 0; k < e.Choices.Count; k++)
                {
                    p2 *= EncogMath.Factorial(CalculateN(network, e, parents, args, k));
                }

                result *= p1 * p2;
            } while (EnumerationQuery.Roll(parents, args)); // advance to the next parent combination

            return(result);
        }
        /// <summary>
        /// Uber-driver example: builds a two-event network (uber_driver ->
        /// saw_uber_driver), fills its truth tables, and runs an enumeration
        /// query for P(uber_driver=false | saw_uber_driver=false).
        /// </summary>
        /// <param name="app">The example interface host (unused here).</param>
        public void Execute(IExampleInterface app)
        {
            // Create a Bayesian network
            BayesianNetwork network = new BayesianNetwork();
            // Create the Uber driver event
            BayesianEvent UberDriver = network.CreateEvent("uber_driver");
            // create the witness event
            BayesianEvent WitnessSawUberDriver = network.CreateEvent("saw_uber_driver");

            // Attach the two
            network.CreateDependency(UberDriver, WitnessSawUberDriver);
            network.FinalizeStructure();

            // build the truth tables
            // NOTE(review): the null-conditional calls imply CreateEvent might
            // return null — confirm; otherwise plain calls would be clearer.
            UberDriver?.Table?.AddLine(0.85, true);
            WitnessSawUberDriver?.Table?.AddLine(0.80, true, true);
            WitnessSawUberDriver?.Table?.AddLine(0.20, true, false);
            network.Validate();

            Console.WriteLine(network.ToString());
            Console.WriteLine($"Parameter count: {network.CalculateParameterCount()}");

            EnumerationQuery query = new EnumerationQuery(network);

            // The evidence is that someone saw the Uber driver hit the car
            query.DefineEventType(WitnessSawUberDriver, EventType.Evidence);
            // The result was the Uber driver did it
            query.DefineEventType(UberDriver, EventType.Outcome);
            query.SetEventValue(WitnessSawUberDriver, false);
            query.SetEventValue(UberDriver, false);
            query.Execute();
            Console.WriteLine(query.ToString());
        }
예제 #17
0
        /// <summary>
        /// Enumeration-query test on a naive-Bayes-shaped network (a is the
        /// parent of x1, x2, x3): checks P(a=true | x1, x2, ~x3) is ~18%.
        /// </summary>
        public void TestEnumeration2()
        {
            BayesianNetwork network = new BayesianNetwork();
            BayesianEvent   a       = network.CreateEvent("a");
            BayesianEvent   x1      = network.CreateEvent("x1");
            BayesianEvent   x2      = network.CreateEvent("x2");
            BayesianEvent   x3      = network.CreateEvent("x3");

            network.CreateDependency(a, x1, x2, x3);
            network.FinalizeStructure();

            a.Table.AddLine(0.5, true);         // P(A) = 0.5
            x1.Table.AddLine(0.2, true, true);  // p(x1|a) = 0.2
            x1.Table.AddLine(0.6, true, false); // p(x1|~a) = 0.6
            x2.Table.AddLine(0.2, true, true);  // p(x2|a) = 0.2
            x2.Table.AddLine(0.6, true, false); // p(x2|~a) = 0.6
            x3.Table.AddLine(0.2, true, true);  // p(x3|a) = 0.2
            x3.Table.AddLine(0.6, true, false); // p(x3|~a) = 0.6
            network.Validate();

            EnumerationQuery query = new EnumerationQuery(network);

            // Condition on x1, x2, x3 and ask for the posterior of a.
            query.DefineEventType(x1, EventType.Evidence);
            query.DefineEventType(x2, EventType.Evidence);
            query.DefineEventType(x3, EventType.Evidence);
            query.DefineEventType(a, EventType.Outcome);
            query.SetEventValue(a, true);
            query.SetEventValue(x1, true);
            query.SetEventValue(x2, true);
            query.SetEventValue(x3, false);
            query.Execute();
            TestPercent(query.Probability, 18);
        }
예제 #18
0
        /// <summary>
        /// Greedily constructs a network structure: repeatedly selects the
        /// best valid edge component, commits it to the working network and
        /// solution, and invalidates components made inadmissible by the
        /// choice, until no valid components remain or a stop is requested.
        /// Then builds a fresh network from the solution and learns its
        /// parameters from the training set.
        /// </summary>
        /// <returns>The learned network with parameters estimated.</returns>
        public BayesianNetwork LearnBayesianNetwork()
        {
            while (true)
            {
                // Candidate edges still admissible in the construction graph.
                List<DecisionComponent<Edge>> validComponents = this.ConstructionGraph.GetValidComponents();
                if (validComponents.Count == 0 || this._stop)
                    break;

                DecisionComponent<Edge> component = this.SelectBestComponent(validComponents);
                if (component == null)
                    break;

                // Commit the chosen parent -> child edge to the working network.
                this._bayesianNetwork.Connect(component.Element.ParentIndex, component.Element.ChildIndex);

                this._solution.Components.Add(component);
                this.EvaluateSolutionQuality(this._solution);
                

                // Prune components made inadmissible by this choice.
                this.Problem.ComponentInvalidator.Invalidate(component, this._solution, this.ConstructionGraph);

                if (this.OnProgress != null)
                    this.OnProgress(this, null);

            }

            // Build the final network from the accumulated solution and fit
            // its conditional probability parameters.
            BayesianNetwork network = new BayesianNetwork(this._trainingSet.Metadata, this._solution.ToList());
            network.LearnParameters(this._trainingSet);
            return network;

        }
예제 #19
0
파일: Scenario.cs 프로젝트: HiMichelle/F-AI
 /// <summary>
 /// Construct a scenario over the given network with the supplied evidence.
 /// Posterior marginals start empty and are filled in by inference later.
 /// </summary>
 public Scenario(string id, FObservation evidence, BayesianNetwork network)
 {
     _posteriorMarginals = new Dictionary <string, FDiscreteDistribution>();
     _bayesianNetwork    = network;
     Id       = id;
     Evidence = evidence;
 }
예제 #20
0
 /// <summary>
 /// Construct a Bayesian trainer. Uses K2 to search and the SimpleEstimator
 /// to estimate probability. Initializes the network as Naive Bayes.
 /// </summary>
 /// <param name="theNetwork">The network to train.</param>
 /// <param name="theData">The data to train with.</param>
 /// <param name="theMaximumParents">The maximum number of parents per event.</param>
 public TrainBayesian(BayesianNetwork theNetwork, IMLDataSet theData,
                      int theMaximumParents)
     : this(theNetwork, theData, theMaximumParents,
            BayesianInit.InitNaiveBayes, new SearchK2(),
            new SimpleEstimator())
 {
 }
예제 #21
0
        // function ELIMINATION-ASK(X, e, bn) returns a distribution over X
        /**
         * The ELIMINATION-ASK algorithm (variable elimination) in Figure 14.11.
         *
         * @param X
         *            the query variables.
         * @param e
         *            observed values for variables E.
         * @param bn
         *            a Bayes net with variables {X} &cup; E &cup; Y, where Y
         *            are the hidden variables.
         * @return a distribution over the query variables.
         */

        public CategoricalDistribution eliminationAsk(RandomVariable[] X,
                                                      AssignmentProposition[] e, BayesianNetwork bn)
        {

            Set<RandomVariable> hidden = new Set<RandomVariable>();
            List<RandomVariable> VARS = new List<RandomVariable>();
            // Partition bn's variables into the hidden set and the full list VARS.
            calculateVariables(X, e, bn, hidden, VARS);

            // factors <- []
            List<Factor> factors = new List<Factor>();
            // for each var in ORDER(bn.VARS) do
            foreach (RandomVariable var in order(bn, VARS))
            {
                // factors <- [MAKE-FACTOR(var, e) | factors]
                // NOTE(review): Add(0, ...) prepends — assumes a custom list type
                // with an index-taking Add overload; confirm (BCL List<T>.Add has none).
                factors.Add(0, makeFactor(var, e, bn));
                // if var is hidden variable then factors <- SUM-OUT(var, factors)
                if (hidden.Contains(var))
                {
                    factors = sumOut(var, factors, bn);
                }
            }
            // return NORMALIZE(POINTWISE-PRODUCT(factors))
            Factor product = pointwiseProduct(factors);
            // Note: Want to ensure the order of the product matches the
            // query variables
            return ((ProbabilityTable) product.pointwiseProductPOS(_identity, X))
                .normalize();
        }
예제 #22
0
        // function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)

        /**
         * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
         * evidence in a Bayesian Network, by Gibbs sampling.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution gibbsAsk(RandomVariable[] X,
                                                AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil
                                    .expectedSizeOfCategoricalDistribution(X)];
            // Z, the nonevidence variables in bn
            Set <RandomVariable> Z = new Set <RandomVariable>(
                bn.getVariablesInTopologicalOrder());

            foreach (AssignmentProposition ap in e)
            {
                Z.Remove(ap.getTermVariable());
            }
            // <b>x</b>, the current state of the network, initially copied from e
            Map <RandomVariable, Object> x = new LinkedHashMap <RandomVariable, Object>();

            foreach (AssignmentProposition ap in e)
            {
                x.Add(ap.getTermVariable(), ap.getValue());
            }

            // initialize <b>x</b> with random values for the variables in Z
            foreach (RandomVariable Zi in
                     Z)
            {
                x.put(Zi, ProbUtil.randomSample(bn.getNode(Zi), x, randomizer));
            }

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // for each Z<sub>i</sub> in Z do
                foreach (RandomVariable Zi in
                         Z)
                {
                    // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
                    // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>)), its Markov blanket
                    x.put(Zi,
                          ProbUtil.mbRandomSample(bn.getNode(Zi), x, randomizer));
                }
                // Note: moving this outside the previous for loop,
                // as described in fig 14.6, as will only work
                // correctly in the case of a single query variable X.
                // However, when multiple query variables, rare events
                // will get weighted incorrectly if done above. In case
                // of single variable this does not happen as each possible
                // value gets * |Z| above, ending up with the same ratios
                // when normalized (i.e. its still more efficient to place
                // outside the loop).
                //
                // <b>N</b>[x] <- <b>N</b>[x] + 1
                // where x is the value of X in <b>x</b>
                N[ProbUtil.indexOf(X, x)] += 1.0;
            }
            // return NORMALIZE(<b>N</b>)
            return(new ProbabilityTable(N, X).normalize());
        }
예제 #23
0
 /// <inheritdoc/>
 public void Init(TrainBayesian theTrainer, BayesianNetwork theNetwork, IMLDataSet theData)
 {
     // Capture collaborators first: OrderNodes() runs after the fields are
     // set. Resetting _index to -1 restarts the search position.
     _network = theNetwork;
     _data    = theData;
     _train   = theTrainer;
     OrderNodes();
     _index = -1;
 }
예제 #24
0
        /// <summary>
        /// Constructs the inspector with no network attached and empty node
        /// and edge collections.
        /// </summary>
        public GraphInspector()
        {
            // Designer-generated UI setup must run before any state is touched.
            InitializeComponent();

            // Start with nothing to inspect; a network is attached later.
            _network = null;
            _nodes = new List<GraphNode>();
            _edges = new List<GraphEdge>();
        }
예제 #25
0
 /// <summary>
 /// Handles a network-structure change by replacing the internal layout
 /// record, which restarts the background layout process on the new graph.
 /// </summary>
 /// <param name="sender">Event source (unused).</param>
 /// <param name="network">The network whose structure changed.</param>
 protected void BayesianNetworkStructureChanged(object sender, BayesianNetwork network)
 {
     // Initialize new layout record to restart layout process.
     _networkLayoutInternal = new NetworkLayoutRecord(
         network,
         _networkLayout,
         this.NetworkLayoutOptions);
 }
예제 #26
0
 /// <inheritdoc/>
 public void Init(TrainBayesian theTrainer, BayesianNetwork theNetwork, IMLDataSet theData)
 {
     // Capture collaborators first: OrderNodes() runs after the fields are
     // set. Resetting _index to -1 restarts the search position.
     _network = theNetwork;
     _data = theData;
     _train = theTrainer;
     OrderNodes();
     _index = -1;
 }
예제 #27
0
 /// <summary>
 /// Serializes a Bayesian network to JSON: a "name" property plus a
 /// "variables" array with one entry per variable, in network order.
 /// </summary>
 public static JObject ToJObject(this BayesianNetwork bn)
 {
     // Serialize each variable in order, then assemble the document.
     var variables = new JArray(bn.VariablesOrdered.Select(v => v.ToJObject()));

     return new JObject(
         new JProperty("name", bn.Name),
         new JProperty("variables", variables));
 }
예제 #28
0
        // function GIBBS-ASK(X, e, bn, N) returns an estimate of <b>P</b>(X|e)
        /**
         * The GIBBS-ASK algorithm in Figure 14.16. For answering queries given
         * evidence in a Bayesian Network, by Gibbs sampling.
         *
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution gibbsAsk(RandomVariable[] X,
                                                AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // local variables: <b>N</b>, a vector of counts for each value of X,
            // initially zero
            double[] N = new double[ProbUtil
                .expectedSizeOfCategoricalDistribution(X)];
            // Z, the nonevidence variables in bn
            Set<RandomVariable> Z = new Set<RandomVariable>(
                bn.getVariablesInTopologicalOrder());
            foreach (AssignmentProposition ap in e)
            {
                Z.Remove(ap.getTermVariable());
            }
            // <b>x</b>, the current state of the network, initially copied from e
            Map<RandomVariable, Object> x = new LinkedHashMap<RandomVariable, Object>();
            foreach (AssignmentProposition ap in e)
            {
                x.Add(ap.getTermVariable(), ap.getValue());
            }

            // initialize <b>x</b> with random values for the variables in Z
            foreach (RandomVariable Zi in
            Z)
            {
                x.put(Zi, ProbUtil.randomSample(bn.getNode(Zi), x, randomizer));
            }

            // for j = 1 to N do
            for (int j = 0; j < Nsamples; j++)
            {
                // for each Z<sub>i</sub> in Z do
                foreach (RandomVariable Zi in
                Z)
                {
                    // set the value of Z<sub>i</sub> in <b>x</b> by sampling from
                    // <b>P</b>(Z<sub>i</sub>|mb(Z<sub>i</sub>)), its Markov blanket
                    x.put(Zi,
                          ProbUtil.mbRandomSample(bn.getNode(Zi), x, randomizer));
                }
                // Note: moving this outside the previous for loop,
                // as described in fig 14.6, as will only work
                // correctly in the case of a single query variable X.
                // However, when multiple query variables, rare events
                // will get weighted incorrectly if done above. In case
                // of single variable this does not happen as each possible
                // value gets * |Z| above, ending up with the same ratios
                // when normalized (i.e. its still more efficient to place
                // outside the loop).
                //
                // <b>N</b>[x] <- <b>N</b>[x] + 1
                // where x is the value of X in <b>x</b>
                N[ProbUtil.indexOf(X, x)] += 1.0;
            }
            // return NORMALIZE(<b>N</b>)
            return new ProbabilityTable(N, X).normalize();
        }
예제 #29
0
        /// <summary>
        /// Create a bayesian network.
        /// </summary>
        /// <param name="architecture">The architecture to use.</param>
        /// <param name="input">The input neuron count.</param>
        /// <param name="output">The output neuron count.</param>
        /// <returns>The new bayesian network.</returns>
        public IMLMethod Create(String architecture, int input,
                                int output)
        {
            // The architecture string doubles as the network's contents; the
            // input/output neuron counts are not used for a Bayesian network.
            var method = new BayesianNetwork();
            method.Contents = architecture;
            return method;
        }
예제 #30
0
        /// <summary>
        /// Define the truth table.
        /// </summary>
        /// <param name="network">The bayesian network.</param>
        /// <param name="result">The resulting probability.</param>
        public void DefineTruthTable(BayesianNetwork network, double result)
        {
            // Resolve the child event this probability line refers to.
            ParsedEvent parsed = ChildEvent;
            BayesianEvent target = network.RequireEvent(parsed.Label);

            // Append one row: P(child = parsed value | given-event args) = result.
            int[] arguments = GetArgs(network);
            target.Table.AddLine(result, parsed.ResolveValue(target), arguments);
        }
예제 #31
0
        // Round-trips the sample network through binary serialization and
        // verifies the reloaded copy is intact.
        public void TestPersistSerial()
        {
            BayesianNetwork original = Create();
            string path = SERIAL_FILENAME.ToString();

            SerializeObject.Save(path, original);
            var restored = (BayesianNetwork)SerializeObject.Load(path);

            Validate(restored);
        }
예제 #32
0
        // Round-trips the sample network through the Encog EG persistence
        // format and verifies the reloaded copy is intact.
        public void TestPersistEG()
        {
            BayesianNetwork original = Create();

            EncogDirectoryPersistence.SaveObject(EG_FILENAME, original);
            var restored = (BayesianNetwork)EncogDirectoryPersistence.LoadObject(EG_FILENAME);

            Validate(restored);
        }
예제 #33
0
        /// <summary>
        /// Define the relationships.
        /// </summary>
        /// <param name="network">The network.</param>
        public void DefineRelationships(BayesianNetwork network)
        {
            // Ensure the child event exists, then register a dependency from
            // every "given" event (parent) to it. RequireEvent creates events
            // only if they are not there already.
            BayesianEvent child = network.RequireEvent(ChildEvent.Label);

            foreach (ParsedEvent given in this.givenEvents)
            {
                network.CreateDependency(network.RequireEvent(given.Label), child);
            }
        }
예제 #34
0
        // Builds one Bayesian network per target-class value (each from the
        // corresponding ant's solution, parameters learned from that class's
        // dataset) and bundles them into a multinet classifier.
        private BayesianMultinetClassifier ConstructMulitNetClassifier(List <Ant <Edge> > ants)
        {
            var classifier = new BayesianMultinetClassifier(this._trainingSet.Metadata);

            for (int c = 0; c < this._trainingSet.Metadata.Target.Values.Length; c++)
            {
                var network = new BayesianNetwork(this._datasets[c].Metadata, ants[c].Solution.ToList());
                network.LearnParameters(this._datasets[c]);
                classifier.AddBayesianNetwork(c, network);
            }

            return classifier;
        }
예제 #35
0
    // Runs one decision cycle for the NPC: infers "fight" from the current
    // observations, then (adding "fight" as evidence) infers "run_away", and
    // writes the decision plus both distributions to the UI text fields.
    public void MakeDecision()
    {
        // You can specify a list of evidence
        List <string> observations = new List <string> {
            "brave=" + GetIsBrave(),
            "enemy_amount=" + GetEnemyAmount(),
            "cover_type=" + GetCoverType()
        };

        // You can then use them to infer another variable in the network
        // Index 0 of the distribution is taken to mean "fight = true".
        double[] fightDistribution = ve.Infer("fight", observations);
        bool     fight             = ve.PickOne(fightDistribution) == 0;

        // You can do chain interence based on previous inference results
        observations.Add("fight=" + fight);

        // The API functions are overloaded to fit your needs
        // e.g. you can use a less string-based approach if you want to do things programmatically
        BayesianNetwork network         = ve.GetNetwork();
        Proposition     braveProp       = network.FindNode("brave").Instantiate(GetIsBrave());
        Proposition     enemyAmountProp = network.FindNode("enemy_amount").Instantiate(GetEnemyAmount());
        Proposition     hasCoverProp    = network.FindNode("cover_type").Instantiate(GetCoverType());
        Proposition     fightProp       = network.FindNode("fight").Instantiate(fight.ToString());
        BayesianNode    runAwayNode     = ve.GetNetwork().FindNode("run_away");

        double[] runawayDistribution = ve.Infer(runAwayNode, braveProp, enemyAmountProp, hasCoverProp, fightProp);
        bool     runaway             = ve.PickOne(runawayDistribution) == runAwayNode.var.GetTokenIndex("True");

        // Since it is a bayesian network, you can infer any variables with partial or even no information
        // NOTE(review): the results of these two Infer calls are discarded -
        // presumably demonstration only; confirm they have no side effects.
        ve.Infer("enemy_amount", "fight=True");
        ve.Infer("fight");

        // NOTE(review): 'enemyAmount' is a field, while the evidence above used
        // GetEnemyAmount() - verify the two are kept in sync.
        if (enemyAmount.Equals("NoEnemy"))
        {
            decisionText.text = "Did not see any enemy.";
        }
        else if (fight)
        {
            decisionText.text = "The NPC decided to fight. ";
        }
        // '!fight' is always true here, since the previous branch handled fight.
        else if (!fight && runaway)
        {
            decisionText.text = "The NPC decided to run away.";
        }
        else
        {
            decisionText.text = "The NPC decided to wait for his chance.";
        }
        decisionText.text = "Decision made: " + decisionText.text;

        // Distributions are assumed binary: index 0 = true, index 1 = false.
        probabilityText.text = string.Format("true: {0}%\t\tfalse: {1}%\ntrue: {2}%\t\tfalse: {3}%",
                                             fightDistribution[0] * 100, fightDistribution[1] * 100, runawayDistribution[0] * 100, runawayDistribution[1] * 100);
    }
예제 #36
0
        /// <summary>
        /// Get the arguments to this event.
        /// </summary>
        /// <param name="network">The network.</param>
        /// <returns>The arguments.</returns>
        public int[] GetArgs(BayesianNetwork network)
        {
            var resolved = new int[givenEvents.Count];

            int index = 0;
            foreach (ParsedEvent given in this.givenEvents)
            {
                // Map each parsed "given" event onto its value index within
                // the actual network event of the same label.
                resolved[index++] = given.ResolveValue(network.GetEvent(given.Label));
            }

            return resolved;
        }
예제 #37
0
        /// <summary>
        /// Get the arguments to this event.
        /// </summary>
        /// <param name="network">The network.</param>
        /// <returns>The arguments.</returns>
        public int[] GetArgs(BayesianNetwork network)
        {
            int count = givenEvents.Count;
            int[] values = new int[count];

            for (int i = 0; i < count; i++)
            {
                // Resolve the i-th "given" event's value against the
                // corresponding event in the network.
                ParsedEvent given = this.givenEvents[i];
                values[i] = given.ResolveValue(network.GetEvent(given.Label));
            }

            return values;
        }
예제 #38
0
        // Answers P(query | evidences) by full enumeration: if the queried
        // node is itself observed the answer is certain; otherwise enumerate
        // the joint with the question asserted true and false, and normalize.
        public static IQueryResult EnumerationAsk(BayesianNetwork network, Query query, List <Evidence> evidences)
        {
            var observed = evidences.FirstOrDefault(e => e.Node == query.Node);
            if (observed != null)
            {
                double certainty = observed.Report == query.Question ? 1.0 : 0.0;
                return new QueryResult(query, certainty);
            }

            var affirmative = EnumerateAll(network.Nodes, Extend(evidences, query.Node, query.Question));
            var negative    = EnumerateAll(network.Nodes, Extend(evidences, query.Node, !query.Question));

            return new QueryResult(query, affirmative / (affirmative + negative));
        }
예제 #39
0
        // function PRIOR-SAMPLE(bn) returns an event sampled from the prior
        // specified by bn
        /**
         * The PRIOR-SAMPLE algorithm in Figure 14.13. Generates one event from
         * a Bayesian network by sampling each variable, in topological order,
         * from its conditional distribution given the values already sampled
         * for its parents.
         * 
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @return an event sampled from the prior specified by bn
         */

        public Map<RandomVariable, Object> priorSample(BayesianNetwork bn)
        {
            Map<RandomVariable, Object> sample = new LinkedHashMap<RandomVariable, Object>();

            // Topological order guarantees every variable's parents have been
            // assigned before the variable itself is sampled.
            foreach (RandomVariable variable in bn.getVariablesInTopologicalOrder())
            {
                sample.Add(variable, ProbUtil.randomSample(bn.getNode(variable), sample, randomizer));
            }

            return sample;
        }
예제 #40
0
        // Builds the two-event test network a -> b with fixed probabilities.
        public BayesianNetwork Create()
        {
            var network = new BayesianNetwork();
            BayesianEvent a = network.CreateEvent("a");
            BayesianEvent b = network.CreateEvent("b");
            network.CreateDependency(a, b);
            network.FinalizeStructure();

            // Fill in the probability tables after the structure is final.
            a.Table.AddLine(0.5, true);        // P(a) = 0.5
            b.Table.AddLine(0.2, true, true);  // P(b|a) = 0.2
            b.Table.AddLine(0.8, true, false); // P(b|~a) = 0.8
            network.Validate();

            return network;
        }
예제 #41
0
        // Loads a Bayesian network from a JSON resource identified by uri;
        // returns null when the resource is missing or blank.
        private BayesianNetwork LoadNetwork(string uri)
        {
            string raw = App.Current.LoadData(uri);
            if (string.IsNullOrWhiteSpace(raw))
            {
                return null;
            }

            return JObject.Parse(raw).ToBayesianNetwork();
        }
예제 #42
0
        // function LIKELIHOOD-WEIGHTING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)
        /**
         * The LIKELIHOOD-WEIGHTING algorithm in Figure 14.15. For answering
         * queries given evidence in a Bayesian Network.
         * 
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param N
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution likelihoodWeighting(RandomVariable[] X,
                                                           AssignmentProposition[] e, BayesianNetwork bn, int N)
        {
            // Weighted counts for each joint value of X, initially zero.
            double[] weightedCounts =
                new double[ProbUtil.expectedSizeOfCategoricalDistribution(X)];

            for (int sample = 0; sample < N; sample++)
            {
                // Draw one weighted sample <x, w> consistent with evidence e.
                Pair<Map<RandomVariable, Object>, Double> sampleAndWeight =
                    weightedSample(bn, e);
                // Accumulate w into the bucket for X's value within x.
                weightedCounts[ProbUtil.indexOf(X, sampleAndWeight.getFirst())] +=
                    sampleAndWeight.getSecond();
            }

            // return NORMALIZE(W)
            return new ProbabilityTable(weightedCounts, X).normalize();
        }
예제 #43
0
        /**
         * Construct a dynamic Bayesian network.
         *
         * @param priorNetwork the network giving the prior over the initial
         *        slice X_0
         * @param X_0_to_X_1 mapping from each slice-0 variable to its slice-1
         *        counterpart (note: this parameter shadows the field of the
         *        same name)
         * @param E_1 the slice-1 evidence variables
         * @param rootNodes the nodes describing this network's structure
         * @throws IllegalArgumentException when X_0, X_1 and E_1 together do
         *         not cover exactly the variables of the given nodes
         */
        public DynamicBayesNet(BayesianNetwork priorNetwork,
                               Map<RandomVariable, RandomVariable> X_0_to_X_1,
                               Set<RandomVariable> E_1, params Node[] rootNodes)
            : base(rootNodes)
        {


            // Record both directions of the slice-0 <-> slice-1 mapping.
            foreach (RandomVariable rv in X_0_to_X_1.keySet()
                )
            {
                RandomVariable x0 = rv;
                RandomVariable x1 = X_0_to_X_1[rv];
                this.X_0.add(x0);
                this.X_1.add(x1);
                this.X_0_to_X_1.put(x0, x1);
                this.X_1_to_X_0.put(x1, x0);
            }
            this.E_1.addAll(new List<RandomVariable>(E_1));

            // Assert the X_0, X_1, and E_1 sets are of expected sizes
            Set<RandomVariable> combined = new Set<RandomVariable>();
            combined.addAll(new List<RandomVariable>(X_0));
            combined.addAll(new List<RandomVariable>(X_1));
            combined.addAll(new List<RandomVariable>(E_1));
            // Every variable known to the node map must appear in the union.
            if (
                SetOps.difference(new List<RandomVariable>(varToNodeMap.keySet()), new List<RandomVariable>(combined)).
                    Count != 0)
            {
                throw new IllegalArgumentException(
                    "X_0, X_1, and E_1 do not map correctly to the Nodes describing this Dynamic Bayesian Network.");
            }
            this.priorNetwork = priorNetwork;

            // Precompute the slice-1 variables in topological order, excluding
            // slice-0 and evidence variables.
            X_1_VariablesInTopologicalOrder
                .AddRange(getVariablesInTopologicalOrder());
            X_1_VariablesInTopologicalOrder.RemoveAll(X_0);
            X_1_VariablesInTopologicalOrder.RemoveAll(E_1);
        }
예제 #44
0
        /// <summary>
        /// Parse a probability list. 
        /// </summary>
        /// <param name="network">The network to parse for.</param>
        /// <param name="line">The line to parse.</param>
        /// <returns>The parsed list.</returns>
        public static IList<ParsedProbability> ParseProbabilityList(BayesianNetwork network, String line)
        {
            IList<ParsedProbability> parsed = new List<ParsedProbability>();

            // Accumulate characters until a ')' closes one probability
            // expression, parse that chunk, then start collecting the next.
            var buffer = new StringBuilder();
            foreach (char ch in line)
            {
                buffer.Append(ch);
                if (ch == ')')
                {
                    var parser = new ParseProbability(network);
                    parsed.Add(parser.Parse(buffer.ToString()));
                    buffer.Length = 0;
                }
            }
            return parsed;
        }
예제 #45
0
        // function REJECTION-SAMPLING(X, e, bn, N) returns an estimate of
        // <b>P</b>(X|e)
        /**
         * The REJECTION-SAMPLING algorithm in Figure 14.14. For answering
         * queries given evidence in a Bayesian Network.
         * 
         * @param X
         *            the query variables
         * @param e
         *            observed values for variables E
         * @param bn
         *            a Bayesian network
         * @param Nsamples
         *            the total number of samples to be generated
         * @return an estimate of <b>P</b>(X|e)
         */

        public CategoricalDistribution rejectionSampling(RandomVariable[] X,
                                                         AssignmentProposition[] e, BayesianNetwork bn, int Nsamples)
        {
            // Counts for each joint value of the query variables, initially zero.
            double[] counts = new double[ProbUtil
                .expectedSizeOfCategoricalDistribution(X)];

            for (int j = 0; j < Nsamples; j++)
            {
                // Draw a sample from the prior and keep it only when it agrees
                // with the observed evidence (rejection step).
                Map<RandomVariable, Object> sample = ps.priorSample(bn);
                if (isConsistent(sample, e))
                {
                    counts[ProbUtil.indexOf(X, sample)] += 1.0;
                }
            }

            // return NORMALIZE(<b>N</b>)
            return new ProbabilityTable(counts, X).normalize();
        }
예제 #46
0
        // Sums 'var' out of the factor set: factors that do not mention 'var'
        // pass through unchanged (no need to sum out - see AIMA3e pg. 527);
        // the rest are multiplied pointwise and 'var' is summed out of the
        // resulting product.
        private List<Factor> sumOut(RandomVariable var, List<Factor> factors,
                                    BayesianNetwork bn)
        {
            List<Factor> result = new List<Factor>();
            List<Factor> containingVar = new List<Factor>();

            foreach (Factor factor in factors)
            {
                if (factor.contains(var))
                {
                    containingVar.Add(factor);
                }
                else
                {
                    result.Add(factor);
                }
            }

            result.Add(pointwiseProduct(containingVar).sumOut(var));

            return result;
        }
예제 #47
0
 //
 // START-BayesInference
 /// <summary>
 /// Answers <b>P</b>(X | observedEvidence) for the given network by
 /// delegating to the variable-elimination implementation.
 /// </summary>
 public CategoricalDistribution ask(RandomVariable[] X,
                                    AssignmentProposition[] observedEvidence,
                                    BayesianNetwork bn)
 {
     return this.eliminationAsk(X, observedEvidence, bn);
 }
 /// <inheritdoc/>
 public void Init(TrainBayesian theTrainer, BayesianNetwork theNetwork, IMLDataSet theData)
 {
     // Capture the network and data, and restart iteration from the first
     // index. Note: the trainer argument itself is not retained.
     _network = theNetwork;
     _data = theData;
     _index = 0;
 }
예제 #49
0
        /// <summary>
        /// Calculate the value N, which is the number of cases, from the training data, where the
        /// desiredValue matches the training data.  Only cases where the parents match the specifed
        /// parent instance are considered.
        /// </summary>
        /// <param name="network">The network to calculate for.</param>
        /// <param name="e">The event we are calculating for. (variable i)</param>
        /// <param name="parents">The parents of the specified event we are considering.</param>
        /// <param name="parentInstance">The parent instance we are looking for.</param>
        /// <returns>The value N. </returns>
        /// <remarks>
        /// NOTE(review): parameter <paramref name="e"/> is never referenced in
        /// this body - only the parent-instance match is counted, despite the
        /// summary mentioning a desiredValue; confirm against the overload
        /// that also matches a desired value.
        /// </remarks>
        public int CalculateN(BayesianNetwork network, BayesianEvent e,
                              IList<BayesianEvent> parents, int[] parentInstance)
        {
            int result = 0;

            foreach (IMLDataPair pair in _data)
            {
                // NOTE(review): classification uses the _network field while the
                // event index lookup below uses the network parameter - verify
                // these always refer to the same network.
                int[] d = _network.DetermineClasses(pair.Input);

                bool reject = false;

                // Reject the case unless every parent takes exactly the value
                // given in parentInstance.
                for (int i = 0; i < parentInstance.Length; i++)
                {
                    BayesianEvent parentEvent = parents[i];
                    int parentIndex = network.GetEventIndex(parentEvent);
                    if (parentInstance[i] != (d[parentIndex]))
                    {
                        reject = true;
                        break;
                    }
                }

                if (!reject)
                {
                    result++;
                }
            }
            return result;
        }
예제 #50
0
        /// <summary>
        /// Construct a Bayesian trainer. 
        /// </summary>
        /// <param name="theNetwork">The network to train.</param>
        /// <param name="theData">The data to train with.</param>
        /// <param name="theMaximumParents">The maximum number of parents.</param>
        /// <param name="theInit">How to init the new Bayes network.</param>
        /// <param name="theSearch">The search method.</param>
        /// <param name="theEstimator">The estimation mehod.</param>
        public TrainBayesian(BayesianNetwork theNetwork, IMLDataSet theData,
                             int theMaximumParents, BayesianInit theInit, IBayesSearch theSearch,
                             IBayesEstimator theEstimator)
            : base(TrainingImplementationType.Iterative)
        {
            _network = theNetwork;
            _data = theData;
            _maximumParents = theMaximumParents;

            // Give the search and estimation strategies access to this
            // trainer, the network, and the data before training starts.
            _search = theSearch;
            _search.Init(this, theNetwork, theData);

            _estimator = theEstimator;
            _estimator.Init(this, theNetwork, theData);

            _initNetwork = theInit;
            // Start with the worst possible error until the first iteration.
            Error = 1.0;
        }
 /// <summary>
 /// Default constructor.
 /// </summary>
 protected BasicQuery()
 {
     // No network attached yet; one is expected to be supplied later.
     _network = null;
 }
예제 #52
0
        // function WEIGHTED-SAMPLE(bn, e) returns an event and a weight
        /**
         * The WEIGHTED-SAMPLE function in Figure 14.15.
         * 
         * @param bn
         *            a Bayesian network specifying joint distribution
         *            <b>P</b>(X<sub>1</sub>,...,X<sub>n</sub>)
         * @param e
         *            observed values for variables E
         * @return return <b>x</b>, w - an event with its associated weight.
         */

        public Pair<Map<RandomVariable, Object>, Double> weightedSample(
            BayesianNetwork bn, AssignmentProposition[] e)
        {
            // Start with weight 1 and an event whose evidence variables are
            // fixed to their observed values.
            double weight = 1.0;
            Map<RandomVariable, Object> sample = new LinkedHashMap<RandomVariable, Object>();
            foreach (AssignmentProposition evidence in e)
            {
                sample.Add(evidence.getTermVariable(), evidence.getValue());
            }

            // Walk the variables in topological order so parents are always
            // assigned before their children.
            foreach (RandomVariable variable in bn.getVariablesInTopologicalOrder())
            {
                if (sample.ContainsKey(variable))
                {
                    // Evidence variable: multiply its likelihood into the
                    // weight, w <- w * P(variable = value | parents).
                    weight *= bn.getNode(variable)
                        .getCPD()
                        .getValue(
                            ProbUtil.getEventValuesForXiGivenParents(
                                bn.getNode(variable), sample));
                }
                else
                {
                    // Non-evidence variable: draw a random sample from
                    // P(variable | parents).
                    sample.Add(variable, ProbUtil.randomSample(bn.getNode(variable), sample, randomizer));
                }
            }

            // return <b>x</b>, w
            return new Pair<Map<RandomVariable, Object>, Double>(sample, weight);
        }
예제 #53
0
        /// <summary>
        /// Define the truth table. 
        /// </summary>
        /// <param name="network">The bayesian network.</param>
        /// <param name="result">The resulting probability.</param>
        public void DefineTruthTable(BayesianNetwork network, double result)
        {
            ParsedEvent parsedChild = ChildEvent;
            BayesianEvent actualChild = network.RequireEvent(parsedChild.Label);

            // One truth-table row: the probability of the child taking its
            // parsed value given the argument combination of the given events.
            int[] argValues = GetArgs(network);
            actualChild.Table.AddLine(result, parsedChild.ResolveValue(actualChild), argValues);
        }
 /// <summary>
 /// Parse the probability for the specified network. 
 /// </summary>
 /// <param name="theNetwork">The network to parse for.</param>
 public ParseProbability(BayesianNetwork theNetwork)
 {
     // Events referenced while parsing are resolved against this network.
     this.network = theNetwork;
 }
 /// <summary>
 /// Construct a basic query.
 /// </summary>
 /// <param name="theNetwork">The network to use for this query.</param>
 protected BasicQuery(BayesianNetwork theNetwork)
 {
     _network = theNetwork;
     // Build the query's internal structure from the attached network.
     FinalizeStructure();
 }
예제 #56
0
 /// <summary>
 /// Construct a sampling query. 
 /// </summary>
 /// <param name="theNetwork">The network that will be queried.</param>
 public SamplingQuery(BayesianNetwork theNetwork)
     : base(theNetwork)
 {
     // Start with the default number of samples per query.
     SampleSize = DefaultSampleSize;
 }
예제 #57
0
 /// <summary>
 /// Define the relationships.
 /// </summary>
 /// <param name="network">The network.</param>
 public void DefineRelationships(BayesianNetwork network)
 {
     // Ensure the child event exists in the network, then register a
     // dependency from every "given" (parent) event to it. RequireEvent
     // creates events only when they are not there already.
     BayesianEvent child = network.RequireEvent(ChildEvent.Label);
     foreach (ParsedEvent given in this.givenEvents)
     {
         network.CreateDependency(network.RequireEvent(given.Label), child);
     }
 }
예제 #58
0
        //
        // START-BayesSampleInference

        /// <summary>
        /// Answers <b>P</b>(X | observedEvidence) by likelihood weighting
        /// with N samples.
        /// </summary>
        public CategoricalDistribution ask(RandomVariable[] X,
                                           AssignmentProposition[] observedEvidence,
                                           BayesianNetwork bn, int N)
        {
            return likelihoodWeighting(X, observedEvidence, bn, N);
        }
예제 #59
0
 /// <inheritdoc/>
 /// <remarks>This implementation requires no initialization; the body is
 /// intentionally empty.</remarks>
 public void Init(TrainBayesian theTrainer, BayesianNetwork theNetwork,
     IMLDataSet theData)
 {
 }
예제 #60
0
 /// <summary>
 /// Construct the enumeration query.
 /// </summary>
 /// <param name="theNetwork">The Bayesian network to query.</param>
 public EnumerationQuery(BayesianNetwork theNetwork)
     : base(theNetwork)
 {
     // All setup is handled by the base query class.
 }