Example #1
        private void SetPriors(AllergenData data, int numVulnerabilities, Beliefs beliefs)
        {
            int  nY = AllergenData.NumYears;
            int  nN = data.DataCountChild.Length;
            int  nA = data.NumAllergens;
            bool useUniformClassPrior = true;

            if (beliefs == null)
            {
                this.probSensClassPrior.ObservedValue     = useUniformClassPrior ? Dirichlet.PointMass(Vector.Constant(numVulnerabilities, 1.0 / numVulnerabilities)) : Dirichlet.Symmetric(numVulnerabilities, 0.1);
                this.probSens1Prior.ObservedValue         = Util.ArrayInit(nA, numVulnerabilities, (a, v) => new Beta(1, 1));
                this.probGainPrior.ObservedValue          = Util.ArrayInit(nY, y => Util.ArrayInit(nA, numVulnerabilities, (a, v) => new Beta(1, 1)));
                this.probRetainPrior.ObservedValue        = Util.ArrayInit(nY, y => Util.ArrayInit(nA, numVulnerabilities, (a, v) => new Beta(1, 1)));
                this.probSkinIfSensPrior.ObservedValue    = new Beta(2.0, 1);
                this.probSkinIfNotSensPrior.ObservedValue = new Beta(1, 2.0);
                this.probIgeIfSensPrior.ObservedValue     = new Beta(2.0, 1);
                this.probIgeIfNotSensPrior.ObservedValue  = new Beta(1, 2.0);
            }
            else
            {
                this.probSensClassPrior.ObservedValue = beliefs.ProbVulnerabilityClass;
                probSens1Prior.ObservedValue          = Util.ArrayInit(nA, numVulnerabilities, (a, v) => beliefs.ProbSensitizationAgeOne[a, v]);
                probGainPrior.ObservedValue           = Util.ArrayInit(nY, y => Util.ArrayInit(nA, numVulnerabilities, (a, v) => beliefs.ProbGainSensitization[y][a, v]));
                probRetainPrior.ObservedValue         = Util.ArrayInit(nY, y => Util.ArrayInit(nA, numVulnerabilities, (a, v) => beliefs.ProbRetainSensitization[y][a, v]));
                probSkinIfSensPrior.ObservedValue     = beliefs.ProbSkinIfSensitized;
                probSkinIfNotSensPrior.ObservedValue  = beliefs.ProbSkinIfNotSensitized;
                probIgeIfSensPrior.ObservedValue      = beliefs.ProbIgEIfSensitized;
                probIgeIfNotSensPrior.ObservedValue   = beliefs.ProbIgEIfNotSensitized;
            }
        }
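A minimal sketch of the two class-prior choices that the useUniformClassPrior flag switches between (assuming numVulnerabilities = 4; only Dirichlet.PointMass, Dirichlet.Symmetric, Vector.Constant and Beta from Microsoft.ML.Probabilistic are used):

        using Microsoft.ML.Probabilistic.Distributions;
        using Microsoft.ML.Probabilistic.Math;

        int numVulnerabilities = 4;

        // Point-mass "uniform" prior: class proportions fixed at 1/numVulnerabilities.
        Dirichlet uniformPoint = Dirichlet.PointMass(
            Vector.Constant(numVulnerabilities, 1.0 / numVulnerabilities));

        // Sparse symmetric Dirichlet: class proportions stay uncertain (pseudo-count 0.1 each)
        // and are learned from the data.
        Dirichlet sparseSymmetric = Dirichlet.Symmetric(numVulnerabilities, 0.1);

        // Beta(1, 1) is the flat prior used above for the sensitization probabilities.
        Beta flat = new Beta(1, 1);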
Example #2
 /// <summary>
 /// Sets prior probabilities for all parameters
 /// </summary>
 /// <param name="probAge"></param>
 /// <param name="probGender"></param>
 /// <param name="probGambler"></param>
 /// <param name="probEducation"></param>
 /// <param name="cptPaymentHistory"></param>
 /// <param name="cptRatioOfDebtsToIncome"></param>
 /// <param name="cptIncome"></param>
 /// <param name="cptAssets"></param>
 /// <param name="cptReliability"></param>
 /// <param name="cptFutureIncome"></param>
 /// <param name="cptCreditWorthiness"></param>
 /// <returns></returns>
 public void SetPriorProbabilities(
     Vector probAge,
     Vector probGender,
     Vector probGambler,
     Vector probEducation,
     Vector[] cptPaymentHistory,
     Vector[] cptRatioOfDebtsToIncome,
     Vector[] cptIncome,
     Vector[] cptAssets,
     Vector[][][] cptReliability,
     Vector[][][] cptFutureIncome,
     Vector[][][] cptCreditWorthiness)
 {
     ProbAgePrior.ObservedValue                 = Dirichlet.PointMass(probAge);
     ProbGenderPrior.ObservedValue              = Dirichlet.PointMass(probGender);
     ProbGamblerPrior.ObservedValue             = Dirichlet.PointMass(probGambler);
     ProbEducationPrior.ObservedValue           = Dirichlet.PointMass(probEducation);
     CPTPaymentHistoryPrior.ObservedValue       = cptPaymentHistory.Select(v => Dirichlet.PointMass(v)).ToArray();
     CPTRatioOfDebtsToIncomePrior.ObservedValue = cptRatioOfDebtsToIncome.Select(v => Dirichlet.PointMass(v)).ToArray();
     CPTIncomePrior.ObservedValue               = cptIncome.Select(v => Dirichlet.PointMass(v)).ToArray();
     CPTAssetsPrior.ObservedValue               = cptAssets.Select(v => Dirichlet.PointMass(v)).ToArray();
     CPTReliabilityPrior.ObservedValue          = cptReliability.Select(vaa => vaa.Select(va => va.Select(v => Dirichlet.PointMass(v)).ToArray()).ToArray()).ToArray();
     CPTFutureIncomePrior.ObservedValue         = cptFutureIncome.Select(vaa => vaa.Select(va => va.Select(v => Dirichlet.PointMass(v)).ToArray()).ToArray()).ToArray();
     CPTCreditWorthinessPrior.ObservedValue     = cptCreditWorthiness.Select(vaa => vaa.Select(va => va.Select(v => Dirichlet.PointMass(v)).ToArray()).ToArray()).ToArray();
 }
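The recurring pattern in this method is clamping a conditional probability table (CPT) to a point mass, one Dirichlet per parent configuration. A small illustrative sketch (the 2x2 table and its values are made up, not taken from the model above):

     using System.Linq;
     using Microsoft.ML.Probabilistic.Distributions;
     using Microsoft.ML.Probabilistic.Math;

     // Hypothetical 2x2 CPT: one row per state of a binary parent,
     // each row a distribution over a binary child.
     Vector[] cptIncome =
     {
         Vector.FromArray(0.8, 0.2),   // parent state 0
         Vector.FromArray(0.3, 0.7),   // parent state 1
     };

     // Same wrapping SetPriorProbabilities applies to each CPT.
     Dirichlet[] cptIncomePrior = cptIncome.Select(v => Dirichlet.PointMass(v)).ToArray();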
Example #3
        public virtual void SolveSudoku(GrilleSudoku s)
        {
            Dirichlet[] dirArray = Enumerable.Repeat(Dirichlet.Uniform(CellDomain.Count), CellIndices.Count).ToArray();

            // Assign the values supplied by the puzzle mask to solve, by setting the initial probability distributions
            foreach (var cellIndex in GrilleSudoku.IndicesCellules)
            {
                if (s.Cellules[cellIndex] > 0)
                {
                    //Vector v = Vector.Zero(CellDomain.Count);
                    //v[s.Cellules[cellIndex] - 1] = 1.0;


                    //Todo: Alternative: using a non-zero probability avoids the "zero probability" error on Sudoku Easy #2, but Easy #3 is no longer solved
                    //Unsuccessful attempt at changing the probability to solve Sudoku #3
                    Vector v = Vector.Constant(CellDomain.Count, EpsilonProba);
                    v[s.Cellules[cellIndex] - 1] = FixedValueProba;

                    dirArray[cellIndex] = Dirichlet.PointMass(v);
                }
            }

            CellsPrior.ObservedValue = dirArray;


            // Todo: try inferring over other random variables,
            // and/or take an iterative approach: keep only the cells whose values have the best probabilities
            // and feed those values back into CellsPrior, as is also done in the neural nets project.
            //

            // IFunction draw_categorical(n)// where n is the number of samples to draw from the categorical distribution
            // {
            //
            // r = 1

            /* for (i=0; i<9; i++)
             *          for (j=0; j<9; j++)
             *                  for (k=0; k<9; k++)
             *                          ps[i][j][k] = probs[i][j][k].p; */


            //DistributionRefArray<Discrete, int> cellsPosterior = (DistributionRefArray<Discrete, int>)InferenceEngine.Infer(Cells);
            //var cellValues = cellsPosterior.Point.Select(i => i + 1).ToList();

            //Another possible inference variable (second option)
            Dirichlet[] cellsProbsPosterior = InferenceEngine.Infer<Dirichlet[]>(ProbCells);

            foreach (var cellIndex in GrilleSudoku.IndicesCellules)
            {
                if (s.Cellules[cellIndex] == 0)
                {
                    //s.Cellules[cellIndex] = cellValues[cellIndex];


                    var mode  = cellsProbsPosterior[cellIndex].GetMode();
                    var value = mode.IndexOf(mode.Max()) + 1;
                    s.Cellules[cellIndex] = value;
                }
            }
        }
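The Todo comments above revolve around the near-point-mass trick for clamped cells. A minimal sketch of building such a normalized vector, assuming a 9-digit cell domain and a hypothetical known digit:

            using System.Linq;
            using Microsoft.ML.Probabilistic.Distributions;
            using Microsoft.ML.Probabilistic.Math;

            int domainSize = 9;      // Sudoku digits 1..9
            int digit = 5;           // hypothetical clamped value for this cell
            double eps = 1e-6;       // small mass left on every other digit

            // Keep every digit strictly positive (avoids the "zero probability" error)
            // while the vector still sums to 1.
            double[] p = Enumerable.Repeat(eps, domainSize).ToArray();
            p[digit - 1] = 1.0 - (domainSize - 1) * eps;
            Dirichlet cellPrior = Dirichlet.PointMass(Vector.FromArray(p));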
Example #4
 /// <summary>
 /// Gets random initialisation for <see cref="Theta"/>. This initialises downward messages from <see cref="Theta"/>.
 /// The sole purpose is to break symmetry in the inference - it does not change the model.
 /// </summary>
 /// <param name="sparsity">The sparsity settings</param>
 /// <returns></returns>
 /// <remarks>This is implemented so as to support sparse initialisations</remarks>
 public static Dirichlet[] GetInitialisation(
     int numDocs, int numTopics, Sparsity sparsity)
 {
     return(Util.ArrayInit(numDocs, i =>
     {
         // Choose a random topic
         Vector v = Vector.Zero(numTopics, sparsity);
         int topic = Rand.Int(numTopics);
         v[topic] = 1.0;
         return Dirichlet.PointMass(v);
     }));
 }
Example #5
        public virtual void SolveSudoku(GrilleSudoku s)
        {
            Dirichlet[] dirArray = Enumerable.Repeat(Dirichlet.Uniform(CellDomain.Count), CellIndices.Count).ToArray();

            // Assign the values supplied by the puzzle mask to solve, by setting the initial probability distributions
            foreach (var cellIndex in GrilleSudoku.IndicesCellules)
            {
                if (s.Cellules[cellIndex] > 0)
                {
                    Vector v = Vector.Zero(CellDomain.Count);
                    v[s.Cellules[cellIndex] - 1] = 1.0;


                    //Todo: Alternative: using a non-zero probability avoids the "zero probability" error on Sudoku Easy #2, but Easy #3 is no longer solved

                    //Vector v = Vector.Constant(CellDomain.Count, EpsilonProba);
                    //v[s.Cellules[cellIndex] - 1] = FixedValueProba;

                    dirArray[cellIndex] = Dirichlet.PointMass(v);
                }
            }

            CellsPrior.ObservedValue = dirArray;


            // Todo: try inferring over other random variables,
            // and/or take an iterative approach: keep only the cells whose values have the best probabilities and feed those values back into CellsPrior, as is also done in the neural nets project.



            DistributionRefArray<Discrete, int> cellsPosterior = (DistributionRefArray<Discrete, int>)InferenceEngine.Infer(Cells);
            var cellValues = cellsPosterior.Point.Select(i => i + 1).ToList();

            //Another possible inference variable (second option)
            //Dirichlet[] cellsProbsPosterior = InferenceEngine.Infer<Dirichlet[]>(ProbCells);

            foreach (var cellIndex in GrilleSudoku.IndicesCellules)
            {
                if (s.Cellules[cellIndex] == 0)
                {
                    s.Cellules[cellIndex] = cellValues[cellIndex];

                    //Another possible inference variable (second option)
                    //var mode = cellsProbsPosterior[cellIndex].GetMode();
                    //var value = mode.IndexOf(1.0) + 1;
                    //s.Cellules[cellIndex] = value;
                }
            }
        }
Example #6
        /// <summary>
        /// Gets random initialisation for <see cref="Theta"/>. This initialises downward messages from <see cref="Theta"/>.
        /// The sole purpose is to break symmetry in the inference - it does not change the model.
        /// </summary>
        /// <param name="sparsity">The sparsity settings</param>
        /// <returns></returns>
        /// <remarks>This is implemented so as to support sparse initialisations</remarks>
        public static IDistribution<Vector[]> GetInitialisation(
            int numDocs, int numTopics, Sparsity sparsity)
        {
            var initTheta = new Dirichlet[numDocs];

            for (int i = 0; i < numDocs; i++)
            {
                // Choose a random topic
                var v     = Vector.Zero(numTopics, sparsity);
                int topic = Rand.Int(numTopics);
                v[topic]     = 1.0;
                initTheta[i] = Dirichlet.PointMass(v);
            }
            return(Distribution<Vector>.Array(initTheta));
        }
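A sketch of how such an initialisation is typically consumed for symmetry breaking, assuming it runs where GetInitialisation is in scope; the small model below (Theta, the range, the engine) is illustrative and not part of the example:

            using Microsoft.ML.Probabilistic.Distributions;
            using Microsoft.ML.Probabilistic.Math;
            using Microsoft.ML.Probabilistic.Models;

            int numDocs = 10, numTopics = 3;
            Range d = new Range(numDocs);

            // Per-document topic proportions with a symmetric Dirichlet prior.
            var Theta = Variable.Array<Vector>(d).Named("Theta");
            Theta[d] = Variable.DirichletSymmetric(numTopics, 1.0).ForEach(d);

            // Seed the downward messages with the random point-mass initialisation
            // so inference does not sit at the symmetric fixed point.
            Theta.InitialiseTo(GetInitialisation(numDocs, numTopics, Sparsity.Dense));

            var engine = new InferenceEngine();
            Dirichlet[] posterior = engine.Infer<Dirichlet[]>(Theta);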
Example #7
        /// <summary>
        /// Returns the probability of Rain given optional readings on
        /// cloudiness, sprinkler, and wetness of grass, and given known parameters.
        /// </summary>
        /// <param name="cloudy">Optional observation of cloudy or not</param>
        /// <param name="sprinkler">Optional observation of whether sprinkler is on or not</param>
        /// <param name="wet">Optional observation of whether grass is wet or not</param>
        /// <param name="probCloudy">Cloudiness probability vector</param>
        /// <param name="cptSprinkler">Sprinkler conditional probability table</param>
        /// <param name="cptRain">Rain conditional probability table</param>
        /// <param name="cptWetGrass">Wet grass conditional probability table</param>
        /// <returns>Probability that it has rained</returns>
        public double ProbRain(
            int? cloudy,
            int? sprinkler,
            int? wet,
            Vector probCloudy,
            Vector[] cptSprinkler,
            Vector[] cptRain,
            Vector[][] cptWetGrass)
        {
            var probCloudyPrior   = Dirichlet.PointMass(probCloudy);
            var cptSprinklerPrior = cptSprinkler.Select(v => Dirichlet.PointMass(v)).ToArray();
            var cptRainPrior      = cptRain.Select(v => Dirichlet.PointMass(v)).ToArray();
            var cptWetGrassPrior  = cptWetGrass.Select(va => va.Select(v => Dirichlet.PointMass(v)).ToArray()).ToArray();

            return(ProbRain(cloudy, sprinkler, wet, probCloudyPrior, cptSprinklerPrior, cptRainPrior, cptWetGrassPrior));
        }
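A hedged usage sketch for the classic sprinkler network, assumed to run where ProbRain is in scope; the numbers, the index convention (1 = true) and the parent ordering of cptWetGrass are illustrative only:

            using Microsoft.ML.Probabilistic.Math;

            Vector probCloudy = Vector.FromArray(0.5, 0.5);
            Vector[] cptSprinkler = { Vector.FromArray(0.5, 0.5),     // given not cloudy
                                      Vector.FromArray(0.9, 0.1) };   // given cloudy
            Vector[] cptRain      = { Vector.FromArray(0.8, 0.2),     // given not cloudy
                                      Vector.FromArray(0.2, 0.8) };   // given cloudy
            Vector[][] cptWetGrass =
            {
                new[] { Vector.FromArray(1.0, 0.0), Vector.FromArray(0.1, 0.9) },
                new[] { Vector.FromArray(0.1, 0.9), Vector.FromArray(0.01, 0.99) },
            };

            // Grass observed wet; cloudiness and sprinkler left unobserved.
            double pRain = ProbRain(null, null, 1, probCloudy, cptSprinkler, cptRain, cptWetGrass);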
Example #8
 /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="DiscreteFromDirichletOp"]/message_doc[@name="AverageLogFactor(Discrete, Vector)"]/*'/>
 public static double AverageLogFactor(Discrete sample, Vector probs)
 {
     return(AverageLogFactor(sample, Dirichlet.PointMass(probs)));
 }
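A minimal call sketch; the operator class name DiscreteFromDirichletOp is taken from the XML include above, and the values are made up:

     using Microsoft.ML.Probabilistic.Distributions;
     using Microsoft.ML.Probabilistic.Factors;
     using Microsoft.ML.Probabilistic.Math;

     var sample = new Discrete(0.5, 0.5);       // belief over a two-valued variable
     var probs  = Vector.FromArray(0.9, 0.1);   // fixed parameter vector

     // With a point-mass Dirichlet this reduces to the expected log-probability
     // of the sample under probs.
     double logFactor = DiscreteFromDirichletOp.AverageLogFactor(sample, probs);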