public void IndexOfMaximumDiverges()
        {
            int numberOfFeatures  = 10;
            int numberOfFolders   = 3;
            int numEmails         = 1000;
            var featuresCountsObs = Enumerable.Range(0, numEmails).Select(o => Rand.Binomial(numberOfFeatures, 5.0 / numberOfFeatures)).ToArray();
            var featureIndicesObs = featuresCountsObs.Select(o => Rand.Perm(numberOfFeatures).ToList().GetRange(0, o).ToArray()).ToArray();
            //var trueFeatureWeights = Enumerable.Range(0, numberOfFolders).Select(p=>Enumerable.Range(0, numberOfFeatures).Select(o => Rand.Normal()).ToArray()).ToArray();
            //var folders = featureIndicesObs.Select(fi=>fi.Select(p=>trueFeatureWeights.Select(q=>q[p]).Sum()
            // random data for now!
            var folders = Enumerable.Range(0, numEmails).Select(o => Rand.Int(numberOfFolders)).ToArray();

            Range numberOfFeaturesRange = new Range(numberOfFeatures).Named("NumberOfFeaturesRange");

            numberOfFeaturesRange.AddAttribute(new Sequential()); // This requires a new build of Infer.NET

            // create a range for the number of classes
            Range numberOfClases = new Range(numberOfFolders).Named("NumberOfClassesRange");

            // Model the total number of items
            var numberOfItems = Variable.New <int>().Named("numberOfItems");

            numberOfItems.ObservedValue = numEmails;
            Range numberOfItemsRange = new Range(numberOfItems).Named("numberOfItemsRange");

            numberOfItemsRange.AddAttribute(new Sequential());

            // Model the number of features present in each item
            var   featureCounts         = Variable.Array <int>(numberOfItemsRange).Named("featureCounts");
            Range featureCountItemRange = new Range(featureCounts[numberOfItemsRange]).Named("featureItemCountRange");

            featureCounts.ObservedValue = featuresCountsObs;

            // Model the features we observe
            var featureIndicies = Variable.Array(Variable.Array <int>(featureCountItemRange), numberOfItemsRange).Named("featureIndicies");

            featureIndicies.ObservedValue = featureIndicesObs;
            // Set up the priors
            var FeatureWeights = Variable.Array(Variable.Array <double>(numberOfFeaturesRange), numberOfClases).Named("FeatureWeights");

            FeatureWeights[numberOfClases][numberOfFeaturesRange] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(numberOfClases, numberOfFeaturesRange);

            // Set up the label value (folder)
            var folderValue = Variable.Array <int>(numberOfItemsRange).Named("folderValue");

            folderValue.ObservedValue = folders;

            var sparseWeightVector =
                Variable.Array(Variable.Array(Variable.Array <double>(featureCountItemRange), numberOfClases), numberOfItemsRange).Named("sparseWeightVector");

            sparseWeightVector[numberOfItemsRange][numberOfClases] = Variable.Subarray <double>(FeatureWeights[numberOfClases], featureIndicies[numberOfItemsRange]);
            var scoresWithNoise = Variable.Array(Variable.Array <double>(numberOfClases), numberOfItemsRange).Named("scoresWithNoise");

            scoresWithNoise[numberOfItemsRange][numberOfClases] = Variable.GaussianFromMeanAndVariance(Variable.Sum(sparseWeightVector[numberOfItemsRange][numberOfClases]), 1);
            folderValue[numberOfItemsRange] = Variable <int> .Factor(MMath.IndexOfMaximumDouble, scoresWithNoise[numberOfItemsRange]);

            // Give the compiler a marginal prototype for the output of the custom factor
            folderValue.AddAttribute(new MarginalPrototype(Discrete.Uniform(numberOfClases.SizeAsInt)));

            var ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(FeatureWeights));
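
            // A small follow-up sketch, not part of the original test: the same posterior can be
            // read back as a jagged array of Gaussians and summarised per class via its means.
            Gaussian[][] weightPosterior = ie.Infer<Gaussian[][]>(FeatureWeights);
            for (int c = 0; c < numberOfFolders; c++)
            {
                Console.WriteLine("class {0}: posterior mean of weight 0 = {1:g4}", c, weightPosterior[c][0].GetMean());
            }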
        }
Example #2
        private void LearnAPIClick5LabelModel(
            int numLabels,
            bool learnScoreMean,
            bool learnScorePrec,
            bool learnJudgePrec,
            bool learnClickPrec,
            bool learnThresholds,
            double nominalScoreMean,
            double nominalScorePrec,
            double nominalJudgePrec,
            double nominalClickPrec,
            int[] labels,
            int[] clicks,
            int[] exams,
            int chunkSize,
            int nPasses,
            bool printToConsole,
            out Gaussian margScoreMean,
            out Gamma margScorePrec,
            out Gamma margJudgePrec,
            out Gamma margClickPrec,
            out Gaussian[] margThresh)
        {
            //------------------------------------------------------
            // Observations
            //------------------------------------------------------
            Gaussian[][][] allObs    = getClickObservations(numLabels, chunkSize, labels, clicks, exams);
            int            numChunks = allObs.Length;

            ////-------------------------------------------------------------
            //// Prior distributions
            ////-------------------------------------------------------------
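            // When a learnXxx flag is false the corresponding prior is built with zero variance,
            // which is a point mass at the nominal value, so that parameter is effectively clamped
            // rather than learned; with the flag set it gets a broad (variance 1) prior and is
            // inferred from the data.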
            Gaussian priorScoreMean = Gaussian.FromMeanAndVariance(nominalScoreMean, learnScoreMean ? 1 : 0);
            Gamma    priorScorePrec = Gamma.FromMeanAndVariance(nominalScorePrec, learnScorePrec ? 1 : 0);
            Gamma    priorJudgePrec = Gamma.FromMeanAndVariance(nominalJudgePrec, learnJudgePrec ? 1 : 0);
            Gamma    priorClickPrec = Gamma.FromMeanAndVariance(nominalClickPrec, learnClickPrec ? 1 : 0);

            Gaussian[] priorThreshMean;
            CalculatePriors(learnThresholds, numLabels, out priorThreshMean);

            ////-----------------------------------------------------
            //// Create the shared variables
            ////-----------------------------------------------------
            int numThresholds = numLabels + 1;
            SharedVariable <double> scoreMean = SharedVariable <double> .Random(priorScoreMean).Named("scoreMean");

            SharedVariable <double> scorePrec = SharedVariable <double> .Random(priorScorePrec).Named("scorePrec");

            SharedVariable <double> judgePrec = SharedVariable <double> .Random(priorJudgePrec).Named("judgePrec");

            SharedVariable <double> clickPrec = SharedVariable <double> .Random(priorClickPrec).Named("clickPrec");

            SharedVariable <double>[] thresholds = new SharedVariable <double> [numThresholds];
            for (int t = 0; t < numThresholds; t++)
            {
                thresholds[t] = SharedVariable <double> .Random(priorThreshMean[t]).Named("threshMeans" + t);
            }

            //----------------------------------------------------------------------------------
            // The model
            //----------------------------------------------------------------------------------

            Model model = new Model(numChunks);
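            // Model(numChunks) together with SharedVariable.GetCopyFor(model) implements chunked
            // (shared-variable) training: each chunk of observations is processed in turn and the
            // shared parameter variables accumulate evidence across all chunks.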

            VariableArray <Gaussian>[] clickObs       = new VariableArray <Gaussian> [numLabels];
            Variable <int>[]           clickObsLength = new Variable <int> [numLabels];

            for (int i = 0; i < numLabels; i++)
            {
                clickObsLength[i] = Variable.New <int>().Named("clickObsLength" + i);
                Range r = new Range(clickObsLength[i]).Named("dataCount" + i);
                clickObs[i] = Variable.Array <Gaussian>(r).Named("Obs" + i);
                VariableArray <double> scores  = Variable.Array <double>(r).Named("scores" + i);
                VariableArray <double> scoresJ = Variable.Array <double>(r).Named("scoresJ" + i);
                VariableArray <double> scoresC = Variable.Array <double>(r).Named("scoresC" + i);
                scores[r] = Variable <double> .GaussianFromMeanAndPrecision(scoreMean.GetCopyFor(model), scorePrec.GetCopyFor(model)).ForEach(r);

                scoresJ[r] = Variable <double> .GaussianFromMeanAndPrecision(scores[r], judgePrec.GetCopyFor(model));

                scoresC[r] = Variable <double> .GaussianFromMeanAndPrecision(scores[r], clickPrec.GetCopyFor(model));

                Variable.ConstrainBetween(scoresJ[r], thresholds[i].GetCopyFor(model), thresholds[i + 1].GetCopyFor(model));
                Variable.ConstrainEqualRandom <double, Gaussian>(scoresC[r], clickObs[i][r]);
                r.AddAttribute(new Sequential());
            }

            InferenceEngine engine = new InferenceEngine();

            //----------------------------------------------------------
            // Outer loop iterates over the passes, the middle loop over
            // the data chunks, and the inner loop over the labels
            //----------------------------------------------------------
            for (int pass = 0; pass < nPasses; pass++)
            {
                for (int c = 0; c < numChunks; c++)
                {
                    for (int i = 0; i < numLabels; i++)
                    {
                        clickObsLength[i].ObservedValue = allObs[c][i].Length;
                        clickObs[i].ObservedValue       = allObs[c][i];
                    }

                    // Infer the output messages
                    model.InferShared(engine, c);

                    if (printToConsole)
                    {
                        margScoreMean = scoreMean.Marginal <Gaussian>();
                        margScorePrec = scorePrec.Marginal <Gamma>();
                        margJudgePrec = judgePrec.Marginal <Gamma>();
                        margClickPrec = clickPrec.Marginal <Gamma>();
                        margThresh    = new Gaussian[numThresholds];
                        for (int i = 0; i < numThresholds; i++)
                        {
                            margThresh[i] = thresholds[i].Marginal <Gaussian>();
                        }
                        Console.WriteLine("****** Pass {0}, chunk {1} ******", pass, c);
                        Console.WriteLine("----- Marginals -----");
                        Console.WriteLine("scoreMean = {0}", margScoreMean);
                        Console.WriteLine("scorePrec = {0}", margScorePrec);
                        Console.WriteLine("judgePrec = {0}", margJudgePrec);
                        Console.WriteLine("clickPrec = {0}", margClickPrec);
                        for (int t = 0; t < numThresholds; t++)
                        {
                            Console.WriteLine("threshMean {0} = {1}", t, margThresh[t]);
                        }
                    }
                }
            }
            margScoreMean = scoreMean.Marginal <Gaussian>();
            margScorePrec = scorePrec.Marginal <Gamma>();
            margJudgePrec = judgePrec.Marginal <Gamma>();
            margClickPrec = clickPrec.Marginal <Gamma>();
            margThresh    = new Gaussian[numThresholds];
            for (int i = 0; i < numThresholds; i++)
            {
                margThresh[i] = thresholds[i].Marginal <Gaussian>();
            }
        }
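
        // A hedged usage sketch, not part of the original source: drives LearnAPIClick5LabelModel
        // with a tiny synthetic data set (all values below are illustrative only) and prints the
        // learned score mean. Five relevance labels are assumed, with clicks never exceeding exams.
        private void LearnAPIClick5LabelModelSketch()
        {
            int[] labels = { 0, 1, 2, 3, 4, 0, 1, 2 };          // judge labels in 0..numLabels-1
            int[] clicks = { 1, 3, 4, 6, 9, 0, 2, 5 };          // observed click counts
            int[] exams  = { 10, 10, 10, 10, 10, 10, 10, 10 };  // examination (impression) counts

            Gaussian   margScoreMean;
            Gamma      margScorePrec, margJudgePrec, margClickPrec;
            Gaussian[] margThresh;
            LearnAPIClick5LabelModel(
                5,                          // numLabels
                true, true, true, true,     // learn score mean/prec, judge prec, click prec
                true,                       // learn thresholds
                0.5, 2.0, 2.0, 2.0,         // nominal values (used when a parameter is not learned)
                labels, clicks, exams,
                4,                          // chunkSize
                2,                          // nPasses
                false,                      // printToConsole
                out margScoreMean, out margScorePrec, out margJudgePrec, out margClickPrec, out margThresh);
            Console.WriteLine("learned scoreMean = {0}", margScoreMean);
        }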
        public void Run()
        {
            InferenceEngine engine = new InferenceEngine();

            if (!(engine.Algorithm is Algorithms.ExpectationPropagation))
            {
                Console.WriteLine("This example only runs with Expectation Propagation");
                return;
            }

            Rand.Restart(0);

            int      nQuestions          = 100;
            int      nSubjects           = 40;
            int      nChoices            = 4;
            Gaussian abilityPrior        = new Gaussian(0, 1);
            Gaussian difficultyPrior     = new Gaussian(0, 1);
            Gamma    discriminationPrior = Gamma.FromMeanAndVariance(1, 0.01);

            double[] trueAbility, trueDifficulty, trueDiscrimination;
            int[]    trueTrueAnswer;
            int[][]  data = Sample(
                nSubjects,
                nQuestions,
                nChoices,
                abilityPrior,
                difficultyPrior,
                discriminationPrior,
                out trueAbility,
                out trueDifficulty,
                out trueDiscrimination,
                out trueTrueAnswer);
            Range question = new Range(nQuestions).Named("question");
            Range subject  = new Range(nSubjects).Named("subject");
            Range choice   = new Range(nChoices).Named("choice");
            var   response = Variable.Array(Variable.Array <int>(question), subject).Named("response");

            response.ObservedValue = data;

            var ability = Variable.Array <double>(subject).Named("ability");

            ability[subject] = Variable.Random(abilityPrior).ForEach(subject);
            var difficulty = Variable.Array <double>(question).Named("difficulty");

            difficulty[question] = Variable.Random(difficultyPrior).ForEach(question);
            var discrimination = Variable.Array <double>(question).Named("discrimination");

            discrimination[question] = Variable.Random(discriminationPrior).ForEach(question);
            var trueAnswer = Variable.Array <int>(question).Named("trueAnswer");

            trueAnswer[question] = Variable.DiscreteUniform(choice).ForEach(question);

            using (Variable.ForEach(subject))
            {
                using (Variable.ForEach(question))
                {
                    var advantage      = (ability[subject] - difficulty[question]).Named("advantage");
                    var advantageNoisy = Variable.GaussianFromMeanAndPrecision(advantage, discrimination[question]).Named("advantageNoisy");
                    var correct        = (advantageNoisy > 0).Named("correct");
                    using (Variable.If(correct))
                    {
                        response[subject][question] = trueAnswer[question];
                    }

                    using (Variable.IfNot(correct))
                    {
                        response[subject][question] = Variable.DiscreteUniform(choice);
                    }
                }
            }
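            // The nested loops above encode the generative model: a subject answers a question
            // correctly when their noisy advantage (ability minus difficulty, observed through a
            // Gaussian with precision 'discrimination') is positive; otherwise the response is a
            // uniformly random guess among the choices.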

            engine.NumberOfIterations = 5;
            subject.AddAttribute(new Models.Attributes.Sequential());  // needed to get stable convergence
            question.AddAttribute(new Models.Attributes.Sequential()); // needed to get stable convergence
            bool doMajorityVoting = false;                             // set this to 'true' to do majority voting

            if (doMajorityVoting)
            {
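                // Clamping every ability to 0, every difficulty to 0 and every discrimination to 1
                // removes all per-subject and per-question variation, so the posterior over
                // trueAnswer reduces to a simple vote count (majority voting).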
                ability.ObservedValue        = Util.ArrayInit(nSubjects, i => 0.0);
                difficulty.ObservedValue     = Util.ArrayInit(nQuestions, i => 0.0);
                discrimination.ObservedValue = Util.ArrayInit(nQuestions, i => 1.0);
            }

            var trueAnswerPosterior = engine.Infer <IList <Discrete> >(trueAnswer);
            int numCorrect          = 0;

            for (int q = 0; q < nQuestions; q++)
            {
                int bestGuess = trueAnswerPosterior[q].GetMode();
                if (bestGuess == trueTrueAnswer[q])
                {
                    numCorrect++;
                }
            }

            double pctCorrect = 100.0 * numCorrect / nQuestions;

            Console.WriteLine("{0}% TrueAnswers correct", pctCorrect.ToString("f0"));
            var difficultyPosterior = engine.Infer <IList <Gaussian> >(difficulty);

            for (int q = 0; q < System.Math.Min(nQuestions, 4); q++)
            {
                Console.WriteLine("difficulty[{0}] = {1} (sampled from {2})", q, difficultyPosterior[q], trueDifficulty[q].ToString("g2"));
            }

            var discriminationPosterior = engine.Infer <IList <Gamma> >(discrimination);

            for (int q = 0; q < System.Math.Min(nQuestions, 4); q++)
            {
                Console.WriteLine("discrimination[{0}] = {1} (sampled from {2})", q, discriminationPosterior[q], trueDiscrimination[q].ToString("g2"));
            }

            var abilityPosterior = engine.Infer <IList <Gaussian> >(ability);

            for (int s = 0; s < System.Math.Min(nSubjects, 4); s++)
            {
                Console.WriteLine("ability[{0}] = {1} (sampled from {2})", s, abilityPosterior[s], trueAbility[s].ToString("g2"));
            }
        }
Example #4
        public void Run()
        {
            // This example requires EP
            InferenceEngine engine = new InferenceEngine();

            if (!(engine.Algorithm is Algorithms.ExpectationPropagation))
            {
                Console.WriteLine("This example only runs with Expectation Propagation");
                return;
            }

            int nPlayers = 10;
            int nYears   = 10;

            Rand.Restart(1);

            var skillPrior                     = new Gaussian(1200, 800 * 800);
            var drawMarginMeanPrior            = new Gaussian(700, 500 * 500);
            var drawMarginPrecisionPrior       = Gamma.FromShapeAndRate(2, 500 * 500);
            var performancePrecisionPrior      = Gamma.FromShapeAndRate(2, 800 * 800);
            var skillChangePrecisionPrior      = Gamma.FromShapeAndRate(2, 26 * 26);
            var drawMarginChangePrecisionPrior = Gamma.FromShapeAndRate(2, 10 * 10);
            var whiteAdvantagePrior            = new Gaussian(0, 200 * 200);

            var drawMarginMean            = Variable.Random(drawMarginMeanPrior).Named("drawMarginMean");
            var drawMarginPrecision       = Variable.Random(drawMarginPrecisionPrior).Named("drawMarginPrecision");
            var performancePrecision      = Variable.Random(performancePrecisionPrior).Named("performancePrecision");
            var skillChangePrecision      = Variable.Random(skillChangePrecisionPrior).Named("skillChangePrecision");
            var drawMarginChangePrecision = Variable.Random(drawMarginChangePrecisionPrior).Named("drawMarginChangePrecision");
            var whiteAdvantage            = Variable.Random(whiteAdvantagePrior).Named("whiteAdvantage");

            Range player = new Range(nPlayers).Named("player");
            Range year   = new Range(nYears).Named("year");
            VariableArray <int> firstYear = Variable.Array <int>(player).Named("firstYear");
            var skill      = Variable.Array(Variable.Array <double>(player), year).Named("skill");
            var drawMargin = Variable.Array(Variable.Array <double>(player), year).Named("drawMargin");

            using (var yearBlock = Variable.ForEach(year))
            {
                var y = yearBlock.Index;
                using (Variable.If(y == 0))
                {
                    skill[year][player]      = Variable.Random(skillPrior).ForEach(player);
                    drawMargin[year][player] = Variable.GaussianFromMeanAndPrecision(drawMarginMean, drawMarginPrecision).ForEach(player);
                }

                using (Variable.If(y > 0))
                {
                    using (Variable.ForEach(player))
                    {
                        Variable <bool> isFirstYear = (firstYear[player] >= y).Named("isFirstYear");
                        using (Variable.If(isFirstYear))
                        {
                            skill[year][player]      = Variable.Random(skillPrior);
                            drawMargin[year][player] = Variable.GaussianFromMeanAndPrecision(drawMarginMean, drawMarginPrecision);
                        }

                        using (Variable.IfNot(isFirstYear))
                        {
                            skill[year][player]      = Variable.GaussianFromMeanAndPrecision(skill[y - 1][player], skillChangePrecision);
                            drawMargin[year][player] = Variable.GaussianFromMeanAndPrecision(drawMargin[y - 1][player], drawMarginChangePrecision);
                        }
                    }
                }
            }
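
            // The block above gives skills and draw margins TrueSkill-through-time style dynamics:
            // a player's first active year is drawn from the prior, and each later year takes a
            // Gaussian step around the previous year's value.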

            // Sample parameter values according to the above model
            firstYear.ObservedValue = Util.ArrayInit(nPlayers, i => Rand.Int(nYears));
            Parameters parameters = new Parameters();

            parameters.drawMarginMean            = drawMarginMeanPrior.Sample();
            parameters.drawMarginPrecision       = drawMarginPrecisionPrior.Sample();
            parameters.performancePrecision      = performancePrecisionPrior.Sample();
            parameters.skillChangePrecision      = skillChangePrecisionPrior.Sample();
            parameters.drawMarginChangePrecision = drawMarginChangePrecisionPrior.Sample();
            parameters.whiteAdvantage            = whiteAdvantagePrior.Sample();
            parameters.skill      = Util.ArrayInit(nYears, y => Util.ArrayInit(nPlayers, i => skillPrior.Sample()));
            parameters.drawMargin = Util.ArrayInit(nYears, y => Util.ArrayInit(nPlayers, i => Gaussian.Sample(parameters.drawMarginMean, parameters.drawMarginPrecision)));
            for (int y = 0; y < nYears; y++)
            {
                for (int i = 0; i < nPlayers; i++)
                {
                    if (y > firstYear.ObservedValue[i])
                    {
                        parameters.skill[y][i]      = Gaussian.Sample(parameters.skill[y - 1][i], parameters.skillChangePrecision);
                        parameters.drawMargin[y][i] = Gaussian.Sample(parameters.drawMargin[y - 1][i], parameters.drawMarginChangePrecision);
                    }
                }
            }

            // Sample game outcomes
            int[][] whiteData, blackData, outcomeData;
            GenerateData(parameters, firstYear.ObservedValue, out whiteData, out blackData, out outcomeData);

            bool inferParameters = false;  // make this true to infer additional parameters

            if (!inferParameters)
            {
                // fix the true parameters
                drawMarginMean.ObservedValue            = parameters.drawMarginMean;
                drawMarginPrecision.ObservedValue       = parameters.drawMarginPrecision;
                performancePrecision.ObservedValue      = parameters.performancePrecision;
                skillChangePrecision.ObservedValue      = parameters.skillChangePrecision;
                drawMarginChangePrecision.ObservedValue = parameters.drawMarginChangePrecision;
            }

            // Learn the skills from the data
            int[] nGamesData  = Util.ArrayInit(nYears, y => outcomeData[y].Length);
            var   nGames      = Variable.Observed(nGamesData, year).Named("nGames");
            Range game        = new Range(nGames[year]).Named("game");
            var   whitePlayer = Variable.Observed(whiteData, year, game).Named("whitePlayer");
            var   blackPlayer = Variable.Observed(blackData, year, game).Named("blackPlayer");
            var   outcome     = Variable.Observed(outcomeData, year, game).Named("outcome");
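
            // In the game loop below, outcome is coded 0 = black win, 1 = draw, 2 = white win.
            // Each case constrains the white player's performance delta (including the first-move
            // advantage) against the two players' draw margins, which ties the observed results
            // to the latent skills.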

            using (Variable.ForEach(year))
            {
                using (Variable.ForEach(game))
                {
                    var w = whitePlayer[year][game];
                    var b = blackPlayer[year][game];
                    Variable <double> white_performance = Variable.GaussianFromMeanAndPrecision(skill[year][w], performancePrecision).Named("white_performance");
                    Variable <double> black_performance = Variable.GaussianFromMeanAndPrecision(skill[year][b], performancePrecision).Named("black_performance");
                    Variable <double> white_drawMargin  = Variable.Copy(drawMargin[year][w]).Named("white_drawMargin");
                    Variable <double> black_drawMargin  = Variable.Copy(drawMargin[year][b]).Named("black_drawMargin");
                    Variable <double> white_delta       = (white_performance - black_performance + whiteAdvantage).Named("white_delta");
                    using (Variable.Case(outcome[year][game], 0))
                    { // black wins
                        Variable.ConstrainTrue(white_delta + white_drawMargin < 0);
                    }

                    using (Variable.Case(outcome[year][game], 1))
                    { // draw
                        Variable.ConstrainBetween(white_delta, -white_drawMargin, black_drawMargin);
                    }

                    using (Variable.Case(outcome[year][game], 2))
                    { // white wins
                        Variable.ConstrainTrue(white_delta - black_drawMargin > 0);
                    }
                }
            }

            year.AddAttribute(new Models.Attributes.Sequential());   // helps inference converge faster

            engine.NumberOfIterations = 10;
            var skillPost      = engine.Infer <Gaussian[][]>(skill);
            var drawMarginPost = engine.Infer <Gaussian[][]>(drawMargin);

            // compare estimates to the true values
            if (inferParameters)
            {
                Console.WriteLine("drawMargin mean = {0} (truth = {1})", engine.Infer <Gaussian>(drawMarginMean), parameters.drawMarginMean);
                Console.WriteLine("drawMargin precision = {0} (truth = {1})", engine.Infer <Gamma>(drawMarginPrecision).GetMean(), parameters.drawMarginPrecision);
                Console.WriteLine("performancePrecision = {0} (truth = {1})", engine.Infer <Gamma>(performancePrecision).GetMean(), parameters.performancePrecision);
                Console.WriteLine("skillChangePrecision = {0} (truth = {1})", engine.Infer <Gamma>(skillChangePrecision).GetMean(), parameters.skillChangePrecision);
                Console.WriteLine("drawMarginChangePrecision = {0} (truth = {1})", engine.Infer <Gamma>(drawMarginChangePrecision).GetMean(), parameters.drawMarginChangePrecision);
            }

            Console.WriteLine("white advantage = {0} (truth = {1})", engine.Infer <Gaussian>(whiteAdvantage), parameters.whiteAdvantage);
            int countPrinted = 0;

            for (int y = 0; y < nYears; y++)
            {
                for (int p = 0; p < nPlayers; p++)
                {
                    if (y >= firstYear.ObservedValue[p])
                    {
                        if (++countPrinted > 3)
                        {
                            break;
                        }

                        Console.WriteLine("skill[{0}][{1}] = {2} (truth = {3:g4})", y, p, skillPost[y][p], parameters.skill[y][p]);
                        Console.WriteLine("drawMargin[{0}][{1}] = {2} (truth = {3:g4})", y, p, drawMarginPost[y][p], parameters.drawMargin[y][p]);
                    }
                }
            }
        }