public void MarginalPrototypeOfConstantInGateTest()
        {
            Range item  = new Range(2).Named("i");
            var   bools = Variable.Array <bool>(item).Named("bools");

            bools.ObservedValue = Util.ArrayInit(item.SizeAsInt, i => false);
            var Response = Variable.Array <int>(item).Named("response");

            Response.ObservedValue = Util.ArrayInit(item.SizeAsInt, i => 0);
            var correct = Variable.Array <bool>(item).Named("correct");

            using (Variable.ForEach(item))
            {
                correct[item] = Variable.Bernoulli(0.1);
                using (Variable.If(bools[item]))
                {
                    using (Variable.If(correct[item]))
                        Response[item] = Variable.Constant(1);
                    using (Variable.IfNot(correct[item]))
                        Response[item] =
                            Variable.DiscreteUniform(4) + Variable.Constant(1);
                }
                using (Variable.IfNot(bools[item]))
                {
                    Response[item] = Variable.Constant(0);
                }
            }

            InferenceEngine engine = new InferenceEngine();

            Console.WriteLine(engine.Infer(correct));
            Console.WriteLine(engine.Infer(Response));
        }
        public Gaussian[] IndexOfMaximumFactorGate2(Discrete y, out double logEvidence)
        {
            var ev    = Variable.Bernoulli(0.5).Named("ev");
            int N     = y.Dimension;
            var n     = new Range(N).Named("n");
            var block = Variable.If(ev);
            var x     = Variable.Array <double>(n).Named("x");

            x[n] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(n);
            var yVar = Variable <int> .Random(y).Named("y");

            var indices = Variable.Observed(new int[] { 0, 1, 2 }, n);

            yVar.SetValueRange(n);
            using (Variable.Switch(yVar))
            {
                indices[yVar] = IndexOfMaximum(x).Named("temp");
            }
            block.CloseBlock();
            var ie = new InferenceEngine();

            //ie.NumberOfIterations = 2;
            logEvidence = ie.Infer <Bernoulli>(ev).LogOdds;
            return(ie.Infer <Gaussian[]>(x));
        }
Example #3
        public void GPClassificationTest4()
        {
            bool[]   yData = new bool[] { false, false, true, false };
            double[] xData = new double[]
            {
                -1.555555555555556, -1.111111111111111, -0.2222222222222223, 1.555555555555555
            };
            Vector[]             xVec     = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[]             basis    = new Vector[] { Vector.Zero(1) };
            IKernelFunction      kf       = new SquaredExponential(0.0);
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <bool> y = Variable.Array <bool>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = (h[item] > 0);
            block.CloseBlock();

            InferenceEngine engine        = new InferenceEngine();
            SparseGP        sgp           = engine.Infer <SparseGP>(f);
            Vector          alphaExpected = Vector.FromArray(new double[] { 0.409693797629808 });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
        }
Example #4
        public void CoinFlips()
        {
            int threshold = 20;
            int N         = 100;
            var n         = new Range(N).Named("n");
            var coins     = Variable.Array <bool>(n).Named("coins");

            coins[n] = Variable.Bernoulli(.5).ForEach(n);
            var RunLength = new Variable <int> [N];
            var zero      = Variable.Random(Discrete.PointMass(0, N));

            RunLength[0] = Variable.Constant(0);
            var thresholdExceeded = Variable.Bernoulli(0);

            for (int i = 1; i < N; i++)
            {
                RunLength[i] = Variable.New <int>();
                using (Variable.If(coins[i - 1]))
                    RunLength[i].SetTo(RunLength[i - 1] + 1);
                using (Variable.IfNot(coins[i - 1]))
                    RunLength[i].SetTo(zero);
                thresholdExceeded |= RunLength[i] > threshold;
            }
            var ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(RunLength[10]));
        }
Example #5
            public BpmPredict()
            {
                nFeatures = Variable.New <int>().Named("nFeatures");
                Range feature = new Range(nFeatures).Named("feature");

                w = Variable.Array <double>(feature).Named("w");
                //VariableArray<Gaussian> wPrior = Variable.Array<Gaussian>(feature).Named("wPrior");
                //w[feature] = Variable<double>.Random(wPrior[feature]);
                wPrior = Variable.New <GaussianArray>().Named("wPrior");
                w.SetTo(Variable <double[]> .Random(wPrior));
                biasPrior = Variable.New <Gaussian>().Named("biasPrior");
                bias      = Variable <double> .Random(biasPrior).Named("bias");

                xValueCount = Variable.New <int>().Named("xValueCount");
                Range userFeature = new Range(xValueCount).Named("userFeature");

                xValues  = Variable.Array <double>(userFeature).Named("xValues");
                xIndices = Variable.Array <int>(userFeature).Named("xIndices");
                y        = Variable.New <bool>().Named("y");
                VariableArray <double> product = Variable.Array <double>(userFeature).Named("product");
                VariableArray <double> wSparse = Variable.Subarray(w, xIndices);

                product[userFeature] = xValues[userFeature] * wSparse[userFeature];
                Variable <double> score = Variable.Sum(product).Named("score");

                y      = (Variable.GaussianFromMeanAndVariance(score + bias, 1.0) > 0);
                engine = new InferenceEngine();
                engine.Compiler.FreeMemory   = false;
                engine.Compiler.ReturnCopies = false;
                engine.OptimiseForVariables  = new IVariable[] { y };
                engine.ModelName             = "BpmPredict";
            }
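A hypothetical usage sketch for the predictor above, assuming the members set up in the constructor (nFeatures, wPrior, biasPrior, xValueCount, xValues, xIndices, y, engine) are accessible and that GaussianArray aliases DistributionStructArray<Gaussian, double>; the feature values and priors are illustrative only.

        // Hypothetical usage (illustrative values; assumes the constructor's fields are accessible):
        var predictor = new BpmPredict();
        predictor.nFeatures.ObservedValue   = 3;
        // assumes the GaussianArray(int length, Func<int, Gaussian> init) constructor
        predictor.wPrior.ObservedValue      = new GaussianArray(3, i => Gaussian.FromMeanAndVariance(0, 1));
        predictor.biasPrior.ObservedValue   = Gaussian.FromMeanAndVariance(0, 1);
        predictor.xValueCount.ObservedValue = 2;
        predictor.xValues.ObservedValue     = new double[] { 0.5, -1.2 };
        predictor.xIndices.ObservedValue    = new int[] { 0, 2 };
        Bernoulli prediction = predictor.engine.Infer <Bernoulli>(predictor.y);
        Console.WriteLine("p(y = true) = {0}", prediction.GetProbTrue());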
Example #6
        /// <summary>
        /// Returns the community score matrix prior.
        /// </summary>
        /// <returns>The community score matrix prior.</returns>
        private VectorGaussian[] GetScoreMatrixPrior()
        {
            var dim       = new Range(LabelCount);
            var mean      = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(LabelCount), PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, 1));
            var prec      = Variable.WishartFromShapeAndRate(1.0, PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, 1));
            var score     = Variable.VectorGaussianFromMeanAndPrecision(mean, prec);
            var confusion = Variable.Softmax(score);

            confusion.SetValueRange(dim);
            var confusionConstraint = Variable.New <Dirichlet>();

            Variable.ConstrainEqualRandom(confusion, confusionConstraint);
            var engine = new InferenceEngine(new VariationalMessagePassing())
            {
                ShowProgress = false
            };

            engine.Compiler.WriteSourceFiles = false;
            var scorePrior = new VectorGaussian[LabelCount];

            for (int d = 0; d < LabelCount; d++)
            {
                confusionConstraint.ObservedValue = new Dirichlet(Util.ArrayInit(LabelCount, i => i == d ? (InitialWorkerBelief / (1 - InitialWorkerBelief)) * (LabelCount - 1) : 1.0));
                scorePrior[d] = engine.Infer <VectorGaussian>(score);
            }

            return(scorePrior);
        }
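A hypothetical sketch of how the per-community priors returned above might be assembled into the CommunityScoreMatrixPrior array declared in DefineVariablesAndRanges (later in this listing); it assumes this runs inside the same class and that CommunityCount is a field of that class.

        // Hypothetical wiring of the result into the model's observed prior
        // (CommunityScoreMatrixPrior and CommunityCount are assumed class members):
        var scoreMatrixPrior = new VectorGaussian[CommunityCount][];
        for (int community = 0; community < CommunityCount; community++)
        {
            scoreMatrixPrior[community] = GetScoreMatrixPrior();
        }
        CommunityScoreMatrixPrior.ObservedValue = scoreMatrixPrior;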
Example #7
        public void RunAnyPlayersMatchup(Participant[] participants)
        {
            var range        = new Range(participants.Length);
            var results      = Variable.Array <bool>(range);
            var popularities = Variable.Array <double>(range);
            var variances    = Variable.Array <double>(range);
            var priors       = Variable.Array <Gaussian>(range);

            priors.ObservedValue = participants.Select(
                p => p.popularityGaussian).ToArray();
            variances.ObservedValue = participants.Select(
                p => DynamicPopularityFactor * DynamicPopularityFactor).ToArray();

            using (var loop = Variable.ForEach(range))
            {
                popularities[range] = Variable.GaussianFromMeanAndVariance(
                    Variable <double> .Random(priors[range]),
                    variances[range]);

                using (Variable.If(loop.Index > 0))
                {
                    results[loop.Index]
                    .SetTo(popularities[loop.Index - 1] > popularities[loop.Index]);
                    results.ObservedValue = participants.Select(p => true).ToArray();
                }
            }

            var participantPosts = Ranking.Engine.Infer <Gaussian[]>(popularities);

            for (int i = 0; i < participants.Length; i++)
            {
                participants[i].popularityGaussian = participantPosts[i];
            }
        }
Example #8
        public void MixtureWithConstantSelector()
        {
            Range Trange             = new Range(2).Named("Trange");
            VariableArray <double> x = Variable.Constant(new double[] { 1, 15 }, Trange).Named("x");

            Range Krange = new Range(2).Named("Krange");
            VariableArray <double> means = Variable.Array <double>(Krange);

            means[Krange] = Variable.GaussianFromMeanAndPrecision(2, 1).ForEach(Krange);

            VariableArray <int> c = Variable.Constant(new int[] { 0, 1 }, Trange).Named("c");

            c[Trange] = Variable.DiscreteUniform(Krange).ForEach(Trange);

            using (Variable.ForEach(Trange))
            {
                using (Variable.Switch(c[Trange]))
                {
                    x[Trange] = Variable.GaussianFromMeanAndPrecision(means[c[Trange]], 1);
                }
            }
            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            Console.WriteLine(ie.Infer(means));
        }
Example #9
        public virtual void CreateModel()
        {
            NumComponents = 2;
            Range ComponentRange = new Range(NumComponents);

            InferenceEngine = new InferenceEngine(new VariationalMessagePassing());
            InferenceEngine.ShowProgress = false;

            AverageTimePriors  = Variable.Array <Gaussian>(ComponentRange);
            TrafficNoisePriors = Variable.Array <Gamma>(ComponentRange);
            AverageTime        = Variable.Array <double>(ComponentRange);
            TrafficNoise       = Variable.Array <double>(ComponentRange);

            using (Variable.ForEach(ComponentRange))
            {
                AverageTime[ComponentRange] = Variable <double> .Random(AverageTimePriors[ComponentRange]);

                TrafficNoise[ComponentRange] = Variable <double> .Random(TrafficNoisePriors[ComponentRange]);
            }

            // Mixing coefficients
            MixingPrior        = Variable.New <Dirichlet>();
            MixingCoefficients = Variable <Vector> .Random(MixingPrior);

            MixingCoefficients.SetValueRange(ComponentRange);
        }
Example #10
        public void Mixture1()
        {
            double[] data            = { 7 };
            int      T               = data.Length;
            VariableArray <double> x = Variable.Constant(data).Named("data");
            Range             i      = x.Range;
            Variable <Vector> D      = Variable.Dirichlet(new double[] { 1, 1 });

            VariableArray <int> c = Variable.Array <int>(i);

            using (Variable.ForEach(i))
            {
                c[i] = Variable.Discrete(D);
                using (Variable.Case(c[i], 0))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(5.5, 1);
                }
                using (Variable.Case(c[i], 1))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(8, 1);
                }
            }

            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            double[] DVibesResult = new double[] { -1.15070203880043, -0.67501576717763 };
            VmpTests.TestDirichletMoments(ie, D, DVibesResult);
            double[][] cVibesResult = new double[T][];
            cVibesResult[0] = new double[] { 0.24961241199438, 0.75038758800562 };
            VmpTests.TestDiscrete(ie, c, cVibesResult);
        }
Example #11
        public void Mixture2()
        {
            double[] data            = { .5, 12, 11 };
            int      T               = data.Length;
            VariableArray <double> x = Variable.Constant(data).Named("data");
            Range             i      = x.Range;
            Variable <Vector> D      = Variable.Dirichlet(new double[] { 1, 1 });

            VariableArray <int> c = Variable.Array <int>(i);

            using (Variable.ForEach(i))
            {
                c[i] = Variable.Discrete(D);
                using (Variable.Case(c[i], 0))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(5, 1);
                }
                using (Variable.Case(c[i], 1))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(10, 1);
                }
            }

            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            double[] DVibesResult = new double[] { -1.08333333348476, -0.58333333335936 };
            VmpTests.TestDirichletMoments(ie, D, DVibesResult);
            double[][] cVibesResult = new double[T][];
            cVibesResult[0] = new double[] { 1.00000000000000, 0.00000000000000 };
            cVibesResult[1] = new double[] { 0.00000000000000, 1.00000000000000 };
            cVibesResult[2] = new double[] { 0.00000000000000, 1.00000000000000 };
            VmpTests.TestDiscrete(ie, c, cVibesResult);
        }
Example #12
        public void MissingDataGaussianTest()
        {
            Variable <double>      mean      = Variable.GaussianFromMeanAndVariance(0, 100).Named("mean");
            Variable <double>      precision = Variable.GammaFromShapeAndScale(1, 1).Named("precision");
            Variable <int>         n         = Variable.New <int>().Named("n");
            Range                  i         = new Range(n).Named("i");
            VariableArray <double> x         = Variable.Array <double>(i).Named("x");

            using (Variable.ForEach(i))
            {
                using (Variable.If(x[i] > 0))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(mean, precision);
                }
            }
            x.ObservedValue = new double[] { -1, 5.0, -1, 7.0, -1 };
            n.ObservedValue = x.ObservedValue.Length;

            InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());
            //Console.WriteLine(engine.Infer(isMissing));
            Gaussian meanExpected      = Gaussian.FromMeanAndVariance(5.9603207170807826, 0.66132138200164436);
            Gamma    precisionExpected = Gamma.FromShapeAndRate(2, 2.6628958274937107);
            Gaussian meanActual        = engine.Infer <Gaussian>(mean);
            Gamma    precisionActual   = engine.Infer <Gamma>(precision);

            Console.WriteLine("mean = {0} should be {1}", meanActual, meanExpected);
            Console.WriteLine("precision = {0} should be {1}", precisionActual, precisionExpected);
            Assert.True(meanExpected.MaxDiff(meanActual) < 1e-10);
            Assert.True(precisionExpected.MaxDiff(precisionActual) < 1e-10);
        }
        public void PottsGridTest()
        {
            int   size = 10;
            Range rows = new Range(size).Named("rows");
            Range cols = new Range(size).Named("cols");

            var states = Variable.Array <bool>(rows, cols).Named("states");

            Bernoulli[,] unary = new Bernoulli[size, size];
            for (int i = 0; i < size; i++)
            {
                for (int j = 0; j < size; j++)
                {
                    var  xdist  = System.Math.Abs(i - size / 2) / ((double)size);
                    var  ydist  = System.Math.Abs(j - size / 2) / ((double)size);
                    bool inrect = (xdist < 0.2) && (ydist < 0.2);
                    unary[i, j] = new Bernoulli(inrect ? 0.8 : 0.2);
                }
            }

            var p = Variable.Observed(unary, rows, cols);

            states[rows, cols] = Variable.Random <bool, Bernoulli>(p[rows, cols]);

            double logCost = 0;

            using (ForEachBlock rowBlock = Variable.ForEach(rows))
            {
                using (ForEachBlock colBlock = Variable.ForEach(cols))
                {
                    using (Variable.If(rowBlock.Index >= 1))
                    {
                        Variable.Potts(states[rowBlock.Index, colBlock.Index],
                                       states[rowBlock.Index + -1, colBlock.Index], logCost);
                    }

                    using (Variable.If(colBlock.Index >= 1))
                    {
                        Variable.Potts(states[rowBlock.Index, colBlock.Index],
                                       states[rowBlock.Index, colBlock.Index + -1], logCost);
                    }
                }
            }

            InferenceEngine engine = new InferenceEngine();

            engine.Algorithm          = new MaxProductBeliefPropagation();
            engine.ShowTimings        = true;
            engine.NumberOfIterations = 20;
            var result = engine.Infer <Bernoulli[, ]>(states);

            for (int i = 0; i < result.GetLength(0); i++)
            {
                for (int j = 0; j < result.GetLength(1); j++)
                {
                    Console.Write("{0:f5} ", result[i, j].GetProbTrue());
                }
                Console.WriteLine();
            }
        }
        public void MarginalPrototypeOfConstantInGateTest2()
        {
            Range r       = new Range(3);
            var   skills  = Variable.Array <double>(r).Named("skills");
            var   results = Variable.Array <int>(r).Named("results");

            using (Variable.ForEach(r))
            {
                skills[r] = Variable.GaussianFromMeanAndPrecision(25.5, 0.05);
                var b = (skills[r] - 25) > 0;
                using (Variable.If(b))
                {
                    results[r] = 0;
                }
                using (Variable.IfNot(b))
                {
                    results[r] = 1;
                }
            }
            int[] observedResults = new int[3] {
                0, 0, 0
            };
            Variable.ConstrainEqual(results, observedResults);

            InferenceEngine engine = new InferenceEngine();

            Console.WriteLine(engine.Infer(skills));
        }
Example #15
        public static VariableArray <Vector> CPT1(Range rY, Range rS)
        {
            int    dimS = rS.SizeAsInt;
            int    dimY = rY.SizeAsInt;
            double pc   = 1 / (double)(dimS * dimY);

            var priorObs = new Dirichlet[dimS];

            for (int i = 0; i < dimS; i++)
            {
                priorObs[i] = Dirichlet.Symmetric(dimY, pc); // each CPT row is a distribution over rY, so the Dirichlet has dimension dimY (as in CPT2 below)
            }

            var priorVar = Variable.Array <Dirichlet>(rS);

            priorVar.ObservedValue = priorObs;

            var cpt = Variable.Array <Vector>(rS);

            cpt[rS] = Variable <Vector> .Random(priorVar[rS]);

            cpt.SetValueRange(rY);

            return(cpt);
        }
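A hypothetical sketch of how the returned CPT might be used: inside a Switch over the parent state, the selected row parameterises a Discrete child. The ranges and variable names below are illustrative, not part of the method above.

        // Hypothetical usage (illustrative ranges and names):
        Range rS = new Range(3).Named("S");
        Range rY = new Range(4).Named("Y");
        var cptY   = CPT1(rY, rS);
        var s      = Variable.DiscreteUniform(rS).Named("s");
        var yChild = Variable.New <int>().Named("yChild");
        using (Variable.Switch(s))
        {
            yChild.SetTo(Variable.Discrete(cptY[s]));
        }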
Example #16
        public DistributionArray <Bernoulli> TestNaiveBayes(double a, out double noiseEstimate)
        {
            noiseEstimate = double.NaN;
            int K = xtrain[0].Count;
            // Create target y
            VariableArray <bool> y        = Variable.Observed(ytrain).Named("y");
            Variable <Vector>    meanTrue = Variable.Random(new VectorGaussian(Vector.Zero(K),
                                                                               PositiveDefiniteMatrix.Identity(K))).Named("m1");
            Variable <Vector> meanFalse = Variable.Random(new VectorGaussian(Vector.Zero(K),
                                                                             PositiveDefiniteMatrix.Identity(K))).Named("m2");
            var precTrue  = Variable.Random(new Wishart(a, PositiveDefiniteMatrix.Identity(K)));
            var precFalse = Variable.Random(new Wishart(a, PositiveDefiniteMatrix.Identity(K)));

            NaiveBayes(xtrain, meanTrue, meanFalse, precTrue, precFalse, y);
            //InferenceEngine.DefaultEngine.Compiler.UseSerialSchedules = true;
            InferenceEngine      engine        = new InferenceEngine(new VariationalMessagePassing());
            var                  meanTruePost  = engine.Infer <VectorGaussian>(meanTrue);
            var                  meanFalsePost = engine.Infer <VectorGaussian>(meanFalse);
            var                  precTruePost  = engine.Infer <Wishart>(precTrue);
            var                  precFalsePost = engine.Infer <Wishart>(precFalse);
            var                  testRange     = new Range(ytest.Length);
            VariableArray <bool> yt            = Variable.Array <bool>(testRange).Named("ytest");

            yt[testRange] = Variable.Bernoulli(0.5).ForEach(testRange);
            NaiveBayes(xtest, Variable.Random(meanTruePost), Variable.Random(meanFalsePost), Variable.Random <PositiveDefiniteMatrix>(precTruePost),
                       Variable.Random <PositiveDefiniteMatrix>(precFalsePost), yt);
            return(engine.Infer <DistributionArray <Bernoulli> >(yt));
        }
        public Gaussian[] IndexOfMaximumFactorGate(Discrete y, out double logEvidence)
        {
            var ev    = Variable.Bernoulli(0.5).Named("ev");
            int N     = y.Dimension;
            var n     = new Range(N).Named("n");
            var block = Variable.If(ev);
            var x     = Variable.Array <double>(n).Named("x");

            x[n] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(n);
            var yVar = Variable <int> .Random(y).Named("y");

            for (int index = 0; index < N; index++)
            {
                using (Variable.Case(yVar, index))
                {
                    //var temp = Variable.Observed<int>(index).Named("temp"+index) ;
                    //temp.SetTo(Variable<int>.Factor(MMath.IndexOfMaximumDouble, x).Named("fac"+index));
                    var temp = IndexOfMaximum(x).Named("temp" + index);
                    temp.ObservedValue = index;
                }
            }
            block.CloseBlock();
            var ie = new InferenceEngine();

            ie.ModelName            = "FactorGate";
            ie.OptimiseForVariables = new List <IVariable>()
            {
                x, ev
            };
            logEvidence = ie.Infer <Bernoulli>(ev).LogOdds;
            return(ie.Infer <Gaussian[]>(x));
        }
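A hypothetical sketch comparing this Case-based formulation with IndexOfMaximumFactorGate2 (earlier in this listing); the distribution below is illustrative, both methods are assumed to be in scope, and a 3-dimensional Discrete is used because Gate2 hard-codes indices { 0, 1, 2 }.

        // Hypothetical comparison of the two gate formulations (both assume y.Dimension == 3):
        var yDist = new Discrete(0.1, 0.3, 0.6);
        double logEvidenceGate, logEvidenceGate2;
        Gaussian[] postGate  = IndexOfMaximumFactorGate(yDist, out logEvidenceGate);
        Gaussian[] postGate2 = IndexOfMaximumFactorGate2(yDist, out logEvidenceGate2);
        Console.WriteLine("log evidence: {0} (Case) vs {1} (Switch)", logEvidenceGate, logEvidenceGate2);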
Example #18
        public DistributionArray <Bernoulli> TestNaiveBayesDiagonalHierarchical(double a, out double noiseEstimate)
        {
            noiseEstimate = double.NaN;
            var xtrainArray = xtrain.Select(i => i.ToArray()).ToArray();
            var xtestArray  = xtest.Select(i => i.ToArray()).ToArray();
            int K           = xtrain[0].Count;
            var k           = new Range(K);
            // Create target y
            VariableArray <bool> y = Variable.Observed(ytrain).Named("y");
            var meanTrue           = Variable.Array <double>(k);

            meanTrue[k] = Variable.GaussianFromMeanAndPrecision(
                Variable.GaussianFromMeanAndPrecision(0, 1),
                Variable.GammaFromShapeAndRate(.1, .1)).ForEach(k);
            var precTrue = Variable.Array <double>(k);

            precTrue[k] = Variable.GammaFromShapeAndRate(
                1,
                Variable.GammaFromShapeAndRate(.1, .1)).ForEach(k);
            var meanFalse = Variable.Array <double>(k);

            meanFalse[k] = Variable.GaussianFromMeanAndPrecision(
                Variable.GaussianFromMeanAndPrecision(0, 1),
                Variable.GammaFromShapeAndRate(.1, .1)).ForEach(k);
            var precFalse = Variable.Array <double>(k);

            precFalse[k] = Variable.GammaFromShapeAndRate(
                1,
                Variable.GammaFromShapeAndRate(.1, .1)).ForEach(k);
            NaiveBayesDiagonal(xtrainArray, meanTrue, meanFalse, precTrue, precFalse, y);
            //InferenceEngine.DefaultEngine.Compiler.UseSerialSchedules = true;
            InferenceEngine      engine        = new InferenceEngine(new VariationalMessagePassing());
            var                  meanTruePost  = engine.Infer <Gaussian[]>(meanTrue);
            var                  meanFalsePost = engine.Infer <Gaussian[]>(meanFalse);
            var                  precTruePost  = engine.Infer <Gamma[]>(precTrue);
            var                  precFalsePost = engine.Infer <Gamma[]>(precFalse);
            var                  testRange     = new Range(ytest.Length);
            VariableArray <bool> yt            = Variable.Array <bool>(testRange).Named("ytest");

            yt[testRange] = Variable.Bernoulli(0.5).ForEach(testRange);
            var meanTruePrior  = Variable.Observed <Gaussian>(meanTruePost, k);
            var precTruePrior  = Variable.Observed <Gamma>(precTruePost, k);
            var meanFalsePrior = Variable.Observed <Gaussian>(meanFalsePost, k);
            var precFalsePrior = Variable.Observed <Gamma>(precFalsePost, k);

            meanTrue    = Variable.Array <double>(k);
            meanTrue[k] = Variable <double> .Random(meanTruePrior[k]);

            precTrue    = Variable.Array <double>(k);
            precTrue[k] = Variable <double> .Random(precTruePrior[k]);

            meanFalse    = Variable.Array <double>(k);
            meanFalse[k] = Variable <double> .Random(meanFalsePrior[k]);

            precFalse    = Variable.Array <double>(k);
            precFalse[k] = Variable <double> .Random(precFalsePrior[k]);

            NaiveBayesDiagonal(xtestArray, meanTrue, meanFalse, precTrue, precFalse, yt);
            return(engine.Infer <DistributionArray <Bernoulli> >(yt));
        }
        public void StringFormatSimpleTest()
        {
            // number of objects
            var J     = new Range(2).Named("J");
            var names = Variable.Array <string>(J).Named("names");

            names[J] = Variable.Random(WordString()).ForEach(J);

            Variable.ConstrainEqual(names[0], "John");
            var i        = Variable.DiscreteUniform(J);
            var template = Variable.Random(StringDistribution.Any());
            var s        = Variable.New <string>();

            using (Variable.Switch(i))
            {
                s.SetTo(Variable.StringFormat(template, names[i]));
            }

            s.ObservedValue = "John was here";

            var engine = new InferenceEngine {
                NumberOfIterations = 10
            };

            engine.Compiler.RecommendedQuality = QualityBand.Experimental;
            engine.Compiler.GivePriorityTo(typeof(ReplicateOp_NoDivide));
            Console.WriteLine("posterior=" + engine.Infer(i));
        }
        /// <summary>
        /// Constructs an LDA model
        /// </summary>
        /// <param name="sizeVocab">Size of vocabulary</param>
        /// <param name="numTopics">Number of topics</param>
        public LDAPredictionModel(int sizeVocab, int numTopics)
        {
            SizeVocab          = sizeVocab;
            NumTopics          = numTopics;
            PredictionSparsity = Sparsity.Sparse;

            //---------------------------------------------
            // The model
            //---------------------------------------------
            Range W = new Range(SizeVocab).Named("W");
            Range T = new Range(NumTopics).Named("T");

            ThetaPrior = Variable.New <Dirichlet>().Named("ThetaPrior");
            PhiPrior   = Variable.Array <Dirichlet>(T).Named("PhiPrior").Attrib(new ValueRange(W));
            Theta      = Variable.New <Vector>().Named("Theta");
            Phi        = Variable.Array <Vector>(T).Named("Phi");
            Theta      = Variable.Random <Vector, Dirichlet>(ThetaPrior);
            Phi[T]     = Variable.Random <Vector, Dirichlet>(PhiPrior[T]);

            Word = Variable.New <int>().Named("Word");
            Word.SetSparsity(PredictionSparsity);
            var topic = Variable.Discrete(Theta).Attrib(new ValueRange(T)).Named("topic");

            using (Variable.Switch(topic))
            {
                Word.SetTo(Variable.Discrete(Phi[topic]));
            }

            Engine = new InferenceEngine(new VariationalMessagePassing());
            Engine.Compiler.ShowWarnings = false;
        }
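A hypothetical usage sketch for the prediction model above, assuming thetaPosterior (a Dirichlet over topics) and phiPosterior (a Dirichlet[] of length numTopics) come from a previously trained LDA model and that the members shown in the constructor are accessible.

        // Hypothetical usage, assuming posteriors from a trained LDA model:
        var model = new LDAPredictionModel(sizeVocab: 1000, numTopics: 5);
        model.ThetaPrior.ObservedValue = thetaPosterior;   // Dirichlet over topics (assumed)
        model.PhiPrior.ObservedValue   = phiPosterior;     // Dirichlet[] over words, one per topic (assumed)
        Discrete predictedWord = model.Engine.Infer <Discrete>(model.Word);
        Console.WriteLine("predictive word distribution: {0}", predictedWord);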
Example #21
        public void amin()
        {
            int   X_No    = 5;
            int   Z_No    = 3;
            Range X_Range = new Range(X_No).Named("X_Range");
            Range Z_Range = new Range(Z_No).Named("Z_Range");

            int[][] Index = new int[][] { new int[] { 0, 1, 3 },
                                          new int[] { 1, 2, 4 },
                                          new int[] { 2, 4 } };
            int[] sizes = new int[Z_No];
            for (int i = 0; i < Z_No; i++)
            {
                sizes[i] = Index[i].Length;
            }
            VariableArray <int> sizesVar = Variable.Constant(sizes, Z_Range);
            Range Index_Range            = new Range(sizesVar[Z_Range]).Named("Index_Range");
            VariableArray <VariableArray <int>, int[][]> indexVar = Variable.Array(Variable.Array <int>(Index_Range), Z_Range).Named("indexVar");

            indexVar.ObservedValue = Index;
            VariableArray <double> Var_X = Variable.Array <double>(X_Range).Named("Var_X");
            VariableArray <double> Var_Z = Variable.Array <double>(Z_Range);

            Var_X[X_Range] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(X_Range);
            Var_Z[Z_Range] = Variable.Sum(Variable.GetItems(Var_X, indexVar[Z_Range]));
            InferenceEngine ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(Var_Z));
        }
        public void PruningTest()
        {
            int    numT  = 2;
            int    numD  = 2;
            var    D     = new Range(numD);
            var    T     = new Range(numT);
            double alpha = 0.5;
            Vector v     = Vector.Constant(numT, alpha, Sparsity.Sparse);

            // Using an observed prior:
            var prior = Variable.New <Dirichlet>();

            prior.ObservedValue = new Dirichlet(v);
            var theta1 = Variable.Array <Vector>(D).Named("theta1");

            theta1[D] = Variable.Random <Vector, Dirichlet>(prior).ForEach(D);

            // Using a constant vector
            var theta2 = Variable.Array <Vector>(D).Named("theta2");

            theta2[D] = Variable.Dirichlet(T, Vector.Constant(numT, alpha, Sparsity.Sparse)).ForEach(D);

            // These should give the same result.
            var engine     = new InferenceEngine(new VariationalMessagePassing());
            var postTheta1 = engine.Infer <Dirichlet[]>(theta1);
            var postTheta2 = engine.Infer <Dirichlet[]>(theta2);

            for (int i = 0; i < numD; i++)
            {
                Assert.Equal(postTheta2[i], postTheta1[i]);
            }
        }
Example #23
        public void AlexJames()
        {
            // Data about tasks: true = correct, false = incorrect
            bool[] TaskA_Math = { false, true, true, true, true, true, true, true, false, false };

            // Range for A
            int   numA       = TaskA_Math.Length;
            Range taskARange = new Range(numA);

            VariableArray <bool> data = Variable.Array <bool>(taskARange);

            // The prior
            var taskA_performance = Variable.Beta(1, 1);

            // The likelihood model
            data[taskARange] = Variable.Bernoulli(taskA_performance).ForEach(taskARange);

            // Attach data
            data.ObservedValue = TaskA_Math;

            // Inference engine (EP)
            InferenceEngine engine = new InferenceEngine();

            // Infer probability of Task A Math so can predict Task B_Math etc.
            Console.WriteLine("isCorrect = {0}", engine.Infer <Beta>(taskA_performance).GetMean());
        }
Example #24
        public static VariableArray <VariableArray <Vector>, Vector[][]> CPT2(Range rY, Range rS0, Range rS1)
        {
            int    dimS0 = rS0.SizeAsInt;
            int    dimS1 = rS1.SizeAsInt;
            int    dimY  = rY.SizeAsInt;
            double pc    = 1 / (double)(dimS0 * dimS1 * dimY);


            var priorObs = new Dirichlet[dimS0][];

            for (int i0 = 0; i0 < dimS0; i0++)
            {
                priorObs[i0] = new Dirichlet[dimS1];
                for (int i1 = 0; i1 < dimS1; i1++)
                {
                    priorObs[i0][i1] = Dirichlet.Symmetric(dimY, pc);
                }
            }

            var priorVar = Variable.Array(Variable.Array <Dirichlet>(rS1), rS0);

            priorVar.ObservedValue = priorObs;

            var cpt = Variable.Array(Variable.Array <Vector>(rS1), rS0);

            cpt[rS0][rS1] = Variable <Vector> .Random(priorVar[rS0][rS1]);

            cpt.SetValueRange(rY);
            return(cpt);
        }
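A hypothetical sketch of using the two-parent CPT: nested Switch blocks over both parent states select the row that parameterises the child. The ranges and names below are illustrative.

        // Hypothetical usage (illustrative ranges and names):
        Range rS0 = new Range(2).Named("S0");
        Range rS1 = new Range(3).Named("S1");
        Range rY  = new Range(4).Named("Y");
        var cptY   = CPT2(rY, rS0, rS1);
        var s0     = Variable.DiscreteUniform(rS0).Named("s0");
        var s1     = Variable.DiscreteUniform(rS1).Named("s1");
        var yChild = Variable.New <int>().Named("yChild");
        using (Variable.Switch(s0))
        using (Variable.Switch(s1))
        {
            yChild.SetTo(Variable.Discrete(cptY[s0][s1]));
        }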
        public void LocalArrayMarginalPrototypeTest2()
        {
            var Topics_size  = Variable.New <int>().Named("Topics_size");
            var Topics_range = new Range(Topics_size).Named("Topics_range");
            var Docs_size    = Variable.New <int>().Named("Docs_size");
            var Docs_range   = new Range(Docs_size).Named("Docs_range");

            var Docs_DocTopicDist = Variable.Array <Vector>(Docs_range).Named("Docs_DocTopicDist");

            //  "Cannot index Dirichlet.Uniform(v5.Length) by vint__1[index2] since v5 has an implicit dependency on Docs_range. Try making the dependency explicit by putting v5 into an array indexed by Docs_range"
            using (Variable.ForEach(Docs_range))
            {
                var v5  = Variable.Array <double>(Topics_range).Named("v5"); // the problematic array
                var v6  = Variable.Constant <double>(5);
                var v7  = Variable.Constant <double>(5.1);
                var v8  = Variable.Constant <double>(5.2);
                var v9  = Variable.Constant <double>(5.3);
                var v10 = Variable.Constant <double>(5.4);
                var v11 = Variable.Constant <double>(5.5);
                var v12 = Variable.Constant <double>(5.6);
                v5[Variable.Constant <int>(0)] = v6;
                v5[Variable.Constant <int>(1)] = v7;
                v5[Variable.Constant <int>(2)] = v8;
                v5[Variable.Constant <int>(3)] = v9;
                v5[Variable.Constant <int>(4)] = v10;
                v5[Variable.Constant <int>(5)] = v11;
                v5[Variable.Constant <int>(6)] = v12;
                var v13 = v5;
                Docs_DocTopicDist[Docs_range] = Variable.Dirichlet(Topics_range, Variable.Vector(v13));
            }

            var Occs_size  = Variable.New <int>();
            var Occs_range = new Range(Occs_size);
            var Occs_DocID = Variable.Array <int>(Occs_range);

            Occs_DocID.SetValueRange(Docs_range);

            var Occs_OccTopic = Variable.Array <int>(Occs_range);

            Occs_OccTopic.SetValueRange(Topics_range);

            using (Variable.ForEach(Occs_range))
            {
                var v3 = Occs_DocID[Occs_range];
                var v4 = Docs_DocTopicDist[v3];
                Occs_OccTopic[Occs_range] = Variable.Discrete(Topics_range, v4);
            }

            Topics_size.ObservedValue = 7;
            Docs_size.ObservedValue   = 3;
            Occs_size.ObservedValue   = 4;
            Occs_DocID.ObservedValue  = new int[] { 0, 1, 2, 2 };

            InferenceEngine engine = new InferenceEngine();

            engine.OptimiseForVariables = new IVariable[] { Docs_DocTopicDist, Occs_OccTopic };
            Console.WriteLine("Z=" + engine.Infer(Docs_DocTopicDist) + engine.Infer(Occs_OccTopic));
        }
Example #26
        // Construct a DINA model in Infer.NET
        public static void DinaModel(
            bool[][] responsesData,
            int nSkills,
            int[][] skillsRequired,
            Beta slipPrior,
            Beta guessPrior,
            out VariableArray <double> pSkill,
            out VariableArray <double> slip,
            out VariableArray <double> guess,
            out VariableArray <VariableArray <bool>, bool[][]> hasSkill)
        {
            // The Infer.NET model follows the same structure as the sampler above, but using Variables and Ranges
            int   nStudents  = responsesData.Length;
            int   nQuestions = skillsRequired.Length;
            Range student    = new Range(nStudents);
            Range question   = new Range(nQuestions);
            Range skill      = new Range(nSkills);
            var   responses  = Variable.Array(Variable.Array <bool>(question), student).Named("responses");

            responses.ObservedValue = responsesData;

            pSkill          = Variable.Array <double>(skill).Named("pSkill");
            pSkill[skill]   = Variable.Beta(1, 1).ForEach(skill);
            slip            = Variable.Array <double>(question).Named("slip");
            slip[question]  = Variable.Random(slipPrior).ForEach(question);
            guess           = Variable.Array <double>(question).Named("guess");
            guess[question] = Variable.Random(guessPrior).ForEach(question);

            hasSkill = Variable.Array(Variable.Array <bool>(skill), student).Named("hasSkill");
            hasSkill[student][skill] = Variable.Bernoulli(pSkill[skill]).ForEach(student);

            VariableArray <int> nSkillsRequired = Variable.Array <int>(question).Named("nSkillsRequired");

            nSkillsRequired.ObservedValue = Util.ArrayInit(nQuestions, q => skillsRequired[q].Length);
            Range skillForQuestion          = new Range(nSkillsRequired[question]).Named("skillForQuestion");
            var   skillsRequiredForQuestion = Variable.Array(Variable.Array <int>(skillForQuestion), question).Named("skillsRequiredForQuestion");

            skillsRequiredForQuestion.ObservedValue = skillsRequired;
            skillsRequiredForQuestion.SetValueRange(skill);

            using (Variable.ForEach(student))
            {
                using (Variable.ForEach(question))
                {
                    VariableArray <bool> hasSkills    = Variable.Subarray(hasSkill[student], skillsRequiredForQuestion[question]);
                    Variable <bool>      hasAllSkills = Variable.AllTrue(hasSkills);
                    using (Variable.If(hasAllSkills))
                    {
                        responses[student][question] = !Variable.Bernoulli(slip[question]);
                    }

                    using (Variable.IfNot(hasAllSkills))
                    {
                        responses[student][question] = Variable.Bernoulli(guess[question]);
                    }
                }
            }
        }
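A hypothetical call to the model builder above, assuming responsesData and skillsRequired have already been loaded; the number of skills, the Beta priors, and the choice of inference engine are illustrative.

        // Hypothetical usage (responsesData and skillsRequired assumed loaded; priors illustrative):
        VariableArray <double> pSkill, slip, guess;
        VariableArray <VariableArray <bool>, bool[][]> hasSkill;
        DinaModel(responsesData, 7, skillsRequired, new Beta(1, 10), new Beta(1, 10),
                  out pSkill, out slip, out guess, out hasSkill);
        InferenceEngine engine = new InferenceEngine();
        Console.WriteLine("pSkill = {0}", engine.Infer(pSkill));
        Console.WriteLine("slip = {0}", engine.Infer(slip));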
Example #27
        public void GPClassificationTest2()
        {
            bool[]   yData = new bool[] { false, true, false };
            double[] xData = new double[]
            {
                -1.555555555555556, -0.2222222222222223, 1.555555555555555
            };
            Vector[]             xVec     = Array.ConvertAll(xData, v => Vector.Constant(1, v));
            Vector[]             basis    = new Vector[] { Vector.Zero(1) };
            IKernelFunction      kf       = new SquaredExponential(0.0);
            SparseGPFixed        sgpf     = new SparseGPFixed(kf, basis);
            Variable <bool>      evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock              block    = Variable.If(evidence);
            Variable <IFunction> f        = Variable.Random <IFunction>(new SparseGP(sgpf)).Named("f");
            Range item = new Range(xVec.Length).Named("item");
            VariableArray <Vector> x = Variable.Array <Vector>(item).Named("x");

            x.ObservedValue = xVec;
            VariableArray <bool> y = Variable.Array <bool>(item).Named("y");

            y.ObservedValue = yData;
            VariableArray <double> h = Variable.Array <double>(item).Named("h");

            h[item] = Variable.FunctionEvaluate(f, x[item]);
            y[item] = (h[item] > 0);
            block.CloseBlock();

            InferenceEngine engine        = new InferenceEngine();
            SparseGP        sgp           = engine.Infer <SparseGP>(f);
            Vector          alphaExpected = Vector.FromArray(new double[] { 0.573337393823702 });

            Console.WriteLine("alpha = {0} should be {1}", sgp.Alpha, alphaExpected);
            double[] xTest = new double[]
            {
                -2, -1, 0.0
            };
            Vector[] xTestVec  = Array.ConvertAll(xTest, v => Vector.Constant(1, v));
            double[] yMeanTest = new double[]
            {
                0.077592778583272, 0.347746707713812, 0.573337393823702
            };
            double[] yVarTest = new double[]
            {
                0.986784459962251, 0.734558782611933, 0.278455962249970
            };
            for (int i = 0; i < xTestVec.Length; i++)
            {
                Gaussian pred         = sgp.Marginal(xTestVec[i]);
                Gaussian predExpected = new Gaussian(yMeanTest[i], yVarTest[i]);
                Console.WriteLine("f({0}) = {1} should be {2}", xTest[i], pred, predExpected);
                Assert.True(predExpected.MaxDiff(pred) < 1e-4);
            }
            double evExpected = -2.463679892165236;
            double evActual   = engine.Infer <Bernoulli>(evidence).LogOdds;

            Console.WriteLine("evidence = {0} should be {1}", evActual, evExpected);
            Assert.True(MMath.AbsDiff(evExpected, evActual, 1e-6) < 1e-4);
        }
Example #28
        public void BallCountingNoisy2()
        {
            // Variables describing the population
            int                  maxBalls = 8;
            Range                ball     = new Range(maxBalls).Named("ball");
            Variable <int>       numBalls = Variable.DiscreteUniform(maxBalls + 1).Named("numBalls");
            VariableArray <bool> isBlue   = Variable.Array <bool>(ball).Named("isBlue");

            isBlue[ball] = Variable.Bernoulli(0.5).ForEach(ball);

            // Variables describing the observations
            Range draw = new Range(10).Named("draw");

            using (Variable.ForEach(draw))
            {
                Variable <int> ballIndex = Variable.DiscreteUniform(ball, numBalls).Named("ballIndex");
                if (false)
                {
                    using (Variable.Switch(ballIndex))
                    {
                        Variable <bool> switchedColor = Variable.Bernoulli(0.2).Named("switchedColor");
                        using (Variable.If(switchedColor))
                        {
                            Variable.ConstrainFalse(isBlue[ballIndex]);
                        }
                        using (Variable.IfNot(switchedColor))
                        {
                            Variable.ConstrainTrue(isBlue[ballIndex]);
                        }
                    }
                }
                else
                {
                    Variable <bool> switchedColor = Variable.Bernoulli(0.2).Named("switchedColor");
                    using (Variable.If(switchedColor))
                    {
                        using (Variable.Switch(ballIndex))
                            Variable.ConstrainFalse(isBlue[ballIndex]);
                    }
                    using (Variable.IfNot(switchedColor))
                    {
                        using (Variable.Switch(ballIndex))
                            Variable.ConstrainTrue(isBlue[ballIndex]);
                    }
                }
            }


            // Inference queries about the program
            // -----------------------------------
            InferenceEngine engine         = new InferenceEngine();
            Discrete        numBallsActual = engine.Infer <Discrete>(numBalls);

            Console.WriteLine("numBalls = {0}", numBallsActual);
            Discrete numBallsExpected = new Discrete(0, 0.463, 0.2354, 0.1137, 0.06589, 0.04392, 0.0322, 0.02521, 0.02068);

            Assert.True(numBallsExpected.MaxDiff(numBallsActual) < 1e-4);
        }
Example #29
        /// <summary>
        /// Defines the variables and the ranges of CBCC.
        /// </summary>
        /// <param name="taskCount">The number of tasks.</param>
        /// <param name="labelCount">The number of labels.</param>
        protected override void DefineVariablesAndRanges(int taskCount, int labelCount)
        {
            WorkerCount = Variable.New <int>().Named("WorkerCount");
            m           = new Range(CommunityCount).Named("m");
            n           = new Range(taskCount).Named("n");
            c           = new Range(labelCount).Named("c");
            k           = new Range(WorkerCount).Named("k");

            // The tasks for each worker
            WorkerTaskCount = Variable.Array <int>(k).Named("WorkerTaskCount");
            kn = new Range(WorkerTaskCount[k]).Named("kn");
            WorkerTaskIndex = Variable.Array(Variable.Array <int>(kn), k).Named("WorkerTaskIndex");
            WorkerTaskIndex.SetValueRange(n);
            WorkerLabel = Variable.Array(Variable.Array <int>(kn), k).Named("WorkerLabel");

            // The background probability vector
            BackgroundLabelProbPrior = Variable.New <Dirichlet>().Named("BackgroundLabelProbPrior");
            BackgroundLabelProb      = Variable <Vector> .Random(BackgroundLabelProbPrior).Named("BackgroundLabelProb");

            BackgroundLabelProb.SetValueRange(c);

            // Community membership
            CommunityProbPrior = Variable.New <Dirichlet>().Named("CommunityProbPrior");
            CommunityProb      = Variable <Vector> .Random(CommunityProbPrior).Named("CommunityProb");

            CommunityProb.SetValueRange(m);
            Community           = Variable.Array <int>(k).Attrib(QueryTypes.Marginal).Attrib(QueryTypes.MarginalDividedByPrior).Named("Community");
            CommunityConstraint = Variable.Array <Discrete>(k).Named("CommunityConstraint");
            Community[k]        = Variable.Discrete(CommunityProb).ForEach(k);
            Variable.ConstrainEqualRandom(Community[k], CommunityConstraint[k]);
            // Initialiser to break symmetry for community membership
            CommunityInit = Variable.Array <Discrete>(k).Named("CommunityInit");
            Community[k].InitialiseTo(CommunityInit[k]);

            // Community parameters
            CommunityScoreMatrixPrior  = Variable.Array(Variable.Array <VectorGaussian>(c), m).Named("CommunityScoreMatrixPrior");
            CommunityScoreMatrix       = Variable.Array(Variable.Array <Vector>(c), m).Named("CommunityScoreMatrix");
            CommunityScoreMatrix[m][c] = Variable <Vector> .Random(CommunityScoreMatrixPrior[m][c]);

            CommunityConfusionMatrix       = Variable.Array(Variable.Array <Vector>(c), m).Named("CommunityConfusionMatrix");
            CommunityConfusionMatrix[m][c] = Variable.Softmax(CommunityScoreMatrix[m][c]);
            CommunityScoreMatrix.SetValueRange(c);

            // Parameters for each worker
            ScoreMatrix           = Variable.Array(Variable.Array <Vector>(c), k).Attrib(QueryTypes.Marginal).Attrib(QueryTypes.MarginalDividedByPrior).Named("ScoreMatrix");
            ScoreMatrixConstraint = Variable.Array(Variable.Array <VectorGaussian>(c), k).Named("ScoreMatrixConstraint");
            WorkerConfusionMatrix = Variable.Array(Variable.Array <Vector>(c), k).Named("ConfusionMatrix");

            // The unobserved 'true' label for each task
            TrueLabel           = Variable.Array <int>(n).Attrib(QueryTypes.Marginal).Attrib(QueryTypes.MarginalDividedByPrior).Named("Truth");
            TrueLabelConstraint = Variable.Array <Discrete>(n).Named("TruthConstraint");
            TrueLabel[n]        = Variable.Discrete(BackgroundLabelProb).ForEach(n);
            Variable.ConstrainEqualRandom(TrueLabel[n], TrueLabelConstraint[n]);

            // The labels given by the workers
            WorkerLabel = Variable.Array(Variable.Array <int>(kn), k).Named("WorkerLabel");
        }
Example #30
        internal void BirdCounting3()
        {
            double               noise        = 0.2;
            int                  maxBirds     = 8;
            Range                numBirdRange = new Range(maxBirds + 1).Named("numBirdRange");
            Variable <int>       numBirds     = Variable.DiscreteUniform(numBirdRange).Named("numBirds");
            SwitchBlock          block        = Variable.Switch(numBirds);
            Range                bird         = new Range(maxBirds).Named("bird");
            VariableArray <bool> isMale       = Variable.Array <bool>(bird).Named("isMale");

            isMale[bird] = Variable.Bernoulli(0.5).ForEach(bird);
            Variable <int>       numObserved  = Variable.New <int>().Named("numObserved");
            Range                observedBird = new Range(numObserved).Named("observedBird");
            VariableArray <bool> observedMale = Variable.Array <bool>(observedBird).Named("observedMale");

            //VariableArray<int> birdIndices = Variable.Array<int>(observedBird).Named("birdIndices");
            using (Variable.ForEach(observedBird))
            {
                //birdIndices[observedBird] = Variable.DiscreteUniform(numBirds);
                //Variable<int> birdIndex = birdIndices[observedBird];
                Variable <int> birdIndex = Variable.DiscreteUniform(bird, numBirds).Named("birdIndex");
                using (Variable.Switch(birdIndex))
                {
#if true
                    //Variable.ConstrainEqual(observedMale[observedBird], isMale[birdIndex]);
                    observedMale[observedBird] = (isMale[birdIndex] == Variable.Bernoulli(1 - noise));
#else
                    using (Variable.If(isMale[birdIndex])) {
                        observedMale[observedBird] = Variable.Bernoulli(0.8);
                    }
                    using (Variable.IfNot(isMale[birdIndex])) {
                        observedMale[observedBird] = Variable.Bernoulli(0.2);
                    }
#endif
                }
            }
            block.CloseBlock();

            InferenceEngine engine = new InferenceEngine();
            for (int numObservedInt = 6; numObservedInt <= 10; numObservedInt++)
            {
                Console.WriteLine("numObserved = {0}", numObservedInt);
                // weird behavior with 1 different out of >=9 obs, no obs noise
                bool[] data            = new bool[numObservedInt];
                int    numObservedMale = 0;
                for (int i = 0; i < data.Length; i++)
                {
                    data[i] = (i < numObservedMale);
                }
                numObserved.ObservedValue  = data.Length;
                observedMale.ObservedValue = data;
                //Console.WriteLine("birdIndices = {0}", engine.Infer(birdIndices));
                Console.WriteLine("isMale = {0}", engine.Infer(isMale));
                Console.WriteLine("numBirds = {0}", engine.Infer(numBirds));
                Console.WriteLine("   exact = {0}", BallCountingExact(maxBirds, numObservedInt, numObservedMale, noise));
            }
        }