Ejemplo n.º 1
0
        private void RandBinomial(double p, int n)
        {
            // Analytic moments of Binomial(n, p).
            double meanExpected     = p * n;
            double varianceExpected = p * (1 - p) * n;

            // Accumulate the first and second raw sample moments.
            double sum          = 0;
            double sumOfSquares = 0;
            for (int sample = 0; sample < nsamples; sample++)
            {
                double draw = Rand.Binomial(n, p);
                sum          += draw;
                sumOfSquares += draw * draw;
            }

            double mean     = sum / nsamples;
            double variance = sumOfSquares / nsamples - mean * mean;

            // The sample mean must lie within 5 standard deviations of the expected mean.
            double error = MMath.AbsDiff(meanExpected, mean, 1e-6);
            if (error > System.Math.Sqrt(varianceExpected) * 5)
            {
                Assert.True(false, string.Format("Binomial({0},{1}) mean = {2} should be {3}, error = {4}", p, n, mean, meanExpected, error));
            }

            // Same 5-sigma tolerance for the sample variance.
            error = MMath.AbsDiff(varianceExpected, variance, 1e-6);
            if (error > System.Math.Sqrt(varianceExpected) * 5)
            {
                Assert.True(false, string.Format("Binomial({0},{1}) variance = {2} should be {3}, error = {4}", p, n, variance, varianceExpected, error));
            }
        }
Ejemplo n.º 2
0
 /// <summary>
 /// Draws one sample from this distribution. Only implemented for the
 /// uniform Beta(1,1) case (A == 1 and B == 1); any other shape
 /// parameters throw <see cref="NotImplementedException"/>.
 /// </summary>
 public int Sample()
 {
     if (A == 1 && B == 1)
     {
         return Rand.Binomial(TrialCount, ProbSuccess);
     }
     throw new NotImplementedException();
 }
Ejemplo n.º 3
0
        // Tests VMP inference on a Poisson-with-log-Gaussian-rate model:
        // x ~ Gaussian(1.2, 3.4), ex[i] = exp(x), y[i] ~ Poisson(ex[i]),
        // wrapped in an evidence (model-selection) block. Iterates until the
        // log-evidence converges, then checks the posterior on x against a
        // precomputed expected Gaussian.
        public void PoissonExpTest2()
        {
            // Evidence variable turns the whole model into p(model) for LogOdds queries.
            Variable <bool>   evidence = Variable.Bernoulli(0.5).Named("evidence");
            IfBlock           block    = Variable.If(evidence);
            Variable <double> x        = Variable.GaussianFromMeanAndVariance(1.2, 3.4).Named("x");

            Rand.Restart(12347);
            int n = 10;
            int N = 10;

            // Draw Binomial data, but note it is immediately overwritten below —
            // the test actually uses the fixed observations {5, 6, 7}.
            int[] data = new int[N];
            for (int i = 0; i < N; i++)
            {
                data[i] = Rand.Binomial(n, 1.0 / (double)n);
            }
            data = new int[] { 5, 6, 7 };
            Range item = new Range(data.Length).Named("item");
            VariableArray <double> ex = Variable.Array <double>(item).Named("ex");

            // Each item's Poisson rate is exp(x) with x shared across items.
            ex[item] = Variable.Exp(x).ForEach(item);
            VariableArray <int> y = Variable.Array <int>(item).Named("y");

            y[item] = Variable.Poisson(ex[item]);
            block.CloseBlock();
            y.ObservedValue = data;

            InferenceEngine ie = new InferenceEngine(new VariationalMessagePassing());

            var ca = ie.GetCompiledInferenceAlgorithm(evidence, x);

            double oldLogEvidence = double.NegativeInfinity;

            // Run VMP one iteration at a time until the log-evidence stops
            // changing (tolerance 1e-10), after a minimum of 20 iterations.
            for (int i = 0; i < 1000; i++)
            {
                ca.Update(1);
                double logEvidence1 = ca.Marginal <Bernoulli>(evidence.NameInGeneratedCode).LogOdds;
                Console.WriteLine(logEvidence1);
                if (i > 20 && System.Math.Abs(logEvidence1 - oldLogEvidence) < 1e-10)
                {
                    break;
                }
                oldLogEvidence = logEvidence1;
            }
            // Expected posterior was precomputed for this fixed data/seed.
            Gaussian xExpected = new Gaussian(1.755071011884509, 0.055154577283323);
            Gaussian xActual   = ca.Marginal <Gaussian>(x.NameInGeneratedCode);

            Console.WriteLine("x = {0} should be {1}", xActual, xExpected);
            Assert.True(xExpected.MaxDiff(xActual) < 1e-3);
        }
 /// <summary>
 /// Draws a sample from a Binomial distribution with the given number of
 /// trials and per-trial success probability, by delegating to
 /// <c>Rand.Binomial</c>.
 /// </summary>
 public static int Binomial(int trialCount, double probSuccess) => Rand.Binomial(trialCount, probSuccess);
        // Regression test for divergence of the IndexOfMaximum factor: builds a
        // multi-class (folder) classifier over sparse per-email features with
        // Gaussian feature weights, then infers the weight posteriors.
        // Fix: removed a stray empty statement (';') left between the
        // sparseWeightVector declaration and its definition.
        public void IndexOfMaximumDiverges()
        {
            int numberOfFeatures  = 10;
            int numberOfFolders   = 3;
            int numEmails         = 1000;
            // Random per-email feature counts and which feature indices fire.
            var featuresCountsObs = Enumerable.Range(0, numEmails).Select(o => Rand.Binomial(numberOfFeatures, 5.0 / numberOfFeatures)).ToArray();
            var featureIndicesObs = featuresCountsObs.Select(o => Rand.Perm(numberOfFeatures).ToList().GetRange(0, o).ToArray()).ToArray();
            //var trueFeatureWeights = Enumerable.Range(0, numberOfFolders).Select(p=>Enumerable.Range(0, numberOfFeatures).Select(o => Rand.Normal()).ToArray()).ToArray();
            //var folders = featureIndicesObs.Select(fi=>fi.Select(p=>trueFeatureWeights.Select(q=>q[p]).Sum()
            // random data for now!
            var folders = Enumerable.Range(0, numEmails).Select(o => Rand.Int(numberOfFolders)).ToArray();

            Range numberOfFeaturesRange = new Range(numberOfFeatures).Named("NumberOfFeaturesRange");

            numberOfFeaturesRange.AddAttribute(new Sequential()); // This requires new build of Infer.NET

            // create a range for the number of classes
            Range numberOfClases = new Range(numberOfFolders).Named("NumberOfClassesRange");

            // Model the total number of items
            var numberOfItems = Variable.New <int>().Named("numberOfItems");

            numberOfItems.ObservedValue = numEmails;
            Range numberOfItemsRange = new Range(numberOfItems).Named("numberOfItemsRange");

            numberOfItemsRange.AddAttribute(new Sequential());

            // Model the number features present in each item in each class
            var   featureCounts         = Variable.Array <int>(numberOfItemsRange).Named("featureCounts");
            Range featureCountItemRange = new Range(featureCounts[numberOfItemsRange]).Named("featureItemCountRange");

            featureCounts.ObservedValue = featuresCountsObs;

            // Model the features we observe
            var featureIndicies = Variable.Array(Variable.Array <int>(featureCountItemRange), numberOfItemsRange).Named("featureIndicies");

            featureIndicies.ObservedValue = featureIndicesObs;
            // Setup the priors: independent standard Gaussians on every weight.
            var FeatureWeights = Variable.Array(Variable.Array <double>(numberOfFeaturesRange), numberOfClases).Named("FeatureWeights");

            FeatureWeights[numberOfClases][numberOfFeaturesRange] = Variable.GaussianFromMeanAndPrecision(0, 1).ForEach(numberOfClases, numberOfFeaturesRange);

            // Setup the label value (Folder)
            var folderValue = Variable.Array <int>(numberOfItemsRange).Named("folderValue");

            folderValue.ObservedValue = folders;

            // Gather only the weights for the features each email actually has.
            var sparseWeightVector =
                Variable.Array(Variable.Array(Variable.Array <double>(featureCountItemRange), numberOfClases), numberOfItemsRange).Named("sparseWeightVector");

            sparseWeightVector[numberOfItemsRange][numberOfClases] = Variable.Subarray <double>(FeatureWeights[numberOfClases], featureIndicies[numberOfItemsRange]);
            // Per-class score = sum of the selected weights plus unit Gaussian noise.
            var scoresWithNoise = Variable.Array(Variable.Array <double>(numberOfClases), numberOfItemsRange).Named("scoresWithNoise");

            scoresWithNoise[numberOfItemsRange][numberOfClases] = Variable.GaussianFromMeanAndVariance(Variable.Sum(sparseWeightVector[numberOfItemsRange][numberOfClases]), 1);
            // The observed folder is the argmax of the noisy class scores.
            folderValue[numberOfItemsRange] = Variable <int> .Factor(MMath.IndexOfMaximumDouble, scoresWithNoise[numberOfItemsRange]);

            folderValue.AddAttribute(new MarginalPrototype(Discrete.Uniform(numberOfClases.SizeAsInt)));

            var ie = new InferenceEngine();

            Console.WriteLine(ie.Infer(FeatureWeights));
        }