Example #1
 /// <summary>
 /// Sets the uninformed priors.
 /// </summary>
 public void SetUninformedPriors()
 {
     ProbInitPrior.ObservedValue = Dirichlet.Uniform(K.SizeAsInt);
     CPTTransPrior.ObservedValue = Util.ArrayInit(K.SizeAsInt, k => Dirichlet.Uniform(K.SizeAsInt)).ToArray();
     EmitMeanPrior.ObservedValue = Util.ArrayInit(K.SizeAsInt, k => Gaussian.FromMeanAndVariance(1000, 1000000000)).ToArray();
     EmitPrecPrior.ObservedValue = Util.ArrayInit(K.SizeAsInt, k => Gamma.FromMeanAndVariance(0.1, 100)).ToArray();
 }
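A minimal companion sketch (assuming the Microsoft.ML.Probabilistic distribution types are in scope, and using an illustrative numStates = 3): it prints what these uninformed priors imply, e.g. a uniform Dirichlet gives each state a mean probability of 1/numStates, while the Gaussian/Gamma emission priors are deliberately very broad.

 // Sketch only: inspect the uninformed priors for a small, illustrative number of states.
 int numStates = 3;
 Dirichlet initPrior = Dirichlet.Uniform(numStates);
 Console.WriteLine(initPrior.GetMean());                        // (1/3, 1/3, 1/3)
 Gaussian emitMeanPrior = Gaussian.FromMeanAndVariance(1000, 1000000000);
 Gamma emitPrecPrior = Gamma.FromMeanAndVariance(0.1, 100);
 Console.WriteLine("{0} {1}", emitMeanPrior, emitPrecPrior);    // both extremely broad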
Example #2
        public void GaussianOpMean()
        {
            Gaussian result     = new Gaussian();
            Gaussian X0         = Gaussian.FromMeanAndVariance(7, 1.0 / 3);
            Gaussian Mean0      = Gaussian.FromMeanAndVariance(3, 0.5);
            Gamma    Precision0 = Gamma.FromShapeAndScale(3, 3);

            // Unknown precision
            Gamma    Precision = Gamma.FromShapeAndScale(3, 3);
            Gaussian X         = X0;
            Gaussian Mean      = Mean0;

            // in matlab: test_t_msg
            result = GaussianOp.MeanAverageConditional_slow(X, Mean, Precision);
            Console.WriteLine(result);
            Assert.True(GaussianOp.MeanAverageConditional_slow(X, Mean, Precision).MaxDiff(new Gaussian(-9.9121, -4.5998)) < 1e-0);

            X         = Gaussian.FromMeanAndVariance(1, 2);
            Mean      = Gaussian.PointMass(0);
            Precision = Gamma.FromShapeAndRate(3, 1);
            Gaussian xPostExpected = Gaussian.FromMeanAndVariance(0.178378819440295, 0.365796599498963);

            Console.WriteLine(GaussianOp.SampleAverageConditional_slow(X, Mean, Precision) * X);
            Assert.True(GaussianOp.SampleAverageConditional_slow(X, Mean, Precision).MaxDiff(xPostExpected / X) < 5e-7);
            Console.WriteLine(GaussianOp_Slow.SampleAverageConditional(X, Mean, Precision) * X);
            Assert.True(GaussianOp_Slow.SampleAverageConditional(X, Mean, Precision).MaxDiff(xPostExpected / X) < 5e-7);
        }
        private void GaussianProductOp_APointMass(double aMean, Gaussian Product, Gaussian B)
        {
            bool     isProper = Product.IsProper();
            Gaussian A        = Gaussian.PointMass(aMean);
            Gaussian result   = GaussianProductOp.AAverageConditional(Product, A, B);

            Console.WriteLine("{0}: {1}", A, result);
            Gaussian result2 = isProper ? GaussianProductOp_Slow.AAverageConditional(Product, A, B) : result;

            Console.WriteLine("{0}: {1}", A, result2);
            Assert.True(result.MaxDiff(result2) < 1e-6);
            var Amsg = InnerProductOp_PointB.BAverageConditional(Product, DenseVector.FromArray(B.GetMean()), new PositiveDefiniteMatrix(new double[, ] {
                { B.GetVariance() }
            }), VectorGaussian.PointMass(aMean), VectorGaussian.Uniform(1));

            //Console.WriteLine("{0}: {1}", A, Amsg);
            Assert.True(result.MaxDiff(Amsg.GetMarginal(0)) < 1e-6);
            double prevDiff = double.PositiveInfinity;

            for (int i = 3; i < 40; i++)
            {
                double v = System.Math.Pow(0.1, i);
                A       = Gaussian.FromMeanAndVariance(aMean, v);
                result2 = isProper ? GaussianProductOp.AAverageConditional(Product, A, B) : result;
                double diff = result.MaxDiff(result2);
                Console.WriteLine("{0}: {1} diff={2}", A, result2, diff.ToString("g4"));
                //Assert.True(diff <= prevDiff || diff < 1e-6);
                result2 = isProper ? GaussianProductOp_Slow.AAverageConditional(Product, A, B) : result;
                diff    = result.MaxDiff(result2);
                Console.WriteLine("{0}: {1} diff={2}", A, result2, diff.ToString("g4"));
                Assert.True(diff <= prevDiff || diff < 1e-6);
                prevDiff = diff;
            }
        }
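A hypothetical invocation of the helper above; the particular message values are purely illustrative and are chosen so that Product is proper, which exercises the slow-operator comparison branch.

        // Illustrative call: A is a point mass at 2, Product and B are proper Gaussian messages.
        GaussianProductOp_APointMass(
            2.0,
            Gaussian.FromMeanAndVariance(6, 1),     // Product message (proper)
            Gaussian.FromMeanAndVariance(3, 0.5));  // B message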
Example #4
 /// <summary>
 /// Priors from posteriors with possible missing features (e.g. for community training).
 /// </summary>
 /// <param name="posteriors">The posteriors.</param>
 /// <param name="featureSet">The feature set.</param>
 /// <param name="point">The point.</param>
 /// <param name="thresholdAndNoiseVariance">The threshold and noise variance.</param>
 /// <param name="userNames">The user names.</param>
 /// <returns>The <see cref="CommunityPriors" />.</returns>
 internal static CommunityPriors FromPosteriors(
     CommunityPosteriors posteriors,
     FeatureSet featureSet,
     double point,
     double thresholdAndNoiseVariance,
     IList <string> userNames)
 {
     return(new CommunityPriors
     {
         WeightMeans =
             featureSet.FeatureBuckets.ToDictionary(
                 ia => ia,
                 ia =>
                 posteriors.WeightMeans.ContainsKey(ia)
                     ? posteriors.WeightMeans[ia]
                     : Gaussian.FromMeanAndVariance(0.0, 1.0)),
         WeightPrecisions =
             featureSet.FeatureBuckets.ToDictionary(
                 ia => ia,
                 ia =>
                 posteriors.WeightPrecisions.ContainsKey(ia)
                     ? posteriors.WeightPrecisions[ia]
                     : Gamma.PointMass(point)),
         Thresholds =
             userNames.ToDictionary(
                 ia => ia,
                 ia =>
                 posteriors.Thresholds != null && posteriors.Thresholds.ContainsKey(ia)
                     ? posteriors.Thresholds[ia]
                     : Gaussian.FromMeanAndVariance(0.0, thresholdAndNoiseVariance)),
         NoiseVariance = thresholdAndNoiseVariance,
     });
 }
Example #5
        public static Gaussian SumExceptAverageConditional([SkipIfAllUniform] IReadOnlyList <Gaussian> array, int index)
        {
            if (array.Count == 2)
            {
                return(array[1 - index]);
            }
            double mean     = 0;
            double variance = 0;

            for (int i = 0; i < array.Count; i++)
            {
                if (i == index)
                {
                    continue;
                }
                if (array[i].Precision == 0)
                {
                    return(array[i]);
                }
                double mean1;
                double variance1;
                array[i].GetMeanAndVariance(out mean1, out variance1);
                mean     += mean1;
                variance += variance1;
            }
            return(Gaussian.FromMeanAndVariance(mean, variance));
        }
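For independent Gaussian summands, means and variances simply add, which is all the message above computes. A small illustrative check (values are arbitrary):

        Gaussian[] terms =
        {
            Gaussian.FromMeanAndVariance(1, 2),
            Gaussian.FromMeanAndVariance(3, 4),
            Gaussian.FromMeanAndVariance(5, 6),
        };
        Gaussian toSumExceptFirst = SumExceptAverageConditional(terms, 0);
        Console.WriteLine(toSumExceptFirst);  // Gaussian with mean 3 + 5 = 8 and variance 4 + 6 = 10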
        private void ExpModel()
        {
            double a = Factor.Random(Gaussian.FromMeanAndVariance(1.0, 2.0));
            double c = System.Math.Exp(a);

            InferNet.Infer(c, nameof(c));
        }
Example #7
        private void GaussianOpX2(Gaussian mean, Gamma precision)
        {
            Gaussian sample, result, result2;

            sample  = Gaussian.PointMass(2);
            result  = GaussianOp.SampleAverageConditional_slow(sample, mean, precision);
            result2 = GaussianOp_Slow.SampleAverageConditional(sample, mean, precision);
            Console.WriteLine("{0}: {1} {2}", sample, result, result2);
            Assert.True(result.MaxDiff(result2) < 1e-8);
            double prevDiff = double.PositiveInfinity;

            for (int i = 8; i < 30; i++)
            {
                double v = System.Math.Pow(0.1, i);
                sample  = Gaussian.FromMeanAndVariance(2, v);
                result2 = GaussianOp_Slow.SampleAverageConditional(sample, mean, precision);
                double diff = result.MaxDiff(result2);
                Console.WriteLine("{0}: {1} diff={2}", sample, result2, diff.ToString("g4"));
                Assert.True(diff <= prevDiff || diff < 1e-6);
                result2 = GaussianOp.SampleAverageConditional_slow(sample, mean, precision);
                diff    = result.MaxDiff(result2);
                Console.WriteLine("{0}: {1} diff={2}", sample, result2, diff.ToString("g4"));
                Assert.True(diff <= prevDiff || diff < 1e-6);
                prevDiff = diff;
            }
        }
Example #8
        public void MissingDataGaussianTest()
        {
            Variable <double>      mean      = Variable.GaussianFromMeanAndVariance(0, 100).Named("mean");
            Variable <double>      precision = Variable.GammaFromShapeAndScale(1, 1).Named("precision");
            Variable <int>         n         = Variable.New <int>().Named("n");
            Range                  i         = new Range(n).Named("i");
            VariableArray <double> x         = Variable.Array <double>(i).Named("x");

            using (Variable.ForEach(i))
            {
                using (Variable.If(x[i] > 0))
                {
                    x[i] = Variable.GaussianFromMeanAndPrecision(mean, precision);
                }
            }
            x.ObservedValue = new double[] { -1, 5.0, -1, 7.0, -1 };
            n.ObservedValue = x.ObservedValue.Length;

            InferenceEngine engine = new InferenceEngine(new VariationalMessagePassing());
            //Console.WriteLine(engine.Infer(isMissing));
            Gaussian meanExpected      = Gaussian.FromMeanAndVariance(5.9603207170807826, 0.66132138200164436);
            Gamma    precisionExpected = Gamma.FromShapeAndRate(2, 2.6628958274937107);
            Gaussian meanActual        = engine.Infer <Gaussian>(mean);
            Gamma    precisionActual   = engine.Infer <Gamma>(precision);

            Console.WriteLine("mean = {0} should be {1}", meanActual, meanExpected);
            Console.WriteLine("precision = {0} should be {1}", precisionActual, precisionExpected);
            Assert.True(meanExpected.MaxDiff(meanActual) < 1e-10);
            Assert.True(precisionExpected.MaxDiff(precisionActual) < 1e-10);
        }
        private void GaussianModel()
        {
            double m = Factor.Random(Gaussian.FromMeanAndVariance(1.0, 2.0));
            double p = Factor.Random(Gamma.FromMeanAndVariance(2.0, 1.0));
            double c = Factor.Gaussian(m, p);

            InferNet.Infer(c, nameof(c));
        }
Example #10
        public static Gaussian GPPrediction(Vector x, Vector[] xData, Gaussian[] y, KernelFunction kf, PositiveDefiniteMatrix spec)
        {
            var    KxD      = Vector.FromArray(xData.Select(o => kf.EvaluateX1X2(x, o)).ToArray());
            double mean     = spec.QuadraticForm(KxD, Vector.FromArray(y.Select(o => o.GetMean()).ToArray()));
            double variance = kf.EvaluateX1X2(x, x) - spec.QuadraticForm(KxD);

            return(Gaussian.FromMeanAndVariance(mean, variance));
        }
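A hedged usage sketch: GPPrediction appears to implement the usual GP posterior mean k(x,X) K^-1 m_y and variance k(x,x) - k(x,X) K^-1 k(X,x), so spec is assumed to be the inverse of the kernel matrix over xData (here with each training point's observation variance added to the diagonal). The SquaredExponential kernel and the matrix construction below are assumptions made for illustration, not part of the original listing.

        Vector[] xData = { Vector.FromArray(0.0), Vector.FromArray(1.0) };
        Gaussian[] y = { Gaussian.FromMeanAndVariance(0.1, 0.01), Gaussian.FromMeanAndVariance(0.9, 0.01) };
        KernelFunction kf = new SquaredExponential(0);  // assumed available from the Kernels namespace
        var K = new PositiveDefiniteMatrix(xData.Length, xData.Length);
        for (int i = 0; i < xData.Length; i++)
        {
            for (int j = 0; j < xData.Length; j++)
            {
                K[i, j] = kf.EvaluateX1X2(xData[i], xData[j]) + (i == j ? y[i].GetVariance() : 0.0);
            }
        }
        Gaussian prediction = GPPrediction(Vector.FromArray(0.5), xData, y, kf, K.Inverse());
        Console.WriteLine(prediction);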
Example #11
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="ProductGaussianBetaVmpOp"]/message_doc[@name="ProductAverageLogarithm(Gaussian, Beta)"]/*'/>
        public static Gaussian ProductAverageLogarithm([SkipIfUniform] Gaussian A, [SkipIfUniform] Beta B)
        {
            double   ma = A.GetMean(), mb = B.GetMean();
            double   va = A.GetVariance(), vb = B.GetVariance();
            Gaussian result = Gaussian.FromMeanAndVariance(ma * mb, va * vb + va * mb * mb + vb * ma * ma);

            return(result);
        }
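The variance used above is the exact variance of a product of independent random variables, Var[AB] = va*vb + va*mb^2 + vb*ma^2. A quick Monte Carlo sanity check of the moment matching (values are illustrative):

        Gaussian A = Gaussian.FromMeanAndVariance(2, 0.5);
        Beta B = new Beta(3, 4);
        Gaussian momentMatched = ProductAverageLogarithm(A, B);
        double sum = 0, sumSq = 0;
        int nSamples = 100000;
        for (int i = 0; i < nSamples; i++)
        {
            double ab = A.Sample() * B.Sample();
            sum += ab;
            sumSq += ab * ab;
        }
        double mcMean = sum / nSamples;
        double mcVariance = sumSq / nSamples - mcMean * mcMean;
        Console.WriteLine("moment matched: {0}, Monte Carlo mean: {1:f3}, variance: {2:f3}", momentMatched, mcMean, mcVariance);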
Example #12
        public static ArrayType ArrayAverageConditional <ArrayType>([SkipIfUniform] Gaussian sumExcept, IReadOnlyList <Gaussian> array, int index, ArrayType result)
            where ArrayType : IList <Gaussian>
        {
            if (sumExcept.Precision == 0 || array.Count <= 2)
            {
                for (int i = 0; i < result.Count; i++)
                {
                    if (i == index)
                    {
                        result[i] = Gaussian.Uniform();
                    }
                    else
                    {
                        result[i] = sumExcept;
                    }
                }
                return(result);
            }
            sumExcept.GetMeanAndVarianceImproper(out double sumMean, out double sumVariance);
            double[] means     = new double[array.Count];
            double[] variances = new double[array.Count];
            for (int i = 0; i < array.Count; i++)
            {
                // could generalize this to be predicate(i)
                if (i == index)
                {
                    continue;
                }
                array[i].GetMeanAndVarianceImproper(out double mean1, out double variance1);
                means[i]     = mean1;
                variances[i] = variance1;
            }
            double[] meanPrevious     = new double[array.Count];
            double[] variancePrevious = new double[array.Count];
            for (int i = 1; i < array.Count; i++)
            {
                // i == index doesn't matter since it will have mean=0, variance=0
                meanPrevious[i]     = meanPrevious[i - 1] + means[i - 1];
                variancePrevious[i] = variancePrevious[i - 1] + variances[i - 1];
            }
            double meanNext     = 0;
            double varianceNext = 0;

            for (int i = array.Count - 1; i >= 0; i--)
            {
                if (i == index)
                {
                    result[i] = Gaussian.Uniform();
                    continue;
                }
                double sumOtherMeans     = meanPrevious[i] + meanNext;
                double sumOtherVariances = variancePrevious[i] + varianceNext;
                result[i]     = Gaussian.FromMeanAndVariance(sumMean - sumOtherMeans, sumVariance + sumOtherVariances);
                meanNext     += means[i];
                varianceNext += variances[i];
            }
            return(result);
        }
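The prefix/suffix accumulators (meanPrevious/meanNext and their variance counterparts) let all outgoing messages be formed in O(n) total work rather than O(n^2). An illustrative call with arbitrary values:

        Gaussian[] array = {
            Gaussian.FromMeanAndVariance(1, 1),
            Gaussian.FromMeanAndVariance(2, 2),
            Gaussian.FromMeanAndVariance(3, 3),
        };
        Gaussian sumExcept0 = Gaussian.FromMeanAndVariance(10, 4);
        Gaussian[] result = ArrayAverageConditional(sumExcept0, array, 0, new Gaussian[3]);
        Console.WriteLine(result[1]);  // mean 10 - 3 = 7, variance 4 + 3 = 7
        Console.WriteLine(result[2]);  // mean 10 - 2 = 8, variance 4 + 2 = 6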
Example #13
 /// <summary>
 /// Generate priors from the weight and threshold variance.
 /// </summary>
 /// <param name="featureBuckets">The feature buckets.</param>
 /// <param name="weightVariance">The weight variance.</param>
 /// <param name="thresholdVariance">The threshold variance.</param>
 /// <returns>
 /// The <see cref="Priors" />
 /// </returns>
 internal static Priors Generate(ICollection <FeatureBucket> featureBuckets, double weightVariance, double thresholdVariance)
 {
     return(new Priors
     {
         Weights = featureBuckets.ToDictionary(ia => ia, ia => Gaussian.FromMeanAndVariance(0.0, weightVariance)),
         Threshold = Gaussian.FromMeanAndVariance(0.0, thresholdVariance),
         NoiseVariance = thresholdVariance
     });
 }
Example #14
        /// <summary>
        /// Find the Laplace approximation for Beta(Logistic(x)) * Gaussian(x)
        /// </summary>
        /// <param name="beta">Beta distribution</param>
        /// <param name="gauss">Gaussian distribution</param>
        /// <returns>A proposal distribution</returns>
        public static Gaussian LogisticProposalDistribution(Beta beta, Gaussian gauss)
        {
            if (beta.IsUniform())
            {
                return(new Gaussian(gauss));
            }

            // if gauss is uniform, m,p = 0 below, and the following code will just ignore the Gaussian
            // and do a Laplace approximation for Beta(Logistic(x))

            double c = beta.TrueCount - 1;
            double d = beta.FalseCount - 1;
            double m = gauss.GetMean();
            double p = gauss.Precision;
            // We want to find the mode of
            // ln(g(x)) = c.ln(f(x)) + d.ln(1 - f(x)) - 0.5p(x - m)^2 + constant
            // First deriv:
            // h(x) = (ln g(x))' = c.(1 - f(x)) - d.f(x) - p(x - m)
            // Second deriv:
            // h'(x) = (ln g(x))'' = -(c + d).f'(x) - p
            // Use Newton-Raphson to find unique root of h(x).
            // g(x) is log-concave so Newton-Raphson should converge quickly.
            // Set the initial point by projecting beta
            // to a Gaussian and taking the mean of the product:
            double bMean, bVar;

            beta.GetMeanAndVariance(out bMean, out bVar);
            Gaussian prod            = new Gaussian();
            double   invLogisticMean = Math.Log(bMean) - Math.Log(1.0 - bMean);

            prod.SetToProduct(Gaussian.FromMeanAndVariance(invLogisticMean, bVar), gauss);
            double xnew = prod.GetMean();
            double x = 0, fx, dfx, hx, dhx = 0;
            int    maxIters = 100; // Should only need a handful of iters
            int    cnt      = 0;

            do
            {
                x   = xnew;
                fx  = MMath.Logistic(x);
                dfx = fx * (1.0 - fx);
                // Find the root of h(x)
                hx   = c * (1.0 - fx) - d * fx - p * (x - m);
                dhx  = -(c + d) * dfx - p;
                xnew = x - (hx / dhx); // The Newton step
                if (Math.Abs(x - xnew) < 0.00001)
                {
                    break;
                }
            } while (++cnt < maxIters);
            if (cnt >= maxIters)
            {
                throw new InferRuntimeException("Unable to find proposal distribution mode");
            }
            return(Gaussian.FromMeanAndPrecision(x, -dhx));
        }
        /// <summary>
        /// VMP message to 'product'
        /// </summary>
        /// <param name="A">Incoming message from 'a'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <param name="B">Incoming message from 'b'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <returns>The outgoing VMP message to the 'product' argument</returns>
        /// <remarks><para>
        /// The outgoing message is a distribution matching the moments of 'product' as the random arguments are varied.
        /// The formula is <c>proj[sum_(a,b) p(a,b) factor(product,a,b)]</c>.
        /// </para></remarks>
        /// <exception cref="ImproperMessageException"><paramref name="A"/> is not a proper distribution</exception>
        /// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
        public static Gaussian ProductAverageLogarithm([SkipIfUniform] Gaussian A, [SkipIfUniform] Gamma B)
        {
            double ma = A.GetMean(), mb = B.GetMean();
            double va = A.GetVariance(), vb = B.GetVariance();

            if (Double.IsPositiveInfinity(va) || Double.IsPositiveInfinity(vb))
            {
                return(Gaussian.Uniform());
            }
            return(Gaussian.FromMeanAndVariance(ma * mb, va * vb + va * mb * mb + vb * ma * ma));
        }
        /// <summary>
        /// VMP message to 'product'
        /// </summary>
        /// <param name="A">Constant value for 'a'.</param>
        /// <param name="B">Incoming message from 'b'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <returns>The outgoing VMP message to the 'product' argument</returns>
        /// <remarks><para>
        /// The outgoing message is a distribution matching the moments of 'product' as the random arguments are varied.
        /// The formula is <c>proj[sum_(b) p(b) factor(product,a,b)]</c>.
        /// </para></remarks>
        /// <exception cref="ImproperMessageException"><paramref name="B"/> is not a proper distribution</exception>
        public static Gaussian ProductAverageLogarithm(double A, [SkipIfUniform] Gamma B)
        {
            double mb, vb;

            B.GetMeanAndVariance(out mb, out vb);
            if (Double.IsPositiveInfinity(vb))
            {
                return(Gaussian.Uniform());
            }
            return(Gaussian.FromMeanAndVariance(A * mb, A * A * vb));
        }
Example #17
 public static DistributionArray2D <Gaussian, double> RandomGaussianArray(int C, int d)
 {
     Gaussian[,] array = new Gaussian[C, d];
     for (int i = 0; i < C; i++)
     {
         for (int j = 0; j < d; j++)
         {
             array[i, j] = Gaussian.FromMeanAndVariance(Rand.Normal(), 1);
         }
     }
     return((DistributionArray2D <Gaussian, double>) Distribution <double> .Array(array));
 }
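Illustrative use of the helper above (the Rand.Restart call just makes the random means reproducible; the seed is arbitrary):

      Rand.Restart(12347);
      var priorArray = RandomGaussianArray(3, 2);
      Console.WriteLine(priorArray);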
Example #18
 /// <summary>
 /// Create an array of Gaussian distributions with random mean and unit variance
 /// </summary>
 /// <param name="row">Number of rows</param>
 /// <param name="col">Number of columns</param>
 /// <returns>The array as a distribution over a 2-D double array domain</returns>
 private static IDistribution <double[, ]> randomGaussianArray(int row, int col)
 {
     Gaussian[,] array = new Gaussian[row, col];
     for (int i = 0; i < row; i++)
     {
         for (int j = 0; j < col; j++)
         {
             array[i, j] = Gaussian.FromMeanAndVariance(Rand.Normal(), 1);
         }
     }
     return(Distribution <double> .Array(array));
 }
Example #19
 public void GaussianOpX2Test()
 {
     using (TestUtils.TemporarilyAllowGaussianImproperMessages)
     {
         Gamma    precision = Gamma.FromShapeAndScale(3, 3);
         Gaussian mean;
         mean = Gaussian.PointMass(7);
         GaussianOpX2(mean, precision);
         mean = Gaussian.FromMeanAndVariance(7, 1.0 / 3);
         GaussianOpX2(mean, precision);
     }
 }
        /// <summary>
        /// EP message to 'product'.
        /// </summary>
        /// <param name="Product">Incoming message from 'product'.</param>
        /// <param name="A">Incoming message from 'a'.</param>
        /// <param name="B">Incoming message from 'b'.</param>
        /// <returns>The outgoing EP message to the 'product' argument.</returns>
        /// <remarks><para>
        /// The outgoing message is the integral of the factor times incoming messages, over all arguments except 'product'.
        /// The formula is <c>int f(product,x) q(x) dx</c> where <c>x = (a,b)</c>.
        /// </para></remarks>
        public static Gaussian ProductAverageConditional(Gaussian Product, Gaussian A, Beta B)
        {
            double mA, vA;

            A.GetMeanAndVariance(out mA, out vA);
            double mB, vB;

            B.GetMeanAndVariance(out mB, out vB);
            double mProduct, vProduct;

            Product.GetMeanAndVariance(out mProduct, out vProduct);
            // algorithm: quadrature on A from -1 to 1, plus quadrature on 1/A from -1 to 1.
            double z = 0, sumX = 0, sumX2 = 0;

            for (int i = 0; i <= QuadratureNodeCount; i++)
            {
                double a     = (2.0 * i) / QuadratureNodeCount - 1;
                double logfA = Gaussian.GetLogProb(mProduct, a * mB, vProduct + a * a * vB) + Gaussian.GetLogProb(a, mA, vA);
                double fA    = Math.Exp(logfA);

                z += fA;
                double b  = (mB * vProduct + a * mProduct * vB) / (vProduct + a * a * vB);
                double b2 = b * b + (vProduct * vB) / (vProduct + a * a * vB);
                double x  = a * b;
                double x2 = a * a * b2;
                sumX  += x * fA;
                sumX2 += x2 * fA;

                double invA = a;
                a = 1.0 / invA;
                double logfInvA = Gaussian.GetLogProb(mProduct * invA, mB, vProduct * invA * invA + vB) + Gaussian.GetLogProb(a, mA, vA) - Math.Log(Math.Abs(invA + Double.Epsilon));
                double fInvA    = Math.Exp(logfInvA);
                z     += fInvA;
                b      = (mB * vProduct + a * mProduct * vB) / (vProduct + a * a * vB);
                b2     = b * b + (vProduct * vB) / (vProduct + a * a * vB);
                x      = a * b;
                x2     = a * a * b2;
                sumX  += x * fInvA;
                sumX2 += x2 * fInvA;
            }
            double   mean   = sumX / z;
            double   var    = sumX2 / z - mean * mean;
            Gaussian result = Gaussian.FromMeanAndVariance(mean, var);

            if (ForceProper)
            {
                result.SetToRatioProper(result, Product);
            }
            else
            {
                result.SetToRatio(result, Product);
            }
            return(result);
        }
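A hypothetical call to the quadrature message above; QuadratureNodeCount and ForceProper are assumed to be static members of the containing operator class, and the message values are illustrative only.

        Gaussian productMsg = Gaussian.FromMeanAndVariance(0.3, 1.0);
        Gaussian aMsg = Gaussian.FromMeanAndVariance(0.5, 0.2);
        Beta bMsg = new Beta(2, 3);
        Console.WriteLine(ProductAverageConditional(productMsg, aMsg, bMsg));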
Example #21
        /// <summary>VMP message to <c>productExp</c>.</summary>
        /// <param name="A">Constant value for <c>a</c>.</param>
        /// <param name="B">Incoming message from <c>b</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <returns>The outgoing VMP message to the <c>productExp</c> argument.</returns>
        /// <remarks>
        ///   <para>The outgoing message is a distribution matching the moments of <c>productExp</c> as the random arguments are varied. The formula is <c>proj[sum_(b) p(b) factor(productExp,a,b)]</c>.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="B" /> is not a proper distribution.</exception>
        public static Gaussian ProductExpAverageLogarithm(double A, [SkipIfUniform] Gaussian B)
        {
            double mb, vb;

            B.GetMeanAndVariance(out mb, out vb);
            if (Double.IsPositiveInfinity(vb))
            {
                return(Gaussian.Uniform());
            }
            double mp = A * Math.Exp(mb + vb / 2.0);

            return(Gaussian.FromMeanAndVariance(mp, A * A * Math.Exp(2.0 * (mb + vb)) - mp * mp));
        }
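The closed form above uses the lognormal moments E[A*exp(B)] = A*exp(mb + vb/2) and E[(A*exp(B))^2] = A^2*exp(2*mb + 2*vb). A quick Monte Carlo sanity check (values are illustrative):

        double aValue = 2.0;
        Gaussian bDist = Gaussian.FromMeanAndVariance(0.3, 0.4);
        Gaussian momentMatched = ProductExpAverageLogarithm(aValue, bDist);
        double sum = 0, sumSq = 0;
        int n = 200000;
        for (int i = 0; i < n; i++)
        {
            double s = aValue * Math.Exp(bDist.Sample());
            sum += s;
            sumSq += s * s;
        }
        double mcMean = sum / n;
        double mcVariance = sumSq / n - mcMean * mcMean;
        Console.WriteLine("moment matched: {0}, Monte Carlo mean: {1:f3}, variance: {2:f3}", momentMatched, mcMean, mcVariance);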
        public void IndexOfMaximumFastTest()
        {
            int   n      = 5;
            Range item   = new Range(n).Named("item");
            var   priors = Variable <Gaussian> .Array(item);

            priors.ObservedValue = Util.ArrayInit(n, i => Gaussian.FromMeanAndVariance(i * 0.5, i));
            var x = Variable.Array <double>(item).Named("x");

            x[item] = Variable <double> .Random(priors[item]);

            var y = Variable <int> .Factor(MMath.IndexOfMaximumDouble, x);

            InferenceEngine engine = new InferenceEngine();

            engine.ShowProgress = false;
            string format  = "f4";
            var    yActual = engine.Infer <Discrete>(y);

            Console.WriteLine("Quadratic: {0}", yActual.ToString(format));

            // Monte Carlo estimate
            Rand.Restart(0);
            DiscreteEstimator est = new DiscreteEstimator(n);

            for (int iter = 0; iter < 100000; iter++)
            {
                double[] samples = Util.ArrayInit(n, i => priors.ObservedValue[i].Sample());
                int      argmax  = MMath.IndexOfMaximumDouble(samples);
                est.Add(argmax);
            }
            var yExpected = est.GetDistribution(Discrete.Uniform(n));

            Console.WriteLine("Sampling:  {0}", yExpected.ToString(format));
            Assert.True(yExpected.MaxDiff(yActual) < 1e-2);

            engine.Compiler.GivePriorityTo(typeof(IndexOfMaximumOp_Fast));
            yActual = engine.Infer <Discrete>(y);
            Console.WriteLine("Linear:    {0}", yActual.ToString(format));
            Assert.True(yExpected.MaxDiff(yActual) < 1e-2);

            bool compareApproximation = false;

            if (compareApproximation)
            {
                var yPost2 = IndexOfMaximumOp_Fast.IndexOfMaximumDoubleAverageConditional(priors.ObservedValue, Discrete.Uniform(n));
                Console.WriteLine(yPost2);
                var yPost3 = IndexOfMaximumOp_Fast.IndexOfMaximumDoubleAverageConditional2(priors.ObservedValue, Discrete.Uniform(n));
                Console.WriteLine(yPost3);
            }
        }
Example #23
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="InnerProductArrayOp"]/message_doc[@name="InnerProductAverageConditional(double[], IList{Gaussian})"]/*'/>
        public static Gaussian InnerProductAverageConditional(double[] A, [SkipIfUniform] IList <Gaussian> B)
        {
            double xMean = 0, xVariance = 0,
                   bMean, bVariance;

            for (int k = 0; k < A.Length; k++)
            {
                B[k].GetMeanAndVariance(out bMean, out bVariance);
                xMean     += A[k] * bMean;
                xVariance += A[k] * A[k] * bVariance;
            }

            return(Gaussian.FromMeanAndVariance(xMean, xVariance));
        }
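A small illustrative call: for fixed coefficients A and independent Gaussian elements B[k], the inner product has mean sum_k A[k]*m_k and variance sum_k A[k]^2*v_k.

        double[] coefficients = { 1.0, -2.0 };
        Gaussian[] b = { Gaussian.FromMeanAndVariance(3, 1), Gaussian.FromMeanAndVariance(5, 2) };
        Console.WriteLine(InnerProductAverageConditional(coefficients, b));  // mean 3 - 10 = -7, variance 1 + 8 = 9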
Example #24
        static void Main(string[] args)
        {
            double[][] trainingData = new double[2][];
            trainingData[0] = new double[] { 0.5, 0.6, 0.7, 0.24 };
            trainingData[1] = new double[] { 0.4, 0.3, 0.55, 0.95 };

            Gaussian alphaPrior   = Gaussian.FromMeanAndVariance(1, 0.002);
            Gaussian betaPrior    = Gaussian.FromMeanAndVariance(1, 0.002);
            Gaussian genesT1Prior = Gaussian.FromMeanAndVariance(1, 0.002);
            Gaussian wPrior       = Gaussian.FromMeanAndVariance(0, 1);

            int nGenes = trainingData[0].Length;

            NetModelData initPriors = new NetModelData(
                Util.ArrayInit(nGenes, u => genesT1Prior),
                Util.ArrayInit(nGenes, u => alphaPrior),
                Util.ArrayInit(nGenes, u => betaPrior),
                Util.ArrayInit(nGenes, u => Util.ArrayInit(nGenes - 1, t => wPrior))
                ); // w   -> variance : Variable.GammaFromShapeAndRate(1, 1)
            //Train the model
            PertNetModel pertNetModel = new PertNetModel();

            Console.WriteLine("number of genes: " + trainingData[0].Length);
            pertNetModel.CreateModel(trainingData[0].Length);
            pertNetModel.SetModelData(initPriors);


            NetModelData posteriors1 = pertNetModel.InferModelData(trainingData);

            Gaussian[][] x = posteriors1.wDist;
            for (int i = 0; i < posteriors1.wDist.Length; i++)
            {
                for (int j = 0; j < posteriors1.wDist[i].Length; j++)
                {
                    Console.WriteLine(posteriors1.wDist[i][j]);
                }
            }
            //Console.WriteLine("Inferred w = " + posteriors1.wDist);
            // Console.WriteLine("===================");
            // Console.WriteLine(pertNetModel.w);
            //////////////////////////
            double[][] trainingData2 = new double[2][];
            trainingData2[0] = new double[] { 0.25, 0.16, 0.73, 0.4 };
            trainingData2[1] = new double[] { 0.94, 0.43, 0.25, 0.65 };

            pertNetModel.SetModelData(posteriors1);
            NetModelData posteriors2 = pertNetModel.InferModelData(trainingData2);

            Console.ReadLine();
        }
Example #25
        /// <summary>VMP message to <c>productExp</c>.</summary>
        /// <param name="A">Incoming message from <c>a</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <param name="B">Incoming message from <c>b</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <returns>The outgoing VMP message to the <c>productExp</c> argument.</returns>
        /// <remarks>
        ///   <para>The outgoing message is a distribution matching the moments of <c>productExp</c> as the random arguments are varied. The formula is <c>proj[sum_(a,b) p(a,b) factor(productExp,a,b)]</c>.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="A" /> is not a proper distribution.</exception>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="B" /> is not a proper distribution.</exception>
        public static Gaussian ProductExpAverageLogarithm([SkipIfUniform] Gaussian A, [SkipIfUniform] Gaussian B)
        {
            double ma, mb, va, vb;

            A.GetMeanAndVariance(out ma, out va);
            B.GetMeanAndVariance(out mb, out vb);
            if (Double.IsPositiveInfinity(va) || Double.IsPositiveInfinity(vb))
            {
                return(Gaussian.Uniform());
            }
            double mp = ma * Math.Exp(mb + vb / 2.0);

            return(Gaussian.FromMeanAndVariance(mp, (va + ma * ma) * Math.Exp(2.0 * (mb + vb)) - mp * mp));
        }
Example #26
        /// <summary>
        /// EP message to 'logOdds'.
        /// </summary>
        /// <param name="sample">Constant value for sample.</param>
        /// <param name="logOdds">Incoming message from 'logOdds'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
        /// <returns>The outgoing EP message to the 'logOdds' argument.</returns>
        /// <remarks><para>
        /// The outgoing message is the moment matched Gaussian approximation to the factor.
        /// </para></remarks>
        public static Gaussian LogOddsAverageConditional(bool sample, [SkipIfUniform] Gaussian logOdds)
        {
            double m, v;

            logOdds.GetMeanAndVariance(out m, out v);
            double s = sample ? 1 : -1;

            m *= s;
            if (m + 1.5 * v < -38)
            {
                double beta2 = Math.Exp(m + 1.5 * v);
                return(Gaussian.FromMeanAndVariance(s * (m + v), v * (1 - v * beta2)) / logOdds);
            }
            double sigma0 = MMath.LogisticGaussian(m, v);
            double sigma1 = MMath.LogisticGaussianDerivative(m, v);
            double sigma2 = MMath.LogisticGaussianDerivative2(m, v);
            double alpha, beta;

            alpha = sigma1 / sigma0;
            if (Double.IsNaN(alpha))
            {
                throw new Exception("alpha is NaN");
            }
            if (m + 2 * v < -19)
            {
                beta = Math.Exp(3 * m + 2.5 * v) / (sigma0 * sigma0);
            }
            else
            {
                //beta = (sigma1*sigma1 - sigma2*sigma0)/(sigma0*sigma0);
                beta = alpha * alpha - sigma2 / sigma0;
            }
            if (Double.IsNaN(beta))
            {
                throw new Exception("beta is NaN");
            }
            double m2 = s * (m + v * alpha);
            double v2 = v * (1 - v * beta);

            if (v2 > v)
            {
                throw new Exception("v2 > v");
            }
            if (v2 < 0)
            {
                throw new Exception("v2 < 0");
            }
            return(Gaussian.FromMeanAndVariance(m2, v2) / logOdds);
        }
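An illustrative call, assuming a Gaussian prior message on logOdds; multiplying the returned EP message by that prior gives the approximate posterior on logOdds.

        Gaussian logOddsPrior = Gaussian.FromMeanAndVariance(0, 2);
        Gaussian toLogOdds = LogOddsAverageConditional(true, logOddsPrior);
        Console.WriteLine("message: {0}, approximate posterior: {1}", toLogOdds, toLogOdds * logOddsPrior);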
 public void LogisticProposalDistribution()
 {
     double[] TrueCounts  = { 2, 20, 200, 2000, 20000, 2, 20, 200, 2000, 20000 };
     double[] FalseCounts = { 2, 200, 20000, 200, 20, 200, 2000, 2, 2000, 20 };
     double[] Means       = { .5, 1, 2, 4, 8, 16, 32, 0, -2, -20 };
     double[] Variances   = { 1, 2, 4, 8, .1, .0001, .01, .000001, 0.000001, 0.001 };
     for (int i = 0; i < 10; i++)
     {
         Beta b = new Beta();
         b.TrueCount  = TrueCounts[i];
         b.FalseCount = FalseCounts[i];
         Gaussian g         = Gaussian.FromMeanAndVariance(Means[i], Variances[i]);
         Gaussian gProposal = GaussianBetaProductOp.LogisticProposalDistribution(b, g);
     }
 }
Example #28
 /// <summary>
 /// Priors from posteriors with possible missing features.
 /// </summary>
 /// <param name="posteriors">The posteriors.</param>
 /// <param name="featureSet">The feature set.</param>
 /// <param name="noiseVariance">The noise variance.</param>
 /// <returns>The <see cref="Priors" /></returns>
 internal static Priors FromPosteriors(Posteriors posteriors, FeatureSet featureSet, double noiseVariance)
 {
     return(new Priors
     {
         Weights =
             featureSet.FeatureBuckets.ToDictionary(
                 ia => ia,
                 ia =>
                 posteriors.Weights.ContainsKey(ia)
                                ? posteriors.Weights[ia]
                                : Gaussian.FromMeanAndVariance(0.0, noiseVariance)),
         Threshold = posteriors.Threshold,
         NoiseVariance = noiseVariance,
     });
 }
Example #29
        public void ReadXml(XmlReader reader)
        {
            reader.MoveToFirstAttribute();
            DynamicPopularityFactor = double.Parse(reader.GetAttribute("dynamic"));
            ScoreAccuracy           = Gaussian.FromMeanAndVariance(
                double.Parse(reader.GetAttribute("scoreAccuracyMean")),
                double.Parse(reader.GetAttribute("scoreAccuracyVariance")));

            while (reader.ReadToFollowing("participant"))
            {
                var p = new Participant();
                p.ReadXml(reader);
                Participants.Add(p.Id, p);
            }
        }
Example #30
        /// <summary>
        /// The entry point of the program, where the program control starts and ends.
        /// </summary>
        public static void Main()
        {
            const int  T = 100;
            const int  K = 2;
            const int  N = 5;
            const bool showFactorGraph = false;

            TestHMM <ContinousHMM, double, double, Gaussian, Gaussian, double, Gamma, double>(
                T,
                K,
                1,
                Gaussian.FromMeanAndPrecision,
                () => Gaussian.FromMeanAndVariance(0, 1000),
                () => Gamma.FromShapeAndScale(1000, 0.001),
                showFactorGraph);

            // TModel, TEmit, TEmitDist, TEmitMeanDist, TEmitMean, TEmitPrecDist, TEmitPrec
            TestHMM <MultivariateHMM, Vector, Vector, VectorGaussian, VectorGaussian, Vector, Wishart, PositiveDefiniteMatrix>(
                T,
                K,
                1,
                VectorGaussian.FromMeanAndPrecision,
                () => VectorGaussian.FromMeanAndVariance(Vector.Zero(N), PositiveDefiniteMatrix.IdentityScaledBy(N, 1000)),
                () => Wishart.FromShapeAndScale(N, PositiveDefiniteMatrix.IdentityScaledBy(N, 0.001)),
                showFactorGraph);

            TestHMM <BinaryHMM, bool, double, Bernoulli, Beta, double, Beta, double>(
                T,
                K,
                1,
                (m, p) => new Bernoulli(m),
                () => new Beta(1, 1),
                null,
                showFactorGraph);

            TestHMM <DiscreteHMM, int, double, Discrete, Dirichlet, Vector, Dirichlet, Vector>(
                T,
                K,
                N,
                (m, p) => new Discrete(m),
                () => Dirichlet.Uniform(N),
                null,
                showFactorGraph);

            // TestBinaryHiddenMarkovModel();
            // TestDiscreteHiddenMarkovModel();
            // TestMultivariateHMM();
        }