static void Main(){

     // Stopwatch comes from System.Diagnostics, Thread from System.Threading.
     Stopwatch timer = new Stopwatch();
     timer.Start();

     Alpha oAlpha = new Alpha();
     Beta oBeta = new Beta();

     // create the threads
     Thread aThread = new Thread(new ThreadStart(oAlpha.Counter));
     Thread bThread = new Thread(new ThreadStart(oBeta.Counter));

     // start the threads
     aThread.Start();
     bThread.Start();

     // wait until both threads have finished
     aThread.Join();
     bThread.Join();

     timer.Stop();

     TimeSpan ts = timer.Elapsed;

     Console.WriteLine("Execution Time: " + ts);
 }
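The Alpha and Beta types used above are not shown in the snippet. A minimal, purely hypothetical sketch consistent with the calls in Main (each type only needs a parameterless constructor and a void Counter method):

// Hypothetical worker classes; the original Counter bodies are not shown on this page.
public class Alpha
{
    public void Counter()
    {
        for (int i = 0; i < 10000000; i++) { /* simulated work */ }
    }
}

public class Beta
{
    public void Counter()
    {
        for (int i = 0; i < 10000000; i++) { /* simulated work */ }
    }
}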
Example #2
    static void Main()
    {
        Alpha objA = new Alpha(4);
        Beta objB = new Beta(9);

        // First, demonstrate contravariance.
        // Declare a SomeOp<Alpha> delegate and set it to the IsEven method.
        SomeOp<Alpha> checkIt = IsEven;

        // Declare a SomeOp<Beta> delegate.
        SomeOp<Beta> checkIt2;

        // Now assign the SomeOp<Alpha> delegate to the SomeOp<Beta> delegate.
        // *** This is legal only because of contravariance. ***
        checkIt2 = checkIt;

        // Call the method through the delegate.
        Console.WriteLine(checkIt2(objB));

        // Next, demonstrate covariance.
        // Start by declaring two AnotherOp delegates.
        // Here the return type is Beta and the parameter type is Alpha.
        // Notice that modifyIt is set to the ChangeIt method.
        AnotherOp<Beta, Alpha> modifyIt = ChangeIt;

        // Here the return type is Alpha and the parameter type is also Alpha.
        AnotherOp<Alpha, Alpha> modifyIt2;

        // Now assign modifyIt to modifyIt2.
        // *** This is legal only because of covariance. ***
        modifyIt2 = modifyIt;

        // Call the method and display the result.
        objA = modifyIt2(objA);
        Console.WriteLine(objA.Val);
    }
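The declarations this Main relies on are not part of the snippet. A minimal sketch of what they might look like, inferred from the calls above (the class and method bodies are assumptions, not the original source; only the variance annotations are required for the assignments to compile):

// Hypothetical supporting declarations for the variance demo above.
class Alpha
{
    public int Val;
    public Alpha(int v) { Val = v; }
}

class Beta : Alpha
{
    public Beta(int v) : base(v) { }
}

// Contravariant in T: a SomeOp<Alpha> may be assigned to a SomeOp<Beta> variable.
delegate bool SomeOp<in T>(T obj);

// Covariant in R and contravariant in T: an AnotherOp<Beta, Alpha>
// may be assigned to an AnotherOp<Alpha, Alpha> variable.
delegate R AnotherOp<out R, in T>(T obj);

// Presumed static methods in the same class as Main:
// static bool IsEven(Alpha v) { return (v.Val % 2) == 0; }
// static Beta ChangeIt(Alpha v) { return new Beta(v.Val + 2); }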
Example #3
		// Sample data from a DINA/NIDA model and then use Infer.NET to recover the parameters.
		public void Run()
		{
			InferenceEngine engine = new InferenceEngine();
			if (!(engine.Algorithm is ExpectationPropagation))
			{
				Console.WriteLine("This example only runs with Expectation Propagation");
				return;
			}

			bool useDina = true;
			Beta slipPrior = new Beta(1, 10);
			Beta guessPrior = new Beta(1, 10);
			Rand.Restart(0);
			int nStudents = 100;
			int nQuestions = 20;
			int nSkills = 3;
			int[][] skillsRequired = new int[nQuestions][];
			for (int q = 0; q < nQuestions; q++) {
				// each question requires a random set of skills
				int[] skills = Rand.Perm(nSkills);
				int n = Rand.Int(nSkills)+1;
				skillsRequired[q] = Util.ArrayInit(n, i => skills[i]);
				Console.WriteLine("skillsRequired[{0}] = {1}", q, Util.CollectionToString(skillsRequired[q]));
			}
			double[] pSkill, slip, guess;
			bool[][] hasSkill;
			VariableArray<double> slipVar, guessVar, pSkillVar;
			VariableArray<VariableArray<bool>,bool[][]> hasSkillVar;
			if (useDina) {
				bool[][] responses = DinaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				DinaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			} else {
				bool[][] responses = NidaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
				NidaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
			}

			engine.NumberOfIterations = 10;
			Bernoulli[][] hasSkillPost = engine.Infer<Bernoulli[][]>(hasSkillVar);
			int numErrors = 0;
			for (int i = 0; i < nStudents; i++) {
				for (int s = 0; s < nSkills; s++) {
					if (hasSkill[i][s] != (hasSkillPost[i][s].LogOdds > 0)) numErrors++;
				}
			}
			Console.WriteLine("{0:0}% of skills recovered correctly", 100.0 - 100.0*numErrors/(nStudents*nSkills));
			Beta[] pSkillPost = engine.Infer<Beta[]>(pSkillVar);
			Beta[] slipPost = engine.Infer<Beta[]>(slipVar);
			Beta[] guessPost = engine.Infer<Beta[]>(guessVar);
			for (int s = 0; s < nSkills; s++) {
				Console.WriteLine("pSkill[{0}] = {1} (sampled from {2})", s, pSkillPost[s], pSkill[s].ToString("g4"));				
			}
			for (int i = 0; i < Math.Min(3,slipPost.Length); i++)	{
				Console.WriteLine("slip[{0}] = {1} (sampled from {2})", i, slipPost[i], slip[i].ToString("g4"));			 
			}
			for (int i = 0; i < Math.Min(3,guessPost.Length); i++) {
				Console.WriteLine("guess[{0}] = {1} (sampled from {2})", i, guessPost[i], guess[i].ToString("g4"));
			}
		}
Example #4
		/// <summary>
		/// Evidence message for EP
		/// </summary>
		/// <param name="sample">Constant value for 'sample'.</param>
		/// <param name="p">Incoming message from 'p'.</param>
		/// <param name="trialCount">Incoming message from 'trialCount'.</param>
		/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
		/// <remarks><para>
		/// The formula for the result is <c>log(sum_(p,trialCount) p(p,trialCount) factor(sample,trialCount,p))</c>.
		/// </para></remarks>
		public static double LogAverageFactor(int sample, Beta p, Discrete trialCount)
		{
			double logZ = Double.NegativeInfinity;
			for (int n = 0; n < trialCount.Dimension; n++)
			{
				logZ = MMath.LogSumExp(logZ, trialCount.GetLogProb(n) + LogAverageFactor(sample, p, n));
			}
			return logZ;
		}
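MMath.LogSumExp accumulates the marginalisation over trialCount in log space, which avoids underflow when the individual terms are tiny. For reference, a minimal standalone two-argument version of the trick (illustrative only, not the actual MMath implementation):

// Returns log(exp(a) + exp(b)) without overflow/underflow; illustrative sketch only.
static double LogSumExpPair(double a, double b)
{
    if (double.IsNegativeInfinity(a)) return b;  // log(0 + exp(b)) = b
    if (double.IsNegativeInfinity(b)) return a;
    double max = Math.Max(a, b);
    return max + Math.Log(Math.Exp(a - max) + Math.Exp(b - max));
}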
Example #5
		public void Infer(bool[] treated, bool[] placebo)
		{
			// Set the observed values
			numberPlacebo.ObservedValue = placebo.Length;
			numberTreated.ObservedValue = treated.Length;
			placeboGroupOutcomes.ObservedValue = placebo;
			treatedGroupOutcomes.ObservedValue = treated;

			// Infer the hidden values
			posteriorTreatmentIsEffective = engine.Infer<Bernoulli>(isEffective);
			posteriorProbIfPlacebo = engine.Infer<Beta>(probIfPlacebo);
			posteriorProbIfTreated = engine.Infer<Beta>(probIfTreated);
		}
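A hypothetical call site for this method; the surrounding model class, its name, and the outcome data below are assumptions for illustration, not part of the snippet:

// Hypothetical usage of the Infer method above.
bool[] treatedGroup = { true, true, true, true, false, false };
bool[] placeboGroup = { false, false, true, false, false, false };

var model = new ClinicalTrialModel();   // assumed name of the class that defines Infer
model.Infer(treatedGroup, placeboGroup);

// posteriorTreatmentIsEffective is the Bernoulli assigned inside Infer above.
Console.WriteLine("P(treatment effective) = " + model.posteriorTreatmentIsEffective.GetProbTrue());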
Example #6
		private static void drawBetaDistribution(ListBox lb, Beta dist)
		{
			lb.Items.Clear();
			int numItems = (int)(lb.Width/2.0);
			double max = 6.0;
			double mult = ((double)lb.Height) / max;
			double inc = 1.0 / ((double)(numItems-1));
			double curr = 0.0;
			lb.Margin = new Thickness(0);
			for (int i=0; i < numItems; i++)
			{
				if (curr > 1.0)
					curr = 1.0;
				double d = Math.Exp(dist.GetLogProb(curr));
				double height = mult * d;
				lb.Items.Add(new Rectangle() {Margin = new Thickness(-2,0,0,0), Height=height, Width=2, Fill= Brushes.Yellow, ClipToBounds = true });
				curr += inc;
			}
		}
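A hypothetical call site, feeding a posterior from an inference run (as in Example #5) into this renderer; the ListBox instance name is an assumption:

// Hypothetical usage; probabilityListBox is assumed to be a ListBox defined in the window.
Beta posterior = engine.Infer<Beta>(probIfTreated);
drawBetaDistribution(probabilityListBox, posterior);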
Example #7
		private static void drawBetaDistribution(ListBox lb, Beta dist)
		{
			lb.Items.Clear();
			int numItems = (int)(lb.ActualWidth/3.0);
			double max = 6.0;
			double mult = ((double)lb.ActualHeight) / max;
			double inc = 1.0 / ((double)(numItems-1));

			double curr = 0.0;
			for (int i=0; i < numItems; i++)
			{
				if (curr > 1.0)
					curr = 1.0;
				double d = Math.Exp(dist.GetLogProb(curr));
				double height = mult * d;
				lb.Items.Add(new Rectangle() { Height=height, Width=2, Fill= Yellow, VerticalAlignment= VerticalAlignment.Bottom});
				curr += inc;
			}
		}
Example #8
		private static void drawBetaDistribution(Rectangle rect, Beta dist)
		{
			GradientStopCollection gsc = new GradientStopCollection();
			int numStops = 21;
			double mean = dist.GetMean();
			double meanDensity = Math.Exp(dist.GetLogProb(mean));
			double inc = 1.0 / (numStops-1);
			double curr = 0.0;
			double maxLogProb = Double.MinValue;
			double minLogProb = -5.0;
			for (int i=0; i < numStops; i++)
			{
				double logProb = dist.GetLogProb(curr);
				if (logProb > maxLogProb) maxLogProb = logProb;
				curr += inc;
			}
			if (maxLogProb <= minLogProb)
				maxLogProb = minLogProb + 1.0;
			double diff = maxLogProb - minLogProb;
			double mult =  1.0 / (maxLogProb - minLogProb);
			curr = 0.0;
			double blueLeft = 0; double blueRight = 0;
			double redLeft = 255; double redRight = 255;
			double greenLeft = 255; double greenRight = 255;

			for (int i=0; i < numStops; i++)
			{
				double red, green, blue;
				double logProb = dist.GetLogProb(curr);
				if (logProb < minLogProb) logProb = minLogProb;
				double level = mult * (logProb - minLogProb);
				red = level * (mean * redRight + (1.0 - mean) * redLeft);
				green = level * (mean * greenRight + (1.0 - mean) * greenLeft);
				blue = level * (mean * blueRight + (1.0 - mean) * blueLeft);
				byte redb = red < 0.0 ? (byte)0 : red > 255.0 ? (byte)255 : (byte)red;
				byte greenb = green < 0.0 ? (byte)0 : green > 255.0 ? (byte)255 : (byte)green;
				byte blueb = blue < 0.0 ? (byte)0 : blue > 255.0 ? (byte)255 : (byte)blue;
				gsc.Add(new GradientStop { Color = new Color() { A = 255, R = redb, G = greenb, B = blueb }, Offset = curr });
				curr += inc;
			}
			LinearGradientBrush brush = rect.Fill as LinearGradientBrush;
			brush.GradientStops = gsc;
		}
Example #9
		// Sample data from the DINA model
		public static bool[][] DinaSample(int nStudents, int nSkills, int[][] skillsRequired, Beta slipPrior, Beta guessPrior, 
			out double[] pSkillOut, out double[] slip, out double[] guess, out bool[][] hasSkill)
		{
			int nQuestions = skillsRequired.Length;
			double[] pSkill = Util.ArrayInit(nSkills, q => Rand.Double());
			slip = Util.ArrayInit(nQuestions, q => slipPrior.Sample());
			guess = Util.ArrayInit(nQuestions, q => guessPrior.Sample());
			hasSkill = Util.ArrayInit(nStudents, t => Util.ArrayInit(nSkills, s => Rand.Double() < pSkill[s]));
			bool[][] responses = new bool[nStudents][];
			for (int t = 0; t < nStudents; t++) {
				responses[t] = new bool[nQuestions];
				for (int q = 0; q < nQuestions; q++) {
					bool hasAllSkills = Factor.AllTrue(Factor.Subarray(hasSkill[t], skillsRequired[q]));
					if (hasAllSkills) responses[t][q] = (Rand.Double() > slip[q]);
					else responses[t][q] = (Rand.Double() < guess[q]);
				}
			}
			pSkillOut = pSkill;
			return responses;
		}
Example #10
        public void ValidateToString()
        {
            var n = new Beta(1d, 2d);

            Assert.AreEqual("Beta(α = 1, β = 2)", n.ToString());
        }
Example #11
        /// <summary>Evidence message for EP.</summary>
        /// <param name="logistic">Incoming message from <c>logistic</c>.</param>
        /// <param name="x">Incoming message from <c>x</c>.</param>
        /// <param name="falseMsg">Buffer <c>falseMsg</c>.</param>
        /// <returns>Logarithm of the factor's average value across the given argument distributions.</returns>
        /// <remarks>
        ///   <para>The formula for the result is <c>log(sum_(logistic,x) p(logistic,x) factor(logistic,x))</c>.</para>
        /// </remarks>
        public static double LogAverageFactor(Beta logistic, Gaussian x, Gaussian falseMsg)
        {
            // return log(int_y int_x delta(y - Logistic(x)) Beta(y) Gaussian(x) dx dy)
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            if (logistic.TrueCount == 2 && logistic.FalseCount == 1)
            {
                // shortcut for common case
                return(Math.Log(2 * MMath.LogisticGaussian(m, v)));
            }
            else if (logistic.TrueCount == 1 && logistic.FalseCount == 2)
            {
                return(Math.Log(2 * MMath.LogisticGaussian(-m, v)));
            }
            else
            {
                // logistic(sigma(x)) N(x;m,v)
                // = sigma(x)^(a-1) sigma(-x)^(b-1) N(x;m,v) gamma(a+b)/gamma(a)/gamma(b)
                // = e^((a-1)x) sigma(-x)^(a+b-2) N(x;m,v)
                // = sigma(-x)^(a+b-2) N(x;m+(a-1)v,v) exp((a-1)m + (a-1)^2 v/2)
                // int_x logistic(sigma(x)) N(x;m,v) dx
                // =approx (int_x sigma(-x)/falseMsg(x) falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v))^(a+b-2)
                //       * (int_x falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v))^(1 - (a+b-2))
                //       *  exp((a-1)m + (a-1)^2 v/2) gamma(a+b)/gamma(a)/gamma(b)
                // This formula comes from (66) in Minka (2005)
                // Alternatively,
                // =approx (int_x falseMsg(x)/sigma(-x) falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v))^(-(a+b-2))
                //       * (int_x falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v))^(1 + (a+b-2))
                //       *  exp((a-1)m + (a-1)^2 v/2) gamma(a+b)/gamma(a)/gamma(b)
                double   tc1   = logistic.TrueCount - 1;
                double   fc1   = logistic.FalseCount - 1;
                Gaussian prior = new Gaussian(m + tc1 * v, v);
                if (tc1 + fc1 < 0)
                {
                    // numerator2 = int_x falseMsg(x)^(a+b-1) N(x;m+(a-1)v,v) dx
                    double   numerator2 = prior.GetLogAverageOfPower(falseMsg, tc1 + fc1 + 1);
                    Gaussian prior2 = prior * (falseMsg ^ (tc1 + fc1 + 1));
                    double   mp, vp;
                    prior2.GetMeanAndVariance(out mp, out vp);
                    // numerator = int_x (1+exp(x)) falseMsg(x)^(a+b-1) N(x;m+(a-1)v,v) dx / int_x falseMsg(x)^(a+b-1) N(x;m+(a-1)v,v) dx
                    double numerator = Math.Log(1 + Math.Exp(mp + 0.5 * vp));
                    // denominator = int_x falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v) dx
                    double denominator = prior.GetLogAverageOfPower(falseMsg, tc1 + fc1);
                    return(-(tc1 + fc1) * (numerator + numerator2 - denominator) + denominator + (tc1 * m + tc1 * tc1 * v * 0.5) - logistic.GetLogNormalizer());
                }
                else
                {
                    // numerator2 = int_x falseMsg(x)^(a+b-3) N(x;m+(a-1)v,v) dx
                    double   numerator2 = prior.GetLogAverageOfPower(falseMsg, tc1 + fc1 - 1);
                    Gaussian prior2 = prior * (falseMsg ^ (tc1 + fc1 - 1));
                    double   mp, vp;
                    prior2.GetMeanAndVariance(out mp, out vp);
                    // numerator = int_x sigma(-x) falseMsg(x)^(a+b-3) N(x;m+(a-1)v,v) dx / int_x falseMsg(x)^(a+b-3) N(x;m+(a-1)v,v) dx
                    double numerator = Math.Log(MMath.LogisticGaussian(-mp, vp));
                    // denominator = int_x falseMsg(x)^(a+b-2) N(x;m+(a-1)v,v) dx
                    double denominator = prior.GetLogAverageOfPower(falseMsg, tc1 + fc1);
                    return((tc1 + fc1) * (numerator + numerator2 - denominator) + denominator + (tc1 * m + tc1 * tc1 * v * 0.5) - logistic.GetLogNormalizer());
                }
            }
        }
Example #12
		public static double LogEvidenceRatio(Bernoulli sample, Beta probTrue)
		{
			return 0.0;
		}
Example #13
 public void ValidateToString()
 {
     var n = new Beta(1.0, 2.0);
     Assert.AreEqual("Beta(A = 1, B = 2)", n.ToString());
 }
Example #14
 public void ValidateDensity(
     [Values(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double a, 
     [Values(0.0, 0.0, 0.0, 0.1, 0.1, 0.1, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 100, 100, 100, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0)] double b, 
     [Values(0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, -1.0, 2.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0)] double x, 
     [Values(Double.PositiveInfinity, 0.0, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, 0.0, Double.PositiveInfinity, 1.0, 1.0, 1.0, 0.0, 0.03515625, 9.0, 0.0, 0.0, 0.0, 1.0881845516040810386311829462908430145307026037926335e-21, 0.0, Double.PositiveInfinity, 0.0, 0.0, 0.0, 0.0, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, 0.0, Double.PositiveInfinity)] double pdf)
 {
     var n = new Beta(a, b);
     AssertHelpers.AlmostEqual(pdf, n.Density(x), 13);
 }
Example #15
        public void SetShapeBFailsWithNegativeB()
        {
            var n = new Beta(1.0, 1.0);

            Assert.Throws <ArgumentOutOfRangeException>(() => n.B = -1.0);
        }
Example #16
#pragma warning disable 429
#endif

        /// <summary>VMP message to <c>x</c>.</summary>
        /// <param name="logistic">Incoming message from <c>logistic</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <param name="x">Incoming message from <c>x</c>.</param>
        /// <param name="to_x">Previous outgoing message to <c>x</c>.</param>
        /// <param name="a">Buffer <c>a</c>.</param>
        /// <returns>The outgoing VMP message to the <c>x</c> argument.</returns>
        /// <remarks>
        ///   <para>The outgoing message is the factor viewed as a function of <c>x</c> with <c>logistic</c> integrated out. The formula is <c>sum_logistic p(logistic) factor(logistic,x)</c>.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="logistic" /> is not a proper distribution.</exception>
        public static Gaussian XAverageLogarithm([SkipIfUniform] Beta logistic, /*[Proper, SkipIfUniform]*/ Gaussian x, Gaussian to_x, double a)
        {
            if (logistic.IsPointMass)
            {
                return(LogisticOp.XAverageLogarithm(logistic.Point));
            }
            // f(x) = sigma(x)^(a-1) sigma(-x)^(b-1)
            //      = sigma(x)^(a+b-2) exp(-x(b-1))
            // since sigma(-x) = sigma(x) exp(-x)

            double scale = logistic.TrueCount + logistic.FalseCount - 2;

            if (scale == 0.0)
            {
                return(Gaussian.Uniform());
            }
            double shift = -(logistic.FalseCount - 1);
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            double sa;

            if (double.IsPositiveInfinity(v))
            {
                a  = 0.5;
                sa = MMath.Logistic(m);
            }
            else
            {
                sa = MMath.Logistic(m + (1 - 2 * a) * v * 0.5);
            }
            double precision = a * a + (1 - 2 * a) * sa;
            // meanTimesPrecision = m*a*a + 1-2*a*sa;
            double meanTimesPrecision = m * precision + 1 - sa;
            //double vf = 1/(a*a + (1-2*a)*sa);
            //double mf = m + vf*(true ? 1-sa : sa);
            //double precision = 1/vf;
            //double meanTimesPrecision = mf*precision;
            Gaussian result = Gaussian.FromNatural(scale * meanTimesPrecision + shift, scale * precision);
            double   step   = (LogisticOp_SJ99.global_step == 0.0) ? 1.0 : (Rand.Double() * LogisticOp_SJ99.global_step);

            // random damping helps convergence, especially with parallel updates
            if (false && !x.IsPointMass)
            {
                // if the update would change the sign of 1-2*sa, send a message to make sa=0.5
                double newPrec = x.Precision - to_x.Precision + result.Precision;
                double newv    = 1 / newPrec;
                double newm    = newv * (x.MeanTimesPrecision - to_x.MeanTimesPrecision + result.MeanTimesPrecision);
                double newarg  = newm + (1 - 2 * a) * newv * 0.5;
                if ((sa < 0.5 && newarg > 0) || (sa > 0.5 && newarg < 0))
                {
                    // send a message to make newarg=0
                    // it is sufficient to make (x.MeanTimesPrecision + step*(result.MeanTimesPrecision - to_x.MeanTimesPrecision) + 0.5-a) = 0
                    double mpOffset   = x.MeanTimesPrecision + 0.5 - a;
                    double precOffset = x.Precision;
                    double mpScale    = result.MeanTimesPrecision - to_x.MeanTimesPrecision;
                    double precScale  = result.Precision - to_x.Precision;
                    double arg        = m + (1 - 2 * a) * v * 0.5;
                    //arg = 0;
                    step = (arg * precOffset - mpOffset) / (mpScale - arg * precScale);
                    //step = (a-0.5-x.MeanTimesPrecision)/(result.MeanTimesPrecision - to_x.MeanTimesPrecision);
                    //Console.WriteLine(step);
                }
            }
            if (step != 1.0)
            {
                result.Precision          = step * result.Precision + (1 - step) * to_x.Precision;
                result.MeanTimesPrecision = step * result.MeanTimesPrecision + (1 - step) * to_x.MeanTimesPrecision;
            }
            return(result);
        }
Example #17
 public static Beta LogisticInit()
 {
     return(Beta.Uniform());
 }
Example #18
        /// <summary>Evidence message for VMP.</summary>
        /// <param name="logistic">Incoming message from <c>logistic</c>.</param>
        /// <param name="x">Incoming message from <c>x</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <param name="to_logistic">Previous outgoing message to <c>logistic</c>.</param>
        /// <param name="a">Buffer <c>a</c>.</param>
        /// <returns>Zero.</returns>
        /// <remarks>
        ///   <para>In Variational Message Passing, the evidence contribution of a deterministic factor is zero. Adding up these values across all factors and variables gives the log-evidence estimate for VMP.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="x" /> is not a proper distribution.</exception>
        public static double AverageLogFactor(Beta logistic, [Proper, SkipIfUniform] Gaussian x, Beta to_logistic, double a)
        {
            double b = logistic.FalseCount;
            double scale = logistic.TrueCount + b - 2;
            double shift = -(b - 1);
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            double boundOnLog1PlusExp = a * a * v / 2.0 + MMath.Log1PlusExp(m + (1.0 - 2.0 * a) * v / 2.0);
            double boundOnLogSigma    = m - boundOnLog1PlusExp;

            return(scale * boundOnLogSigma + shift * m - logistic.GetLogNormalizer() - to_logistic.GetAverageLog(logistic));
        }
Example #19
        /// <summary>Evidence message for VMP.</summary>
        /// <param name="logistic">Incoming message from <c>logistic</c>.</param>
        /// <param name="x">Incoming message from <c>x</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <param name="to_logistic">Previous outgoing message to <c>logistic</c>.</param>
        /// <returns>Zero.</returns>
        /// <remarks>
        ///   <para>In Variational Message Passing, the evidence contribution of a deterministic factor is zero. Adding up these values across all factors and variables gives the log-evidence estimate for VMP.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="x" /> is not a proper distribution.</exception>
        public static double AverageLogFactor(Beta logistic, [Proper, SkipIfUniform] Gaussian x, Beta to_logistic)
        {
            double a     = logistic.TrueCount;
            double b     = logistic.FalseCount;
            double scale = a + b - 2;
            double shift = -(b - 1);
            // sigma(x) >= sigma(t) exp((x-t)/2 - a/2*(x^2 - t^2))
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            double t               = Math.Sqrt(m * m + v);
            double lambda          = (t == 0) ? 0.25 : Math.Tanh(t / 2) / (2 * t);
            double boundOnLogSigma = MMath.LogisticLn(t) + (m - t) / 2.0 - .5 * lambda * (m * m + v - t * t);

            return(scale * boundOnLogSigma + shift * m - logistic.GetLogNormalizer() - to_logistic.GetAverageLog(logistic));
        }
Example #20
 /// <summary>Evidence message for EP.</summary>
 /// <param name="logistic">Incoming message from <c>logistic</c>.</param>
 /// <param name="x">Constant value for <c>x</c>.</param>
 /// <returns>Logarithm of the factor's average value across the given argument distributions.</returns>
 /// <remarks>
 ///   <para>The formula for the result is <c>log(sum_(logistic) p(logistic) factor(logistic,x))</c>.</para>
 /// </remarks>
 public static double LogAverageFactor(Beta logistic, double x)
 {
     return(logistic.GetLogProb(MMath.Logistic(x)));
 }
Example #21
        //-- VMP -------------------------------------------------------------------------------------------------

        /// <summary>Evidence message for VMP.</summary>
        /// <param name="x">Incoming message from <c>x</c>. Must be a proper distribution. If uniform, the result will be uniform.</param>
        /// <param name="logistic">Incoming message from <c>logistic</c>.</param>
        /// <param name="to_logistic">Previous outgoing message to <c>logistic</c>.</param>
        /// <returns>Zero.</returns>
        /// <remarks>
        ///   <para>In Variational Message Passing, the evidence contribution of a deterministic factor is zero. Adding up these values across all factors and variables gives the log-evidence estimate for VMP.</para>
        /// </remarks>
        /// <exception cref="ImproperMessageException">
        ///   <paramref name="x" /> is not a proper distribution.</exception>
        //[Skip]
        public static double AverageLogFactor([Proper, SkipIfUniform] Gaussian x, Beta logistic, Beta to_logistic)
        {
            double m, v;

            x.GetMeanAndVariance(out m, out v);
            double l1pe = v == 0 ? MMath.Log1PlusExp(m) : MMath.Log1PlusExpGaussian(m, v);

            return((logistic.TrueCount - 1.0) * (m - l1pe) + (logistic.FalseCount - 1.0) * (-l1pe) - logistic.GetLogNormalizer() - to_logistic.GetAverageLog(logistic));
        }
Example #22
 public void ValidateMedianThrowsNotSupportedException()
 {
     var n = new Beta(0.0, 1.0);
     Assert.Throws<NotSupportedException>(() => { var m = n.Median; });
 }
Example #23
 public void CanSample()
 {
     var n = new Beta(2.0, 3.0);
     n.Sample();
 }
Example #24
        public void ValidateMean(double a, double b, double mean)
        {
            var n = new Beta(a, b);

            Assert.AreEqual(mean, n.Mean);
        }
Example #25
 public void ValidateCumulativeDistribution(
     [Values(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 9.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double a, 
     [Values(0.0, 0.0, 0.0, 0.1, 0.1, 0.1, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 100, 100, 100, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0)] double b, 
     [Values(0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0)] double x, 
     [Values(0.5, 0.5, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 0.0, 0.5, 1.0, 0.0, 0.001953125, 1.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 0.0, 0.0, 1.0)] double cdf)
 {
     var n = new Beta(a, b);
     AssertHelpers.AlmostEqual(cdf, n.CumulativeDistribution(x), 13);
 }
Example #26
        public void ValidateEntropy(double a, double b, double entropy)
        {
            var n = new Beta(a, b);

            AssertHelpers.AlmostEqual(entropy, n.Entropy, 14);
        }
Example #27
 public static Beta LogisticAverageConditionalInit()
 {
     return(Beta.Uniform());
 }
Example #28
        public void ValidateSkewness(double a, double b, double skewness)
        {
            var n = new Beta(a, b);

            AssertHelpers.AlmostEqual(skewness, n.Skewness, 15);
        }
Example #29
		/// <summary>
		/// Evidence message for EP
		/// </summary>
		/// <param name="sample">Constant value for 'sample'.</param>
		/// <param name="probTrue">Incoming message from 'probTrue'.</param>
		/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
		/// <remarks><para>
		/// The formula for the result is <c>log(sum_(probTrue) p(probTrue) factor(sample,probTrue))</c>.
		/// </para></remarks>
		public static double LogAverageFactor(bool sample, Beta probTrue)
		{
			Bernoulli to_sample = SampleAverageConditional(probTrue);
			return to_sample.GetLogProb(sample);
		}
Example #30
        public void ValidateMode(double a, double b, double mode)
        {
            var n = new Beta(a, b);

            Assert.AreEqual(mode, n.Mode);
        }
Example #31
        /// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BernoulliFromBetaOp"]/message_doc[@name="ProbTrueAverageConditional(Bernoulli, Beta)"]/*'/>
        public static Beta ProbTrueAverageConditional([SkipIfUniform] Bernoulli sample, Beta probTrue)
        {
            // this code is similar to DiscreteFromDirichletOp.PAverageConditional()
            if (probTrue.IsPointMass)
            {
                return(ProbTrueAverageConditional(sample, probTrue.Point));
            }
            if (sample.IsPointMass)
            {
                // shortcut
                return(ProbTrueConditional(sample.Point));
            }
            if (!probTrue.IsProper())
            {
                throw new ImproperMessageException(probTrue);
            }
            // q(x) is the distribution stored in this.X.
            // q(p) is the distribution stored in this.P.
            // f(x,p) is the factor.
            // Z = sum_x q(x) int_p f(x,p)*q(p) = q(false)*E[1-p] + q(true)*E[p]
            // Ef[p] = 1/Z sum_x q(x) int_p p*f(x,p)*q(p) = 1/Z (q(false)*E[p(1-p)] + q(true)*E[p^2])
            // Ef[p^2] = 1/Z sum_x q(x) int_p p^2*f(x,p)*q(p) = 1/Z (q(false)*E[p^2(1-p)] + q(true)*E[p^3])
            // var_f(p) = Ef[p^2] - Ef[p]^2
            double mo  = probTrue.GetMean();
            double m2o = probTrue.GetMeanSquare();
            double pT  = sample.GetProbTrue();
            double pF  = sample.GetProbFalse();
            double Z   = pF * (1 - mo) + pT * mo;
            double m   = pF * (mo - m2o) + pT * m2o;

            m = m / Z;
            if (!Beta.AllowImproperSum)
            {
                if (pT < 0.5)
                {
                    double inc = probTrue.TotalCount * (mo / m - 1);
                    return(new Beta(1, 1 + inc));
                }
                else
                {
                    double inc = probTrue.TotalCount * ((1 - mo) / (1 - m) - 1);
                    return(new Beta(1 + inc, 1));
                }
            }
            else
            {
                double m3o = probTrue.GetMeanCube();
                double m2  = pF * (m2o - m3o) + pT * m3o;
                m2 = m2 / Z;
                Beta result = Beta.FromMeanAndVariance(m, m2 - m * m);
                result.SetToRatio(result, probTrue);
                return(result);
            }
        }
Example #32
        public void ValidateMedianThrowsNotSupportedException()
        {
            var n = new Beta(0.0, 1.0);

            Assert.Throws <NotSupportedException>(() => { var m = n.Median; });
        }
Example #33
        public void ValidateMaximum()
        {
            var n = new Beta(1.0, 1.0);

            Assert.AreEqual(1.0, n.Maximum);
        }
Example #34
 public void CanSampleStatic()
 {
     Beta.Sample(new Random(), 2.0, 3.0);
 }
Example #35
 /// <summary>
 /// Returns a hash code for this instance.
 /// </summary>
 /// <returns>A hash code for this instance, suitable for use in hashing algorithms and data structures like a hash table.</returns>
 public override int GetHashCode()
 {
     return(Alpha.GetHashCode() ^ Beta.GetHashCode() ^ Gamma.GetHashCode());
 }
Example #36
        public void CanSampleSequenceStatic()
        {
            var ied = Beta.Samples(new Random(), 2.0, 3.0);

            ied.Take(5).ToArray();
        }
Example #37
 public void FailSampleSequenceStatic()
 {
     Assert.That(() => Beta.Samples(new Random(0), 1.0, -1.0).First(), Throws.ArgumentException);
 }
Example #38
 public void FailSampleSequenceStatic()
 {
     Assert.Throws <ArgumentOutOfRangeException>(() => Beta.Samples(new Random(), 1.0, -1.0).First());
 }
Example #39
 public void ValidateMaximum()
 {
     var n = new Beta(1.0, 1.0);
     Assert.AreEqual(1.0, n.Maximum);
 }
Example #40
		/// <summary>
		/// VMP message to 'mean'
		/// </summary>
		/// <param name="mean">Incoming message from 'mean'. Must be a proper distribution.  If uniform, the result will be uniform. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="totalCount">Constant value for 'totalCount'.</param>
		/// <param name="prob">Constant value for 'prob'.</param>
		/// <param name="to_mean">Previous outgoing message to 'Mean'.</param>
		/// <returns>The outgoing VMP message to the 'mean' argument</returns>
		/// <remarks><para>
		/// The outgoing message is the factor viewed as a function of 'mean' conditioned on the given values.
		/// </para></remarks>
		/// <exception cref="ImproperMessageException"><paramref name="mean"/> is not a proper distribution</exception>
		public static Beta MeanAverageLogarithm([Proper] Beta mean, double totalCount, double prob, Beta to_mean)
		{
			return MeanAverageLogarithm(mean, Gamma.PointMass(totalCount), Beta.PointMass(prob), to_mean);
		}
Example #41
 public void CanSampleSequence()
 {
     var n = new Beta(2.0, 3.0);
     var ied = n.Samples();
     ied.Take(5).ToArray();
 }
Example #42
		/// <summary>
		/// VMP message to 'mean'
		/// </summary>
		/// <param name="mean">Incoming message from 'mean'. Must be a proper distribution.  If uniform, the result will be uniform. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="totalCount">Incoming message from 'totalCount'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="prob">Incoming message from 'prob'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="to_mean">Previous outgoing message to 'Mean'.</param>
		/// <returns>The outgoing VMP message to the 'mean' argument</returns>
		/// <remarks><para>
		/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'mean'.
		/// The formula is <c>exp(sum_(totalCount,prob) p(totalCount,prob) log(factor(prob,mean,totalCount)))</c>.
		/// </para></remarks>
		/// <exception cref="ImproperMessageException"><paramref name="mean"/> is not a proper distribution</exception>
		/// <exception cref="ImproperMessageException"><paramref name="totalCount"/> is not a proper distribution</exception>
		/// <exception cref="ImproperMessageException"><paramref name="prob"/> is not a proper distribution</exception>
		public static Beta MeanAverageLogarithm([Proper] Beta mean, [Proper] Gamma totalCount, [SkipIfUniform] Beta prob, Beta to_mean)
		{
			// Calculate gradient using method for DirichletOp
			double ELogP, ELogOneMinusP;
			prob.GetMeanLogs(out ELogP, out ELogOneMinusP);
			Vector gradS = DirichletOp.CalculateGradientForMean(
				 Vector.FromArray(new double[] { mean.TrueCount, mean.FalseCount }),
				 totalCount,
				 Vector.FromArray(new double[] { ELogP, ELogOneMinusP }));
			// Project onto a Beta distribution 
			Matrix A = new Matrix(2, 2);
			double c = MMath.Trigamma(mean.TotalCount);
			A[0, 0] = MMath.Trigamma(mean.TrueCount) - c;
			A[1, 0] = A[0, 1] = -c;
			A[1, 1] = MMath.Trigamma(mean.FalseCount) - c;
			Vector theta = GammaFromShapeAndRateOp.twoByTwoInverse(A)*gradS;
			Beta approximateFactor = new Beta(theta[0] + 1, theta[1] + 1);
			if (damping == 0.0)
				return approximateFactor;
			else
				return (approximateFactor^(1-damping)) * (to_mean ^ damping);
		}
Example #43
 public void ValidateDensityLn(
     [Values(0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 9.0, 9.0, 9.0, 9.0, 9.0, 5.0, 5.0, 5.0, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity)] double a, 
     [Values(0.0, 0.0, 0.0, 0.1, 0.1, 0.1, 0.0, 0.0, 0.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 100, 100, 100, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 1.0, 1.0, 1.0, Double.PositiveInfinity, Double.PositiveInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0)] double b, 
     [Values(0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, -1.0, 2.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0, 0.0, 0.5, 1.0)] double x, 
     [Values(Double.PositiveInfinity, Double.NegativeInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.PositiveInfinity, 0.0, 0.0, 0.0, Double.NegativeInfinity, -3.3479528671433430925473664978203611353090199592365458, 2.1972245773362193827904904738450514092949811156454996, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, -51.447830024537682154565870837960406410586196074573801, Double.NegativeInfinity, Double.PositiveInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.PositiveInfinity, Double.PositiveInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.NegativeInfinity, Double.PositiveInfinity)] double pdfln)
 {
     var n = new Beta(a, b);
     AssertHelpers.AlmostEqual(pdfln, n.DensityLn(x), 14);
 }
Example #44
		public static double LogEvidenceRatio(Beta sample, double trueCount, double falseCount) { return 0.0; }
Example #45
 public void CanCreateBeta([Values(0.0, 0.0, 1.0, 1.0, 9.0, 5.0, 1.0, Double.PositiveInfinity, 0.0, Double.PositiveInfinity)] double a, [Values(0.0, 0.1, 0.0, 1.0, 1.0, 100.0, Double.PositiveInfinity, 1.0, Double.PositiveInfinity, 0.0)] double b)
 {
     var n = new Beta(a, b);
     Assert.AreEqual(a, n.A);
     Assert.AreEqual(b, n.B);
 }
Example #46
		public static double LogEvidenceRatio(Beta sample, double mean, double variance) { return 0.0; }
Example #47
        public void CanSample()
        {
            var n = new Beta(2.0, 3.0);

            n.Sample();
        }
Example #48
 public static double LogEvidenceRatio(Beta logistic, Gaussian x, Gaussian falseMsg, [Fresh] Beta to_logistic)
 {
     // always zero when using the stabilized message from LogisticAverageConditional
     return(0.0);
     //return LogAverageFactor(logistic, x, falseMsg) - to_logistic.GetLogAverageOf(logistic);
 }
Example #49
		/// <summary>
		/// VMP message to 'mean'
		/// </summary>
		/// <param name="mean">Incoming message from 'mean'. Must be a proper distribution.  If uniform, the result will be uniform. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="totalCount">Constant value for 'totalCount'.</param>
		/// <param name="prob">Incoming message from 'prob'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="to_mean">Previous outgoing message to 'Mean'.</param>
		/// <returns>The outgoing VMP message to the 'mean' argument</returns>
		/// <remarks><para>
		/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'mean'.
		/// The formula is <c>exp(sum_(prob) p(prob) log(factor(prob,mean,totalCount)))</c>.
		/// </para></remarks>
		/// <exception cref="ImproperMessageException"><paramref name="mean"/> is not a proper distribution</exception>
		/// <exception cref="ImproperMessageException"><paramref name="prob"/> is not a proper distribution</exception>
		public static Beta MeanAverageLogarithm([Proper] Beta mean, double totalCount, [SkipIfUniform] Beta prob, Beta to_mean)
		{
			return MeanAverageLogarithm(mean, Gamma.PointMass(totalCount), prob, to_mean);
		}
Example #50
        public void ValidateDensity(double a, double b, double x, double pdf)
        {
            var n = new Beta(a, b);

            AssertHelpers.AlmostEqual(pdf, n.Density(x), 13);
        }
Example #51
		/// <summary>
		/// Evidence message for EP.
		/// </summary>
		/// <param name="prob">Incoming message from 'prob'.</param>
		/// <param name="mean">Constant value for 'mean'.</param>
		/// <param name="totalCount">Constant value for 'totalCount'.</param>
		/// <returns>Logarithm of the factor's average value across the given argument distributions.</returns>
		/// <remarks><para>
		/// The formula for the result is <c>log(sum_(prob) p(prob) factor(prob,mean,totalCount))</c>.
		/// </para></remarks>
		public static double LogAverageFactor(Beta prob, double mean, double totalCount)
		{
			var g = new Beta(mean*totalCount, (1-mean)*totalCount);
			return g.GetLogAverageOf(prob);
		}
Example #52
        public void ValidateDensityLn(double a, double b, double x, double pdfln)
        {
            var n = new Beta(a, b);

            AssertHelpers.AlmostEqual(pdfln, n.DensityLn(x), 14);
        }
Example #53
		/// <summary>
		/// Evidence message for VMP
		/// </summary>
		/// <param name="sample">Incoming message from 'sample'.</param>
		/// <param name="trueCount">Constant value for 'trueCount'.</param>
		/// <param name="falseCount">Constant value for 'falseCount'.</param>
		/// <param name="to_sample">Outgoing message to 'sample'.</param>
		/// <returns>Average of the factor's log-value across the given argument distributions</returns>
		/// <remarks><para>
		/// The formula for the result is <c>sum_(sample) p(sample) log(factor(sample,trueCount,falseCount))</c>.
		/// Adding up these values across all factors and variables gives the log-evidence estimate for VMP.
		/// </para></remarks>
		public static double AverageLogFactor(Beta sample, double trueCount, double falseCount, [Fresh] Beta to_sample)
		{
			return to_sample.GetAverageLog(sample);
		}
Example #54
        public void ValidateCumulativeDistribution(double a, double b, double x, double cdf)
        {
            var n = new Beta(a, b);

            AssertHelpers.AlmostEqual(cdf, n.CumulativeDistribution(x), 13);
        }
Example #55
		/// <summary>
		/// Evidence message for VMP
		/// </summary>
		/// <param name="sample">Incoming message from 'sample'.</param>
		/// <param name="mean">Constant value for 'mean'.</param>
		/// <param name="variance">Constant value for 'variance'.</param>
		/// <param name="to_sample">Outgoing message to 'sample'.</param>
		/// <returns>Average of the factor's log-value across the given argument distributions</returns>
		/// <remarks><para>
		/// The formula for the result is <c>sum_(sample) p(sample) log(factor(sample,mean,variance))</c>.
		/// Adding up these values across all factors and variables gives the log-evidence estimate for VMP.
		/// </para></remarks>
		public static double AverageLogFactor(Beta sample, double mean, double variance, [Fresh] Beta to_sample)
		{
			return to_sample.GetAverageLog(sample);
		}
Example #56
        public void ValidateToString()
        {
            var n = new Beta(1.0, 2.0);

            Assert.AreEqual("Beta(A = 1, B = 2)", n.ToString());
        }
Example #57
		/// <summary>
		/// EP message to 'probTrue'
		/// </summary>
		/// <param name="sample">Incoming message from 'sample'. Must be a proper distribution.  If uniform, the result will be uniform.</param>
		/// <param name="probTrue">Incoming message from 'probTrue'.</param>
		/// <returns>The outgoing EP message to the 'probTrue' argument</returns>
		/// <remarks><para>
		/// The outgoing message is a distribution matching the moments of 'probTrue' as the random arguments are varied.
		/// The formula is <c>proj[p(probTrue) sum_(sample) p(sample) factor(sample,probTrue)]/p(probTrue)</c>.
		/// </para></remarks>
		/// <exception cref="ImproperMessageException"><paramref name="sample"/> is not a proper distribution</exception>
		public static Beta ProbTrueAverageConditional([SkipIfUniform] Bernoulli sample, Beta probTrue)
		{
			// this code is similar to DiscreteFromDirichletOp.PAverageConditional()
			if (probTrue.IsPointMass) {
				return Beta.Uniform();
			}
			if (sample.IsPointMass) {
				// shortcut
				return ProbTrueConditional(sample.Point);
			}
			if (!probTrue.IsProper()) throw new ImproperMessageException(probTrue);
			// q(x) is the distribution stored in this.X.
			// q(p) is the distribution stored in this.P.
			// f(x,p) is the factor.
			// Z = sum_x q(x) int_p f(x,p)*q(p) = q(false)*E[1-p] + q(true)*E[p]
			// Ef[p] = 1/Z sum_x q(x) int_p p*f(x,p)*q(p) = 1/Z (q(false)*E[p(1-p)] + q(true)*E[p^2])
			// Ef[p^2] = 1/Z sum_x q(x) int_p p^2*f(x,p)*q(p) = 1/Z (q(false)*E[p^2(1-p)] + q(true)*E[p^3])
			// var_f(p) = Ef[p^2] - Ef[p]^2
			double mo = probTrue.GetMean();
			double m2o = probTrue.GetMeanSquare();
			double pT = sample.GetProbTrue();
			double pF = sample.GetProbFalse();
			double Z = pF * (1 - mo) + pT * mo;
			double m = pF * (mo - m2o) + pT * m2o;
			m = m / Z;
			if (!Beta.AllowImproperSum) {
				if (pT < 0.5) {
					double inc = probTrue.TotalCount * (mo / m - 1);
					return new Beta(1, 1 + inc);
				} else {
					double inc = probTrue.TotalCount * ((1 - mo) / (1 - m) - 1);
					return new Beta(1 + inc, 1);
				}
			} else {
				double m3o = probTrue.GetMeanCube();
				double m2 = pF * (m2o - m3o) + pT * m3o;
				m2 = m2 / Z;
				Beta result = Beta.FromMeanAndVariance(m, m2 - m * m);
				result.SetToRatio(result, probTrue);
				return result;
			}
		}
Example #58
        // Sample data from a DINA/NIDA model and then use Infer.NET to recover the parameters.
        public void Run()
        {
            InferenceEngine engine = new InferenceEngine();

            if (!(engine.Algorithm is Algorithms.ExpectationPropagation))
            {
                Console.WriteLine("This example only runs with Expectation Propagation");
                return;
            }

            bool useDina    = true;
            Beta slipPrior  = new Beta(1, 10);
            Beta guessPrior = new Beta(1, 10);

            Rand.Restart(0);
            int nStudents  = 100;
            int nQuestions = 20;
            int nSkills    = 3;

            int[][] skillsRequired = new int[nQuestions][];
            for (int q = 0; q < nQuestions; q++)
            {
                // each question requires a random set of skills
                int[] skills = Rand.Perm(nSkills);
                int   n      = Rand.Int(nSkills) + 1;
                skillsRequired[q] = Util.ArrayInit(n, i => skills[i]);
                Console.WriteLine("skillsRequired[{0}] = {1}", q, Util.CollectionToString(skillsRequired[q]));
            }

            double[] pSkill, slip, guess;
            bool[][] hasSkill;
            VariableArray <double> slipVar, guessVar, pSkillVar;
            VariableArray <VariableArray <bool>, bool[][]> hasSkillVar;

            if (useDina)
            {
                bool[][] responses = DinaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
                DinaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
            }
            else
            {
                bool[][] responses = NidaSample(nStudents, nSkills, skillsRequired, slipPrior, guessPrior, out pSkill, out slip, out guess, out hasSkill);
                NidaModel(responses, nSkills, skillsRequired, slipPrior, guessPrior, out pSkillVar, out slipVar, out guessVar, out hasSkillVar);
            }

            engine.NumberOfIterations = 10;
            Bernoulli[][] hasSkillPost = engine.Infer <Bernoulli[][]>(hasSkillVar);
            int           numErrors    = 0;

            for (int i = 0; i < nStudents; i++)
            {
                for (int s = 0; s < nSkills; s++)
                {
                    if (hasSkill[i][s] != (hasSkillPost[i][s].LogOdds > 0))
                    {
                        numErrors++;
                    }
                }
            }

            Console.WriteLine("{0:0}% of skills recovered correctly", 100.0 - 100.0 * numErrors / (nStudents * nSkills));
            Beta[] pSkillPost = engine.Infer <Beta[]>(pSkillVar);
            Beta[] slipPost   = engine.Infer <Beta[]>(slipVar);
            Beta[] guessPost  = engine.Infer <Beta[]>(guessVar);
            for (int s = 0; s < nSkills; s++)
            {
                Console.WriteLine("pSkill[{0}] = {1} (sampled from {2})", s, pSkillPost[s], pSkill[s].ToString("g4"));
            }

            for (int i = 0; i < System.Math.Min(3, slipPost.Length); i++)
            {
                Console.WriteLine("slip[{0}] = {1} (sampled from {2})", i, slipPost[i], slip[i].ToString("g4"));
            }

            for (int i = 0; i < System.Math.Min(3, guessPost.Length); i++)
            {
                Console.WriteLine("guess[{0}] = {1} (sampled from {2})", i, guessPost[i], guess[i].ToString("g4"));
            }
        }
Example #59
		/// <summary>
		/// Evidence message for EP
		/// </summary>
		/// <param name="sample">Constant value for 'sample'.</param>
		/// <param name="probTrue">Incoming message from 'probTrue'.</param>
		/// <returns>Logarithm of the factor's contribution the EP model evidence</returns>
		/// <remarks><para>
		/// The formula for the result is <c>log(sum_(probTrue) p(probTrue) factor(sample,probTrue))</c>.
		/// Adding up these values across all factors and variables gives the log-evidence estimate for EP.
		/// </para></remarks>
		public static double LogEvidenceRatio(bool sample, Beta probTrue)
		{
			return LogAverageFactor(sample, probTrue);
		}
Example #60
        // Construct a NIDA model in Infer.NET
        public static void NidaModel(
            bool[][] responsesData,
            int nSkills,
            int[][] skillsRequired,
            Beta slipPrior,
            Beta guessPrior,
            out VariableArray <double> pSkill,
            out VariableArray <double> slip,
            out VariableArray <double> guess,
            out VariableArray <VariableArray <bool>, bool[][]> hasSkill)
        {
            // The Infer.NET model follows the same structure as the sampler above, but using Variables and Ranges
            int   nStudents  = responsesData.Length;
            int   nQuestions = responsesData[0].Length;
            Range student    = new Range(nStudents);
            Range question   = new Range(nQuestions);
            Range skill      = new Range(nSkills);
            var   responses  = Variable.Array(Variable.Array <bool>(question), student).Named("responses");

            responses.ObservedValue = responsesData;

            pSkill        = Variable.Array <double>(skill).Named("pSkill");
            pSkill[skill] = Variable.Beta(1, 1).ForEach(skill);
            slip          = Variable.Array <double>(skill).Named("slip");
            slip[skill]   = Variable.Random(slipPrior).ForEach(skill);
            guess         = Variable.Array <double>(skill).Named("guess");
            guess[skill]  = Variable.Random(guessPrior).ForEach(skill);

            hasSkill = Variable.Array(Variable.Array <bool>(skill), student).Named("hasSkill");
            hasSkill[student][skill] = Variable.Bernoulli(pSkill[skill]).ForEach(student);

            VariableArray <int> nSkillsRequired = Variable.Array <int>(question).Named("nSkillsRequired");

            nSkillsRequired.ObservedValue = Util.ArrayInit(nQuestions, q => skillsRequired[q].Length);
            Range skillForQuestion          = new Range(nSkillsRequired[question]).Named("skillForQuestion");
            var   skillsRequiredForQuestion = Variable.Array(Variable.Array <int>(skillForQuestion), question).Named("skillsRequiredForQuestion");

            skillsRequiredForQuestion.ObservedValue = skillsRequired;
            skillsRequiredForQuestion.SetValueRange(skill);

            using (Variable.ForEach(student))
            {
                using (Variable.ForEach(question))
                {
                    VariableArray <bool>   hasSkills     = Variable.Subarray(hasSkill[student], skillsRequiredForQuestion[question]);
                    VariableArray <double> slipSkill     = Variable.Subarray(slip, skillsRequiredForQuestion[question]);
                    VariableArray <double> guessSkill    = Variable.Subarray(guess, skillsRequiredForQuestion[question]);
                    VariableArray <bool>   exhibitsSkill = Variable.Array <bool>(skillForQuestion).Named("exhibitsSkill");
                    using (Variable.ForEach(skillForQuestion))
                    {
                        using (Variable.If(hasSkills[skillForQuestion]))
                        {
                            exhibitsSkill[skillForQuestion] = !Variable.Bernoulli(slipSkill[skillForQuestion]);
                        }

                        using (Variable.IfNot(hasSkills[skillForQuestion]))
                        {
                            exhibitsSkill[skillForQuestion] = Variable.Bernoulli(guessSkill[skillForQuestion]);
                        }
                    }

                    responses[student][question] = Variable.AllTrue(exhibitsSkill);
                }
            }
        }