/// <summary>
/// Verifies MaxGaussianOp messages when one argument of max(a,b) is the point mass 0:
/// the messages to A and B must be symmetric, must agree with IsPositiveOp when max==0,
/// and must sharpen monotonically toward max as A's prior mean grows.
/// </summary>
public void MaxTest2()
{
    foreach (double max in new[] { 0.0, 2.0 })
    {
        double previousMean = double.NaN;
        double previousVariance = double.NaN;
        for (int i = 0; i < 300; i++)
        {
            Gaussian a = new Gaussian(System.Math.Pow(10, i), 177);
            Gaussian to_a = MaxGaussianOp.AAverageConditional(max, a, 0);
            Gaussian to_b = MaxGaussianOp.BAverageConditional(max, 0, a);
            // The max factor is symmetric in its two random arguments.
            Assert.Equal(to_a, to_b);
            if (max == 0)
            {
                // max(a,0)==0 forces a<=0, so the message must match IsPositive(false, a).
                Gaussian to_a2 = IsPositiveOp.XAverageConditional(false, a);
                double error = System.Math.Max(
                    MMath.AbsDiff(to_a.MeanTimesPrecision, to_a2.MeanTimesPrecision, double.Epsilon),
                    MMath.AbsDiff(to_a.Precision, to_a2.Precision, double.Epsilon));
                //Trace.WriteLine($"{a} {to_a} {to_a2} {error}");
                Assert.True(error < 1e-12);
            }
            //else Trace.WriteLine($"{a} {to_a}");
            double m, v;
            to_a.GetMeanAndVariance(out m, out v);
            if (!double.IsNaN(previousMean))
            {
                // As the prior mean of A increases, the message variance must not grow
                // and its mean must move monotonically toward max.
                Assert.True(v <= previousVariance);
                double previousDistance = System.Math.Abs(max - previousMean);
                double distance = System.Math.Abs(max - m);
                Assert.True(distance <= previousDistance);
            }
            previousMean = m;
            previousVariance = v;
        }
    }
}
/// <summary>
/// Numerically searches for the backward message xB, parameterized by (mean, log precision),
/// that minimizes a mean-matching error against the slow Gaussian-op forward message plus a
/// KL term anchoring the posterior to the IsPositive constraint. Earlier objective variants
/// (moment matching, symmetric KL, evidence matching) were tried and discarded.
/// </summary>
public static Gaussian FindxB(Gaussian xB, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
{
    // Backward message implied by the positivity constraint on x.
    Gaussian constraintB = IsPositiveOp.XAverageConditional(true, xF);
    Func<Vector, double> objective = x2 =>
    {
        Gaussian candidate = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
        Gaussian forward = GaussianOp_Slow.SampleAverageConditional(candidate, meanPrior, precPrior);
        // Match posterior means under the candidate, and stay close (in KL) to the
        // posterior implied by the positivity constraint.
        return MeanError(forward * candidate, xF * candidate) + KlDiv(xF * constraintB, xF * candidate);
    };
    // Optimize over (mean, log precision) starting from the incoming xB.
    Vector point = Vector.FromArray(xB.GetMean(), System.Math.Log(xB.Precision));
    Minimize2(objective, point);
    return Gaussian.FromMeanAndPrecision(point[0], System.Math.Exp(point[1]));
}
/// <summary>
/// Searches for a forward message xF, parameterized by (mean, log precision), whose implied
/// posterior (message times the IsPositive backward message) minimizes KL divergence to the
/// expected posterior. <paramref name="evExpected"/>, <paramref name="meanPrior"/> and
/// <paramref name="precPrior"/> are kept in the signature for experimentation with
/// evidence-based objectives but are unused by the current objective.
/// </summary>
public static Gaussian FindxF3(Gaussian xExpected, double evExpected, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
{
    Func<Vector, double> objective = x2 =>
    {
        Gaussian candidate = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
        Gaussian backward = IsPositiveOp.XAverageConditional(true, candidate);
        Gaussian marginal = candidate * backward;
        // KL(marginal || expected); the reverse direction was tried and discarded.
        return KlDiv(marginal, xExpected);
    };
    // Optimize over (mean, log precision) starting from the incoming xF.
    Vector point = Vector.FromArray(xF.GetMean(), System.Math.Log(xF.Precision));
    Minimize2(objective, point);
    return Gaussian.FromMeanAndPrecision(point[0], System.Math.Exp(point[1]));
}
/// <summary>
/// Experimental study of EP/Laplace message-passing for p(x>0) under a Student-t prior on x
/// (Gaussian with Gamma-distributed precision).  Compares a fixed-precision forward message
/// (precision r0) against the exact answer from importance sampling, then sweeps a grid of
/// precisions and writes true vs. approximate evidence curves to a MATLAB file for plotting.
/// Not a pass/fail test; results are inspected via console output and student.mat.
/// </summary>
internal void StudentIsPositiveTest4()
{
    double shape = 1;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, shape);
    // mean=-1 causes improper messages
    double mean = -1;
    Gaussian meanPrior = Gaussian.PointMass(mean);
    double evExpected;
    // Ground truth posterior and evidence via importance sampling.
    Gaussian xExpected = StudentIsPositiveExact(mean, precPrior, out evExpected);
    // Allow improper messages and use the modified Laplace approximations.
    GaussianOp.ForceProper = false;
    GaussianOp_Laplace.modified = true;
    GaussianOp_Laplace.modified2 = true;
    Gaussian xF = Gaussian.Uniform();
    Gaussian xB = Gaussian.Uniform();
    Gamma q = GaussianOp_Laplace.QInit();
    // Hand-picked precision for the forward message; 0.38 was tried first.
    double r0 = 0.38;
    r0 = 0.1;
    for (int iter = 0; iter < 20; iter++)
    {
        q = GaussianOp_Laplace.Q(xB, meanPrior, precPrior, q);
        //xF = GaussianOp_Laplace.SampleAverageConditional(xB, meanPrior, precPrior, q);
        // Fix the forward message at precision r0 instead of using the Laplace message.
        xF = Gaussian.FromMeanAndPrecision(mean, r0);
        xB = IsPositiveOp.XAverageConditional(true, xF);
        Console.WriteLine("xF = {0} xB = {1}", xF, xB);
    }
    Console.WriteLine("x = {0} should be {1}", xF * xB, xExpected);
    // Sweep a grid of precisions and compare the true evidence curve against two
    // approximations: one correcting via xB, one correcting via the Laplace q.
    double[] precs = EpTests.linspace(1e-3, 5, 100);
    double[] evTrue = new double[precs.Length];
    double[] evApprox = new double[precs.Length];
    double[] evApprox2 = new double[precs.Length];
    //r0 = q.GetMean();
    double sum = 0, sum2 = 0;
    for (int i = 0; i < precs.Length; i++)
    {
        double r = precs[i];
        Gaussian xFt = Gaussian.FromMeanAndPrecision(mean, r);
        evTrue[i] = IsPositiveOp.LogAverageFactor(true, xFt) + precPrior.GetLogProb(r);
        evApprox[i] = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r) + xB.GetLogAverageOf(xFt) - xB.GetLogAverageOf(xF);
        evApprox2[i] = IsPositiveOp.LogAverageFactor(true, xF) + precPrior.GetLogProb(r0) + q.GetLogProb(r) - q.GetLogProb(r0);
        sum += System.Math.Exp(evApprox[i]);
        sum2 += System.Math.Exp(evApprox2[i]);
    }
    Console.WriteLine("r0 = {0}: {1} {2} {3}", r0, sum, sum2, q.GetVariance() + System.Math.Pow(r0 - q.GetMean(), 2));
    //TODO: change path for cross platform using
    using (var writer = new MatlabWriter(@"..\..\..\Tests\student.mat"))
    {
        writer.Write("z", evTrue);
        writer.Write("z2", evApprox);
        writer.Write("z3", evApprox2);
        writer.Write("precs", precs);
    }
}
/// <summary>
/// Runs a fixed-point iteration for p(x>0) under a Student-t prior on x, alternating the
/// IsPositive backward message with a constrained forward message, and prints the result.
/// Not a pass/fail test; the outcome is inspected on the console.
/// </summary>
internal void StudentIsPositiveTest3()
{
    double shape = 1;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, shape);
    Gaussian meanPrior = Gaussian.PointMass(0);
    Gaussian xB = Gaussian.Uniform();
    // Initial forward message from the Gaussian factor with a uniform backward message.
    Gaussian xF = GaussianOp.SampleAverageConditional_slow(xB, meanPrior, precPrior);
    // Fixed number of sweeps; convergence is judged by eye from the printed output.
    for (int sweep = 0; sweep < 100; sweep++)
    {
        xB = IsPositiveOp.XAverageConditional(true, xF);
        xF = GetConstrainedMessage(xB, meanPrior, precPrior, xF);
    }
    Console.WriteLine("xF = {0} x = {1}", xF, xB * xF);
}
/// <summary>
/// Searches for a self-consistent forward message xF, parameterized by (mean, log precision):
/// the candidate must reproduce itself when its IsPositive backward message is passed through
/// the slow Gaussian-op forward computation.  Minimizes the MaxDiff between the two.
/// </summary>
public static Gaussian FindxF2(Gaussian meanPrior, Gamma precPrior, Gaussian xF)
{
    Func<Vector, double> objective = x2 =>
    {
        Gaussian candidate = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
        Gaussian backward = IsPositiveOp.XAverageConditional(true, candidate);
        Gaussian roundTrip = GaussianOp_Slow.SampleAverageConditional(backward, meanPrior, precPrior);
        // Zero when the candidate is a fixed point of the message update.
        return candidate.MaxDiff(roundTrip);
    };
    // Optimize over (mean, log precision) starting from the incoming xF.
    Vector point = Vector.FromArray(xF.GetMean(), System.Math.Log(xF.Precision));
    Minimize2(objective, point);
    return Gaussian.FromMeanAndPrecision(point[0], System.Math.Exp(point[1]));
}
/// <summary>Computations that depend on the observed value of FeatureIndexes and FeatureValues and InstanceCount and InstanceFeatureCounts and WeightConstraints and WeightPriors</summary>
// Generated message-passing schedule (prediction pass of a Bayes point machine):
// per instance, multiply weights by feature values, sum to a score, add unit-variance
// noise, and threshold at zero to get the label marginal.  Statement order matters.
private void Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_WeightConstraints_WeightPri6()
{
    // Idempotent: skip if this schedule already ran for the current observed values.
    if (this.Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_WeightConstraints_WeightPri6_isDone)
    {
        return;
    }
    for (int InstanceRange = 0; InstanceRange < this.InstanceCount; InstanceRange++)
    {
        for (int InstanceFeatureRanges = 0; InstanceFeatureRanges < this.InstanceFeatureCounts[InstanceRange]; InstanceFeatureRanges++)
        {
            // Message to 'FeatureScores' from the Product factor (featureValue * weight).
            this.FeatureScores_F[InstanceRange][InstanceFeatureRanges] = GaussianProductOpBase.ProductAverageConditional(this.FeatureValues[InstanceRange][InstanceFeatureRanges], this.Weights_FeatureIndexes_F[InstanceRange][InstanceFeatureRanges]);
        }
        // Message to 'Score' from the Sum factor.
        this.Score_F[InstanceRange] = FastSumOp.SumAverageConditional(this.FeatureScores_F[InstanceRange]);
        // Message to 'NoisyScore': score plus Gaussian noise with variance 1.0.
        this.NoisyScore_F[InstanceRange] = GaussianFromMeanAndVarianceOp.SampleAverageConditional(this.Score_F[InstanceRange], 1.0);
        // Message to 'Labels' from the IsPositive (threshold) factor.
        this.Labels_F[InstanceRange] = IsPositiveOp.IsPositiveAverageConditional(this.NoisyScore_F[InstanceRange]);
        // Label marginal, combining the forward message with the shared backward message.
        this.Labels_marginal_F[InstanceRange] = DerivedVariableOp.MarginalAverageConditional<Bernoulli>(this.Labels_use_B_reduced, this.Labels_F[InstanceRange], this.Labels_marginal_F[InstanceRange]);
    }
    this.Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_WeightConstraints_WeightPri6_isDone = true;
}
/// <summary>Computations that depend on the observed value of y and biasPrior and xValueCount and xValues and wPrior and xIndices</summary>
// Generated backward (training) pass for a sparse linear model: propagates the observed
// label back through the noise, bias-plus, sum and product factors to update the bias and
// sparse-weight marginals.  Statement order matters.
public void Changed_y_biasPrior_xValueCount_xValues_wPrior_xIndices()
{
    // Idempotent: skip if this schedule already ran for the current observed values.
    if (this.Changed_y_biasPrior_xValueCount_xValues_wPrior_xIndices_iterationsDone == 1)
    {
        return;
    }
    // Backward message from the observed label through the IsPositive factor.
    this.vdouble13_use_B = IsPositiveOp.XAverageConditional(this.Y, this.vdouble13_F);
    // Backward through the unit-variance noise factor.
    this.vdouble11_B = GaussianFromMeanAndVarianceOp.MeanAverageConditional(this.vdouble13_use_B, 1);
    // Backward message to the bias from the (score + bias) factor, and its marginal.
    this.bias_use_B = DoublePlusOp.BAverageConditional(this.vdouble11_B, this.score_F);
    this.bias_marginal_F = VariableOp.MarginalAverageConditional<Gaussian>(this.bias_use_B, this.BiasPrior, this.bias_marginal_F);
    // Backward message to the score, then distribute it over the per-feature products.
    this.score_B = DoublePlusOp.AAverageConditional(this.vdouble11_B, this.BiasPrior);
    this.product_B = FastSumOp.ArrayAverageConditional<DistributionStructArray<Gaussian, double>>(this.score_B, this.score_F, this.product_F, this.product_B);
    for (int userFeature = 0; userFeature < this.XValueCount; userFeature++)
    {
        // Backward message to each sparse weight from its product-with-feature-value factor.
        this.wSparse_use_B[userFeature] = GaussianProductOp.BAverageConditional(this.product_B[userFeature], this.XValues[userFeature]);
    }
    // Updated marginal over the sparse weights.
    this.wSparse_marginal_F = DerivedVariableOp.MarginalAverageConditional<DistributionStructArray<Gaussian, double>>(this.wSparse_use_B, this.wSparse_F, this.wSparse_marginal_F);
    this.Changed_y_biasPrior_xValueCount_xValues_wPrior_xIndices_iterationsDone = 1;
}
/// <summary>
/// Searches for a forward message xF, parameterized by (mean, log precision), that is
/// self-consistent with the IsPositive backward message (KL term) while matching the mean
/// of the posterior implied by the slow Gaussian-op forward message (MeanError term).
/// Earlier objective variants (MaxDiff, symmetric KL) were tried and discarded.
/// </summary>
public static Gaussian FindxF(Gaussian xB, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
{
    // Reference forward message from the slow (accurate) Gaussian operator.
    Gaussian reference = GaussianOp_Slow.SampleAverageConditional(xB, meanPrior, precPrior);
    Func<Vector, double> objective = x2 =>
    {
        Gaussian candidate = Gaussian.FromMeanAndPrecision(x2[0], System.Math.Exp(x2[1]));
        Gaussian backward = IsPositiveOp.XAverageConditional(true, candidate);
        return KlDiv(candidate * backward, candidate * xB) + MeanError(reference * xB, candidate * xB);
    };
    // Optimize over (mean, log precision) starting from the incoming xF.
    Vector point = Vector.FromArray(xF.GetMean(), System.Math.Log(xF.Precision));
    Minimize2(objective, point);
    //MinimizePowell(objective, point);
    return Gaussian.FromMeanAndPrecision(point[0], System.Math.Exp(point[1]));
}
/// <summary>
/// Monte Carlo ground truth for the Student-t positivity problem: estimates the posterior
/// over x given x>0 (by rejection) and the evidence p(x>0) (by averaging the per-sample
/// positive-probability weights) when x ~ N(mean, 1/prec) with prec ~ precPrior.
/// </summary>
/// <param name="mean">Mean of the conditional Gaussian over x.</param>
/// <param name="precPrior">Gamma prior over the precision of x.</param>
/// <param name="evidence">On return, the estimated probability that x &gt; 0.</param>
/// <returns>Gaussian fit to the samples of x that landed above zero.</returns>
private Gaussian StudentIsPositiveExact(double mean, Gamma precPrior, out double evidence)
{
    // importance sampling for true answer
    GaussianEstimator estimator = new GaussianEstimator();
    const int sampleCount = 1000000;
    double evidenceSum = 0;
    for (int sample = 0; sample < sampleCount; sample++)
    {
        double precision = precPrior.Sample();
        Gaussian xPrior = Gaussian.FromMeanAndPrecision(mean, precision);
        // Exact p(x>0) under this conditional Gaussian contributes to the evidence.
        evidenceSum += System.Math.Exp(IsPositiveOp.LogAverageFactor(true, xPrior));
        double xSample = xPrior.Sample();
        // Rejection step: keep only samples satisfying the positivity constraint.
        if (xSample > 0)
        {
            estimator.Add(xSample);
        }
    }
    evidence = evidenceSum / sampleCount;
    return estimator.GetDistribution(new Gaussian());
}
/// <summary>Computations that depend on the observed value of InstanceCount and FeatureCount and FeatureValues and numberOfIterationsDecreased and WeightPriors and WeightConstraints</summary>
// Generated forward (prediction) pass of a dense Bayes point machine: per instance,
// multiply weights by feature values, sum, add unit-variance noise, threshold at zero.
// Statement order matters.
private void Changed_InstanceCount_FeatureCount_FeatureValues_numberOfIterationsDecreased_WeightPriors_WeightCons10()
{
    // Idempotent: skip if this schedule already ran for the current observed values.
    if (this.Changed_InstanceCount_FeatureCount_FeatureValues_numberOfIterationsDecreased_WeightPriors_WeightCons10_iterationsDone == 1)
    {
        return;
    }
    for (int InstanceRange = 0; InstanceRange < this.instanceCount; InstanceRange++)
    {
        for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
        {
            // Message to 'FeatureScores' from Product factor
            this.FeatureScores_F[InstanceRange][FeatureRange] = GaussianProductOp.ProductAverageConditional(this.featureValues[InstanceRange][FeatureRange], this.Weights_depth1_rep_F[FeatureRange][InstanceRange]);
        }
        // Message to 'Score' from Sum factor
        this.Score_F[InstanceRange] = FastSumOp.SumAverageConditional(this.FeatureScores_F[InstanceRange]);
        // Message to 'NoisyScore' from GaussianFromMeanAndVariance factor
        this.NoisyScore_F[InstanceRange] = GaussianFromMeanAndVarianceOp.SampleAverageConditional(this.Score_F[InstanceRange], 1.0);
        // Message to 'Labels' from IsPositive factor
        this.Labels_F[InstanceRange] = IsPositiveOp.IsPositiveAverageConditional(this.NoisyScore_F[InstanceRange]);
        // Message to 'Labels_marginal' from DerivedVariable factor
        this.Labels_marginal_F[InstanceRange] = DerivedVariableOp.MarginalAverageConditional<Bernoulli>(this.Labels_use_B[InstanceRange], this.Labels_F[InstanceRange], this.Labels_marginal_F[InstanceRange]);
    }
    this.Changed_InstanceCount_FeatureCount_FeatureValues_numberOfIterationsDecreased_WeightPriors_WeightCons10_iterationsDone = 1;
}
/// <summary>
/// One-dimensional variant of FindxF: searches over the natural mean parameter tau of a
/// zero-precision (improper, flat-variance) forward message Gaussian.FromNatural(tau, 0),
/// restricted to tau &lt; 0, minimizing the same KL-plus-mean-error objective.
/// </summary>
public static Gaussian FindxF0(Gaussian xB, Gaussian meanPrior, Gamma precPrior, Gaussian xF)
{
    // Reference forward message from the slow (accurate) Gaussian operator.
    Gaussian reference = GaussianOp_Slow.SampleAverageConditional(xB, meanPrior, precPrior);
    Func<double, double> objective = tau2 =>
    {
        Gaussian candidate = Gaussian.FromNatural(tau2, 0);
        // Only negative natural means are admissible; penalize the rest.
        if (tau2 >= 0)
        {
            return double.PositiveInfinity;
        }
        Gaussian backward = IsPositiveOp.XAverageConditional(true, candidate);
        return KlDiv(candidate * backward, candidate * xB) + MeanError(reference * xB, candidate * xB);
    };
    // Start the line search from the incoming message's natural mean.
    double tau = xF.MeanTimesPrecision;
    double fmin;
    tau = Minimize(objective, tau, out fmin);
    //MinimizePowell(objective, x);
    return Gaussian.FromNatural(tau, 0);
}
/// <summary>Computations that depend on the observed value of FeatureCount and FeatureValues and InstanceCount and Labels and numberOfIterations and WeightConstraints and WeightP8</summary>
/// <param name="numberOfIterations">The number of times to iterate each loop</param>
// Generated EP training schedule for a dense Bayes point machine with model-selection
// evidence.  Iterates forward/backward sweeps over instances and features, then computes
// evidence contributions and final marginals.  Statement order is the semantics of the
// schedule and must not be changed.
private void Changed_FeatureCount_FeatureValues_InstanceCount_Labels_numberOfIterations_WeightConstraints_WeightP8(int numberOfIterations)
{
    // Idempotent: skip if this schedule already ran for the current observed values.
    if (this.Changed_FeatureCount_FeatureValues_InstanceCount_Labels_numberOfIterations_WeightConstraints_WeightP8_isDone)
    {
        return;
    }
    // Resume from numberOfIterationsDone so extra iterations continue rather than restart.
    for (int iteration = this.numberOfIterationsDone; iteration < numberOfIterations; iteration++)
    {
        for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
        {
            // Marginal over each replicated weight (division-based replicate schedule).
            this.Weights_depth1_rep_F_marginal[FeatureRange] = ReplicateOp_Divide.Marginal<Gaussian>(this.Weights_depth1_rep_B_toDef[FeatureRange], this.Weights_uses_F[1][FeatureRange], this.Weights_depth1_rep_F_marginal[FeatureRange]);
        }
        for (int InstanceRange = 0; InstanceRange < this.instanceCount; InstanceRange++)
        {
            for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
            {
                // Per-instance weight use = marginal divided by this instance's backward message.
                this.Weights_depth1_rep_F[FeatureRange][InstanceRange] = ReplicateOp_Divide.UsesAverageConditional<Gaussian>(this.Weights_depth1_rep_B[FeatureRange][InstanceRange], this.Weights_depth1_rep_F_marginal[FeatureRange], InstanceRange, this.Weights_depth1_rep_F[FeatureRange][InstanceRange]);
                // Message to 'FeatureScores' from the Product factor.
                this.FeatureScores_F[InstanceRange][FeatureRange] = GaussianProductOpBase.ProductAverageConditional(this.featureValues[InstanceRange][FeatureRange], this.Weights_depth1_rep_F[FeatureRange][InstanceRange]);
            }
            // Forward: sum of feature scores, plus unit-variance noise.
            this.Score_F[InstanceRange] = FastSumOp.SumAverageConditional(this.FeatureScores_F[InstanceRange]);
            this.NoisyScore_F[InstanceRange] = GaussianFromMeanAndVarianceOp.SampleAverageConditional(this.Score_F[InstanceRange], 1.0);
            // Backward: observed label constrains the noisy score through IsPositive.
            this.NoisyScore_use_B[InstanceRange] = IsPositiveOp_Proper.XAverageConditional(Bernoulli.PointMass(this.labels[InstanceRange]),
                this.NoisyScore_F[InstanceRange]);
            this.Score_B[InstanceRange] = GaussianFromMeanAndVarianceOp.MeanAverageConditional(this.NoisyScore_use_B[InstanceRange], 1.0);
            // Distribute the backward score message over the per-feature scores.
            this.FeatureScores_B[InstanceRange] = FastSumOp.ArrayAverageConditional<DistributionStructArray<Gaussian, double>>(this.Score_B[InstanceRange], this.Score_F[InstanceRange], this.FeatureScores_F[InstanceRange], this.FeatureScores_B[InstanceRange]);
            for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
            {
                // Backward message to each weight, then fold it into the running marginal.
                this.Weights_depth1_rep_B[FeatureRange][InstanceRange] = GaussianProductOpBase.BAverageConditional(this.FeatureScores_B[InstanceRange][FeatureRange], this.featureValues[InstanceRange][FeatureRange]);
                this.Weights_depth1_rep_F_marginal[FeatureRange] = ReplicateOp_Divide.MarginalIncrement<Gaussian>(this.Weights_depth1_rep_F_marginal[FeatureRange], this.Weights_depth1_rep_F[FeatureRange][InstanceRange], this.Weights_depth1_rep_B[FeatureRange][InstanceRange]);
            }
        }
        for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
        {
            // Collapse per-instance backward messages into one message to the weight definition.
            this.Weights_depth1_rep_B_toDef[FeatureRange] = ReplicateOp_Divide.ToDef<Gaussian>(this.Weights_depth1_rep_B[FeatureRange], this.Weights_depth1_rep_B_toDef[FeatureRange]);
        }
        this.OnProgressChanged(new ProgressChangedEventArgs(iteration));
    }
    for (int _iv = 0; _iv < this.featureCount; _iv++)
    {
        this.Weights_uses_B[1][_iv] = ArrayHelper.SetTo<Gaussian>(this.Weights_uses_B[1][_iv], this.Weights_depth1_rep_B_toDef[_iv]);
    }
    this.Weights_uses_F[0] = ReplicateOp_NoDivide.UsesAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.weightPriors, 0, this.Weights_uses_F[0]);
    // Evidence contributions for the model-selection gate.
    this.ModelSelector_selector_cases_0_uses_B[6] = Bernoulli.FromLogOdds(ReplicateOp.LogEvidenceRatio<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.weightPriors, this.Weights_uses_F));
    this.ModelSelector_selector_cases_0_uses_B[7] = Bernoulli.FromLogOdds(ConstrainEqualRandomOp<double[]>.LogEvidenceRatio<DistributionStructArray<Gaussian, double>>(this.Weights_uses_F[0], this.weightConstraints));
    for (int FeatureRange = 0; FeatureRange < this.featureCount; FeatureRange++)
    {
        // Per-feature replicate evidence, then its product over uses.
        this.ModelSelector_selector_cases_0_rep3_uses_B[FeatureRange][1] = Bernoulli.FromLogOdds(ReplicateOp.LogEvidenceRatio<Gaussian>(this.Weights_depth1_rep_B[FeatureRange], this.Weights_uses_F[1][FeatureRange], this.Weights_depth1_rep_F[FeatureRange]));
        this.ModelSelector_selector_cases_0_rep3_B[FeatureRange] = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_rep3_uses_B[FeatureRange], this.ModelSelector_selector_cases_0_rep3_B[FeatureRange]);
    }
    this.ModelSelector_selector_cases_0_uses_B[12] = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_rep3_B, this.ModelSelector_selector_cases_0_uses_B[12]);
    for (int InstanceRange = 0; InstanceRange < this.instanceCount; InstanceRange++)
    {
        // Per-instance label-likelihood evidence.
        this.ModelSelector_selector_cases_0_rep8_B[InstanceRange] = Bernoulli.FromLogOdds(IsPositiveOp.LogEvidenceRatio(this.labels[InstanceRange], this.NoisyScore_F[InstanceRange]));
    }
    this.ModelSelector_selector_cases_0_uses_B[17] = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_rep8_B, this.ModelSelector_selector_cases_0_uses_B[17]);
    // Combine all evidence contributions into the model selector marginal.
    this.ModelSelector_selector_cases_0_B = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_uses_B, this.ModelSelector_selector_cases_0_B);
    this.ModelSelector_selector_cases_B[0] = ArrayHelper.SetTo<Bernoulli>(this.ModelSelector_selector_cases_B[0], this.ModelSelector_selector_cases_0_B);
    this.ModelSelector_selector_B = CasesOp.BAverageConditional(this.ModelSelector_selector_cases_B);
    this.ModelSelector_marginal_F = VariableOp.MarginalAverageConditional<Bernoulli>(this.ModelSelector_selector_B, this.vBernoulli0, this.ModelSelector_marginal_F);
    // Final weight marginal.
    this.Weights_use_B = ReplicateOp_NoDivide.DefAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.Weights_use_B);
    this.Weights_marginal_F = VariableOp.MarginalAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_use_B, this.weightPriors, this.Weights_marginal_F);
    this.Changed_FeatureCount_FeatureValues_InstanceCount_Labels_numberOfIterations_WeightConstraints_WeightP8_isDone = true;
}
/// <summary>Computations that depend on the observed value of FeatureIndexes and FeatureValues and InstanceCount and InstanceFeatureCounts and Labels and numberOfIterations and WeightConstraints and WeightPriors</summary>
/// <param name="numberOfIterations">The number of times to iterate each loop</param>
// Generated EP training schedule for a sparse Bayes point machine (jagged feature indexes)
// with model-selection evidence.  Statement order is the semantics of the schedule and
// must not be changed.
private void Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_Labels_numberOfIterations_W7(int numberOfIterations)
{
    // Idempotent: skip if this schedule already ran for the current observed values.
    if (this.Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_Labels_numberOfIterations_W7_isDone)
    {
        return;
    }
    // Resume from numberOfIterationsDone so extra iterations continue rather than restart.
    for (int iteration = this.numberOfIterationsDone; iteration < numberOfIterations; iteration++)
    {
        for (int InstanceRange = 0; InstanceRange < this.instanceCount; InstanceRange++)
        {
            // Gather this instance's indexed weights from the shared marginal.
            this.Weights_FeatureIndexes_F[InstanceRange] = JaggedSubarrayWithMarginalOp<double>.ItemsAverageConditional<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>>(this.IndexedWeights_B[InstanceRange], this.Weights_uses_F[1], this.Weights_marginal_F, this.featureIndexes, InstanceRange, this.Weights_FeatureIndexes_F[InstanceRange]);
            for (int InstanceFeatureRanges = 0; InstanceFeatureRanges < this.instanceFeatureCounts[InstanceRange]; InstanceFeatureRanges++)
            {
                // Message to 'FeatureScores' from the Product factor.
                this.FeatureScores_F[InstanceRange][InstanceFeatureRanges] = GaussianProductOpBase.ProductAverageConditional(this.featureValues[InstanceRange][InstanceFeatureRanges], this.Weights_FeatureIndexes_F[InstanceRange][InstanceFeatureRanges]);
            }
            // Forward: sum of feature scores, plus unit-variance noise.
            this.Score_F[InstanceRange] = FastSumOp.SumAverageConditional(this.FeatureScores_F[InstanceRange]);
            this.NoisyScore_F[InstanceRange] = GaussianFromMeanAndVarianceOp.SampleAverageConditional(this.Score_F[InstanceRange], 1.0);
            // Backward: observed label constrains the noisy score through IsPositive.
            this.NoisyScore_use_B[InstanceRange] = IsPositiveOp_Proper.XAverageConditional(Bernoulli.PointMass(this.labels[InstanceRange]), this.NoisyScore_F[InstanceRange]);
            this.Score_B[InstanceRange] = GaussianFromMeanAndVarianceOp.MeanAverageConditional(this.NoisyScore_use_B[InstanceRange], 1.0);
            // Distribute the backward score message over the per-feature scores.
            this.FeatureScores_B[InstanceRange] = FastSumOp.ArrayAverageConditional<DistributionStructArray<Gaussian, double>>(this.Score_B[InstanceRange], this.Score_F[InstanceRange], this.FeatureScores_F[InstanceRange], this.FeatureScores_B[InstanceRange]);
            for (int InstanceFeatureRanges = 0; InstanceFeatureRanges < this.instanceFeatureCounts[InstanceRange]; InstanceFeatureRanges++)
            {
                // Backward message to each indexed weight.
                this.IndexedWeights_B[InstanceRange][InstanceFeatureRanges] = GaussianProductOpBase.BAverageConditional(this.FeatureScores_B[InstanceRange][InstanceFeatureRanges], this.featureValues[InstanceRange][InstanceFeatureRanges]);
            }
            // Fold this instance's backward messages into the shared weight marginal.
            this.Weights_marginal_F = JaggedSubarrayWithMarginalOp<double>.MarginalIncrementItems<DistributionStructArray<Gaussian, double>, Gaussian, DistributionStructArray<Gaussian, double>>(this.IndexedWeights_B[InstanceRange], this.Weights_FeatureIndexes_F[InstanceRange], this.featureIndexes, InstanceRange, this.Weights_marginal_F);
        }
        this.OnProgressChanged(new ProgressChangedEventArgs(iteration));
    }
    // Post-iteration: propagate weight messages and compute evidence contributions.
    this.Weights_uses_B[1] = JaggedSubarrayWithMarginalOp<double>.ArrayAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_uses_F[1], this.Weights_marginal_F, this.Weights_uses_B[1]);
    this.Weights_uses_F[0] = ReplicateOp_NoDivide.UsesAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.weightPriors, 0, this.Weights_uses_F[0]);
    this.ModelSelector_selector_cases_0_uses_B[3] = Bernoulli.FromLogOdds(ReplicateOp.LogEvidenceRatio<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.weightPriors, this.Weights_uses_F));
    this.ModelSelector_selector_cases_0_uses_B[4] = Bernoulli.FromLogOdds(ConstrainEqualRandomOp<double[]>.LogEvidenceRatio<DistributionStructArray<Gaussian, double>>(this.Weights_uses_F[0], this.weightConstraints));
    this.ModelSelector_selector_cases_0_uses_B[8] = Bernoulli.FromLogOdds(JaggedSubarrayWithMarginalOp<double>.LogEvidenceRatio<Gaussian, DistributionRefArray<DistributionStructArray<Gaussian, double>, double[]>, DistributionStructArray<Gaussian, double>>(this.IndexedWeights_B, this.Weights_uses_F[1], this.featureIndexes, this.Weights_FeatureIndexes_F));
    for (int InstanceRange = 0; InstanceRange < this.instanceCount; InstanceRange++)
    {
        // Per-instance label-likelihood evidence.
        this.ModelSelector_selector_cases_0_rep9_B[InstanceRange] = Bernoulli.FromLogOdds(IsPositiveOp.LogEvidenceRatio(this.labels[InstanceRange], this.NoisyScore_F[InstanceRange]));
    }
    this.ModelSelector_selector_cases_0_uses_B[16] = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_rep9_B, this.ModelSelector_selector_cases_0_uses_B[16]);
    // Combine all evidence contributions into the model selector marginal.
    this.ModelSelector_selector_cases_0_B = ReplicateOp_NoDivide.DefAverageConditional<Bernoulli>(this.ModelSelector_selector_cases_0_uses_B, this.ModelSelector_selector_cases_0_B);
    this.ModelSelector_selector_cases_B[0] = ArrayHelper.SetTo<Bernoulli>(this.ModelSelector_selector_cases_B[0], this.ModelSelector_selector_cases_0_B);
    this.ModelSelector_selector_B = CasesOp.BAverageConditional(this.ModelSelector_selector_cases_B);
    this.ModelSelector_marginal_F = VariableOp.MarginalAverageConditional<Bernoulli>(this.ModelSelector_selector_B, this.vBernoulli1, this.ModelSelector_marginal_F);
    // Final backward message to the weights definition.
    this.Weights_B = ReplicateOp_NoDivide.DefAverageConditional<DistributionStructArray<Gaussian, double>>(this.Weights_uses_B, this.Weights_B);
    this.Changed_FeatureIndexes_FeatureValues_InstanceCount_InstanceFeatureCounts_Labels_numberOfIterations_W7_isDone = true;
}
/// <summary>
/// Runs a Gibbs sampler (with Metropolis-within-Gibbs steps) for a student/question
/// response model: each response is modelled via a latent score
/// x ~ N(disc*(ability - difficulty), 1/prec) whose sign generates the binary response.
/// Fills in responseProbMean, abilityPost/difficultyPost/discriminationPost and the
/// credible intervals from the post-burn-in samples.
/// </summary>
/// <param name="options">Sampler settings: numParams (1 or 2), numberOfSamples, burnIn, credibleIntervalProbability.</param>
/// <param name="data">Response matrix, one row per student, one column per question; entries &gt; 0 are treated as positive responses.</param>
/// <exception cref="Exception">Thrown when options.numParams &gt; 2 (only 1- and 2-parameter models are supported).</exception>
public void Sample(Options options, Matrix data)
{
    if (options.numParams > 2)
    {
        throw new Exception("numParams > 2");
    }
    int numStudents = data.Rows;
    int numQuestions = data.Cols;
    // initialize the sampler at the mean of the priors (not sampling from the priors)
    double abilityMean = abilityMeanPrior.GetMean();
    double abilityPrec = abilityPrecPrior.GetMean();
    double difficultyMean = difficultyMeanPrior.GetMean();
    double difficultyPrec = difficultyPrecPrior.GetMean();
    double discriminationMean = discriminationMeanPrior.GetMean();
    double discriminationPrec = discriminationPrecPrior.GetMean();
    double[] ability = new double[numStudents];
    double[] difficulty = new double[numQuestions];
    List<double>[] difficultySamples = new List<double>[numQuestions];
    GaussianEstimator[] difficultyEstimator = new GaussianEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        difficultyEstimator[question] = new GaussianEstimator();
        difficultySamples[question] = new List<double>();
        if (difficultyObserved != null)
        {
            // difficulty is clamped to its observed value and never resampled
            difficulty[question] = difficultyObserved[question];
            difficultyEstimator[question].Add(difficultyObserved[question]);
            difficultySamples[question].Add(difficultyObserved[question]);
        }
    }
    List<double>[] abilitySamples = new List<double>[numStudents];
    GaussianEstimator[] abilityEstimator = new GaussianEstimator[ability.Length];
    for (int student = 0; student < abilityEstimator.Length; student++)
    {
        abilityEstimator[student] = new GaussianEstimator();
        abilitySamples[student] = new List<double>();
        if (abilityObserved != null)
        {
            // ability is clamped to its observed value and never resampled
            ability[student] = abilityObserved[student];
            abilityEstimator[student].Add(abilityObserved[student]);
            abilitySamples[student].Add(abilityObserved[student]);
        }
    }
    double[] discrimination = new double[numQuestions];
    List<double>[] discriminationSamples = new List<double>[numQuestions];
    GammaEstimator[] discriminationEstimator = new GammaEstimator[numQuestions];
    for (int question = 0; question < numQuestions; question++)
    {
        discriminationEstimator[question] = new GammaEstimator();
        discriminationSamples[question] = new List<double>();
        // default discrimination of 1 gives the 1-parameter model when discrimination is not sampled
        discrimination[question] = 1;
        if (discriminationObserved != null)
        {
            discrimination[question] = discriminationObserved[question];
            discriminationEstimator[question].Add(discriminationObserved[question]);
            discriminationSamples[question].Add(discriminationObserved[question]);
        }
    }
    responseProbMean = new Matrix(numStudents, numQuestions);
    int niters = options.numberOfSamples;
    int burnin = options.burnIn;
    // variance of the standard logistic distribution, pi^2/3 — used by the
    // Gamma-mixture approximation of the logistic likelihood below
    double logisticVariance = Math.PI * Math.PI / 3;
    double shape = 4.5;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, (shape - 1) * logisticVariance);
    // NOTE(review): this overrides the mixture prior with a point mass at 1,
    // i.e. a plain probit-style noise model; the line above is effectively dead.
    precPrior = Gamma.PointMass(1);
    double[,] prec = new double[numStudents, numQuestions];
    double[,] x = new double[numStudents, numQuestions];
    int numRejected = 0, numAttempts = 0;
    for (int iter = 0; iter < niters; iter++)
    {
        for (int student = 0; student < numStudents; student++)
        {
            for (int question = 0; question < numQuestions; question++)
            {
                // sample prec given ability, difficulty, x
                // N(x; ability-difficulty, 1/prec) = Gamma(prec; 1.5, (x-ability+difficulty)^2/2)
                Gamma precPost = precPrior;
                double xMean = (ability[student] - difficulty[question]) * discrimination[question];
                double delta = x[student, question] - xMean;
                Gamma like = Gamma.FromShapeAndRate(1.5, 0.5 * delta * delta);
                precPost.SetToProduct(precPost, like);
                prec[student, question] = precPost.Sample();
                // sample x given ability, difficulty, prec, data
                // using an independence chain MH
                bool y = (data[student, question] > 0);
                double sign = y ? 1.0 : -1.0;
                Gaussian xPrior = Gaussian.FromMeanAndPrecision(xMean, prec[student, question]);
                // we want to sample from xPrior*I(x>0)
                // instead we sample from xPost (the EP approximation) and correct via MH
                Gaussian xPost = xPrior * IsPositiveOp.XAverageConditional(y, xPrior);
                double oldx = x[student, question];
                double newx = xPost.Sample();
                numAttempts++;
                if (newx * sign < 0)
                {
                    // proposal has the wrong sign: zero target density, always reject
                    newx = oldx; // rejected
                    numRejected++;
                }
                else
                {
                    // importance weights (log target minus log proposal)
                    double oldw = xPrior.GetLogProb(oldx) - xPost.GetLogProb(oldx);
                    double neww = xPrior.GetLogProb(newx) - xPost.GetLogProb(newx);
                    // acceptance ratio
                    double paccept = Math.Exp(neww - oldw);
                    if (paccept < 1 && Rand.Double() > paccept)
                    {
                        newx = oldx; // rejected
                        numRejected++;
                    }
                }
                x[student, question] = newx;
                if (iter >= burnin)
                {
                    // accumulate the Rao-Blackwellized response probability (uses xMean, not the sampled x)
                    double responseProb = MMath.Logistic(xMean);
                    responseProbMean[student, question] += responseProb;
                }
            }
        }
        if (abilityObserved == null)
        {
            // sample ability given difficulty, prec, x
            for (int student = 0; student < numStudents; student++)
            {
                Gaussian post = Gaussian.FromMeanAndPrecision(abilityMean, abilityPrec);
                for (int question = 0; question < numQuestions; question++)
                {
                    // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(ability; x/disc+difficulty, 1/disc^2/prec)
                    Gaussian abilityLike = Gaussian.FromMeanAndPrecision(x[student, question] / discrimination[question] + difficulty[question], prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, abilityLike);
                }
                ability[student] = post.Sample();
                if (iter >= burnin)
                {
                    // estimator accumulates the full conditional (Rao-Blackwellized), not just the sample
                    abilityEstimator[student].Add(post);
                    abilitySamples[student].Add(ability[student]);
                }
            }
        }
        // sample difficulty given ability, prec, x
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian post = Gaussian.FromMeanAndPrecision(difficultyMean, difficultyPrec);
            for (int student = 0; student < numStudents; student++)
            {
                // N(x; disc*(ability-difficulty), 1/prec) =propto N(x/disc; ability-difficulty, 1/disc^2/prec) = N(difficulty; ability-x/disc, 1/disc^2/prec)
                if (discrimination[question] > 0)
                {
                    Gaussian like = Gaussian.FromMeanAndPrecision(ability[student] - x[student, question] / discrimination[question], prec[student, question] * discrimination[question] * discrimination[question]);
                    post.SetToProduct(post, like);
                }
            }
            difficulty[question] = post.Sample();
            if (iter >= burnin)
            {
                //if (difficulty[question] > 100)
                //    Console.WriteLine("difficulty[{0}] = {1}", question, difficulty[question]);
                difficultyEstimator[question].Add(post);
                difficultySamples[question].Add(difficulty[question]);
            }
        }
        if (options.numParams > 1 && discriminationObserved == null)
        {
            // sample discrimination given ability, difficulty, prec, x
            for (int question = 0; question < numQuestions; question++)
            {
                // moment-matching on the (log-normal) prior: mean and variance of exp(N(discriminationMean, 1/discriminationPrec))
                Gaussian approxPrior = Gaussian.FromMeanAndVariance(Math.Exp(discriminationMean + 0.5 / discriminationPrec), Math.Exp(2 * discriminationMean + 1 / discriminationPrec) * (Math.Exp(1 / discriminationPrec) - 1));
                Gaussian post = approxPrior;
                for (int student = 0; student < numStudents; student++)
                {
                    // N(x; disc*delta, 1/prec) =propto N(x/delta; disc, 1/prec/delta^2)
                    double delta = ability[student] - difficulty[question];
                    if (delta > 0)
                    {
                        Gaussian like = Gaussian.FromMeanAndPrecision(x[student, question] / delta, prec[student, question] * delta * delta);
                        post.SetToProduct(post, like);
                    }
                }
                // propose from the positive part of the Gaussian approximation
                TruncatedGaussian postTrunc = new TruncatedGaussian(post, 0, double.PositiveInfinity);
                double olddisc = discrimination[question];
                double newdisc = postTrunc.Sample();
                // importance weights: true log-normal prior density vs the Gaussian approxPrior used to propose
                Func<double, double> priorLogProb = delegate (double d)
                {
                    double logd = Math.Log(d);
                    // log-normal density of d, including the 1/d Jacobian term
                    return (Gaussian.GetLogProb(logd, discriminationMean, 1 / discriminationPrec) - logd);
                };
                double oldw = priorLogProb(olddisc) - approxPrior.GetLogProb(olddisc);
                double neww = priorLogProb(newdisc) - approxPrior.GetLogProb(newdisc);
                // acceptance ratio
                double paccept = Math.Exp(neww - oldw);
                if (paccept < 1 && Rand.Double() > paccept)
                {
                    // rejected: keep the old value
                }
                else
                {
                    discrimination[question] = newdisc;
                }
                if (iter >= burnin)
                {
                    discriminationEstimator[question].Add(discrimination[question]);
                    discriminationSamples[question].Add(discrimination[question]);
                }
            }
        }
        // sample abilityMean given ability, abilityPrec
        Gaussian abilityMeanPost = abilityMeanPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(ability[student], abilityPrec);
            abilityMeanPost *= like;
        }
        abilityMean = abilityMeanPost.Sample();
        // sample abilityPrec given ability, abilityMean
        Gamma abilityPrecPost = abilityPrecPrior;
        for (int student = 0; student < numStudents; student++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(ability[student], abilityMean);
            abilityPrecPost *= like;
        }
        abilityPrec = abilityPrecPost.Sample();
        // sample difficultyMean given difficulty, difficultyPrec
        Gaussian difficultyMeanPost = difficultyMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(difficulty[question], difficultyPrec);
            difficultyMeanPost *= like;
        }
        difficultyMean = difficultyMeanPost.Sample();
        // sample difficultyPrec given difficulty, difficultyMean
        Gamma difficultyPrecPost = difficultyPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(difficulty[question], difficultyMean);
            difficultyPrecPost *= like;
        }
        difficultyPrec = difficultyPrecPost.Sample();
        // sample discriminationMean given discrimination, discriminationPrec
        // (the hyperprior acts on log(discrimination), matching the log-normal model)
        Gaussian discriminationMeanPost = discriminationMeanPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gaussian like = GaussianOp.MeanAverageConditional(Math.Log(discrimination[question]), discriminationPrec);
            discriminationMeanPost *= like;
        }
        discriminationMean = discriminationMeanPost.Sample();
        // sample discriminationPrec given discrimination, discriminationMean
        Gamma discriminationPrecPost = discriminationPrecPrior;
        for (int question = 0; question < numQuestions; question++)
        {
            Gamma like = GaussianOp.PrecisionAverageConditional(Math.Log(discrimination[question]), discriminationMean);
            discriminationPrecPost *= like;
        }
        discriminationPrec = discriminationPrecPost.Sample();
        //if (iter % 1 == 0)
        //    Console.WriteLine("iter = {0}", iter);
    }
    //Console.WriteLine("abilityMean = {0}, abilityPrec = {1}", abilityMean, abilityPrec);
    //Console.WriteLine("difficultyMean = {0}, difficultyPrec = {1}", difficultyMean, difficultyPrec);
    // average the accumulated response probabilities over the retained samples
    // (assumes niters > burnin — TODO confirm callers guarantee this)
    int numSamplesUsed = niters - burnin;
    responseProbMean.Scale(1.0 / numSamplesUsed);
    //Console.WriteLine("acceptance rate = {0}", ((double)numAttempts - numRejected)/numAttempts);
    difficultyPost = Array.ConvertAll(difficultyEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    abilityPost = Array.ConvertAll(abilityEstimator, est => est.GetDistribution(Gaussian.Uniform()));
    if (options.numParams > 1)
    {
        discriminationPost = Array.ConvertAll(discriminationEstimator, est => est.GetDistribution(new Gamma()));
    }
    abilityCred = GetCredibleIntervals(options.credibleIntervalProbability, abilitySamples);
    difficultyCred = GetCredibleIntervals(options.credibleIntervalProbability, difficultySamples);
    // debugging aid: dump raw sample chains for one hard-coded question to a MATLAB file
    bool saveSamples = false;
    if (saveSamples)
    {
        using (MatlabWriter writer = new MatlabWriter(@"..\..\samples.mat"))
        {
            int q = 11;
            writer.Write("difficulty", difficultySamples[q]);
            writer.Write("discrimination", discriminationSamples[q]);
        }
    }
}
/// <summary>
/// Exploratory (non-asserting) investigation of EP fixed points for the
/// Student-is-positive problem: x ~ N(mean, 1/prec), prec ~ Gamma, constrained x &gt; 0.
/// Compares candidate forward (xF) / backward (xB) message pairs against the exact
/// posterior from StudentIsPositiveExact. The if(false) branches are recorded
/// experiments kept for reference; the if(true) branch is the active one and
/// returns early, so the grid-scan code after it is intentionally unreachable.
/// </summary>
internal void StudentIsPositiveTest2()
{
    // allow improper (negative-precision) messages, which this investigation depends on
    GaussianOp.ForceProper = false;
    double shape = 1;
    double mean = -1;
    Gamma precPrior = Gamma.FromShapeAndRate(shape, shape);
    Gaussian meanPrior = Gaussian.PointMass(mean);
    double evExpected;
    // ground truth posterior and evidence to compare candidates against
    Gaussian xExpected = StudentIsPositiveExact(mean, precPrior, out evExpected);
    Gaussian xF2 = Gaussian.FromMeanAndVariance(-1, 1);
    // the energy has a stationary point here (min in both dimensions), even though xF0 is improper
    Gaussian xB0 = new Gaussian(2, 1);
    xF2 = Gaussian.FromMeanAndVariance(-4.552, 6.484);
    //xB0 = new Gaussian(1.832, 0.9502);
    //xB0 = new Gaussian(1.792, 1.558);
    //xB0 = new Gaussian(1.71, 1.558);
    //xB0 = new Gaussian(1.792, 1.5);
    Gaussian xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
    //Console.WriteLine("xB0 = {0} xF0 = {1}", xB0, xF0);
    //Console.WriteLine(xF0*xB0);
    //Console.WriteLine(xF2*xB0);
    xF2 = new Gaussian(0.8651, 1.173);
    xB0 = new Gaussian(-4, 2);
    xB0 = new Gaussian(7, 7);
    if (false)
    {
        // experiment: fit xF to the true marginal on a grid via FindxF4
        xF2 = new Gaussian(mean, 1);
        double[] xs = EpTests.linspace(0, 100, 1000);
        double[] logTrue = Util.ArrayInit(xs.Length, i => GaussianOp.LogAverageFactor(xs[i], mean, precPrior));
        Normalize(logTrue);
        xF2 = FindxF4(xs, logTrue, xF2);
        xF2 = Gaussian.FromNatural(-0.85, 0);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        Console.WriteLine("xF = {0} xB = {1}", xF2, xB0);
        Console.WriteLine("x = {0} should be {1}", xF2 * xB0, xExpected);
        Console.WriteLine("proj[T*xB] = {0}", GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior) * xB0);
        double ev = System.Math.Exp(IsPositiveOp.LogAverageFactor(true, xF2) + GaussianOp_Slow.LogAverageFactor(xB0, meanPrior, precPrior) - xF2.GetLogAverageOf(xB0));
        Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    if (false)
    {
        // experiment: fit xF directly to the expected posterior/evidence via FindxF3
        xF2 = new Gaussian(mean, 1);
        xF2 = FindxF3(xExpected, evExpected, meanPrior, precPrior, xF2);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        Console.WriteLine("xF = {0} xB = {1}", xF2, xB0);
        Console.WriteLine("x = {0} should be {1}", xF2 * xB0, xExpected);
        //double ev = Math.Exp(IsPositiveOp.LogAverageFactor(true, xF2) + GaussianOp.LogAverageFactor_slow(xB0, meanPrior, precPrior) - xF2.GetLogAverageOf(xB0));
        //Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    if (false)
    {
        // experiment: search for a self-consistent xF via FindxF2
        xF2 = new Gaussian(-2, 10);
        xF2 = FindxF2(meanPrior, precPrior, xF2);
        xB0 = IsPositiveOp.XAverageConditional(true, xF2);
        xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
        Console.WriteLine("xB = {0}", xB0);
        Console.WriteLine("xF = {0} should be {1}", xF0, xF2);
        return;
    }
    if (false)
    {
        // experiment: given a target xF, search for the matching xB via FindxB
        xF2 = new Gaussian(-3998, 4000);
        xF2 = new Gaussian(0.8651, 1.173);
        xB0 = new Gaussian(-4, 2);
        xB0 = new Gaussian(2000, 1e-5);
        xB0 = FindxB(xB0, meanPrior, precPrior, xF2);
        xF0 = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
        Console.WriteLine("xB = {0}", xB0);
        Console.WriteLine("xF = {0} should be {1}", xF0, xF2);
        return;
    }
    if (false)
    {
        // experiment: given a target xB, search for the matching xF via FindxF
        //xF2 = new Gaussian(-7, 10);
        //xF2 = new Gaussian(-50, 52);
        xB0 = new Gaussian(-1.966, 5.506e-08);
        //xF2 = new Gaussian(-3998, 4000);
        xF0 = FindxF(xB0, meanPrior, precPrior, xF2);
        Gaussian xB2 = IsPositiveOp.XAverageConditional(true, xF0);
        Console.WriteLine("xF = {0}", xF0);
        Console.WriteLine("xB = {0} should be {1}", xB2, xB0);
        return;
    }
    if (true)
    {
        // active experiment: alternate FindxB/FindxF0 to look for a mutually
        // consistent (xF, xB) message pair, then report the implied posterior/evidence
        xF0 = new Gaussian(-3.397e+08, 5.64e+08);
        xF0 = new Gaussian(-2.373e+04, 2.8e+04);
        xB0 = new Gaussian(2.359, 1.392);
        xF0 = Gaussian.FromNatural(-0.84, 0);
        //xF0 = Gaussian.FromNatural(-0.7, 0);
        for (int iter = 0; iter < 10; iter++)
        {
            xB0 = FindxB(xB0, meanPrior, precPrior, xF0);
            Gaussian xFt = GaussianOp_Slow.SampleAverageConditional(xB0, meanPrior, precPrior);
            Console.WriteLine("xB = {0}", xB0);
            Console.WriteLine("xF = {0} should be {1}", xFt, xF0);
            xF0 = FindxF0(xB0, meanPrior, precPrior, xF0);
            Gaussian xBt = IsPositiveOp.XAverageConditional(true, xF0);
            Console.WriteLine("xF = {0}", xF0);
            Console.WriteLine("xB = {0} should be {1}", xBt, xB0);
        }
        Console.WriteLine("x = {0} should be {1}", xF0 * xB0, xExpected);
        double ev = System.Math.Exp(IsPositiveOp.LogAverageFactor(true, xF0) + GaussianOp_Slow.LogAverageFactor(xB0, meanPrior, precPrior) - xF0.GetLogAverageOf(xB0));
        Console.WriteLine("evidence = {0} should be {1}", ev, evExpected);
        return;
    }
    // ----- unreachable while the branch above is if(true): grid scan of the EP
    // energy over (mean, precision) of the messages, dumped to MATLAB for plotting -----
    //var precs = EpTests.linspace(1e-6, 1e-5, 200);
    var precs = EpTests.linspace(xB0.Precision / 11, xB0.Precision, 100);
    //var precs = EpTests.linspace(xF0.Precision/20, xF0.Precision/3, 100);
    precs = EpTests.linspace(1e-9, 1e-5, 100);
    //precs = new double[] { xB0.Precision };
    var ms = EpTests.linspace(xB0.GetMean() - 1, xB0.GetMean() + 1, 100);
    //var ms = EpTests.linspace(xF0.GetMean()-1, xF0.GetMean()+1, 100);
    //precs = EpTests.linspace(1.0/10, 1.0/8, 200);
    ms = EpTests.linspace(2000, 4000, 100);
    //ms = new double[] { xB0.GetMean() };
    Matrix result = new Matrix(precs.Length, ms.Length);
    Matrix result2 = new Matrix(precs.Length, ms.Length);
    //ms = new double[] { 0.7 };
    for (int j = 0; j < ms.Length; j++)
    {
        double maxZ = double.NegativeInfinity;
        double minZ = double.PositiveInfinity;
        // track the message pairs attaining the extreme energies in this column
        Gaussian maxxF = Gaussian.Uniform();
        Gaussian minxF = Gaussian.Uniform();
        Gaussian maxxB = Gaussian.Uniform();
        Gaussian minxB = Gaussian.Uniform();
        Vector v = Vector.Zero(3);
        for (int i = 0; i < precs.Length; i++)
        {
            Gaussian xF = Gaussian.FromMeanAndPrecision(ms[j], precs[i]);
            xF = xF2; // xF held fixed; the grid varies xB below
            Gaussian xB = IsPositiveOp.XAverageConditional(true, xF);
            xB = Gaussian.FromMeanAndPrecision(ms[j], precs[i]);
            //xB = xB0;
            // three-term EP log-evidence: factor terms minus the overlap term
            v[0] = IsPositiveOp.LogAverageFactor(true, xF);
            v[1] = GaussianOp.LogAverageFactor_slow(xB, meanPrior, precPrior);
            //v[1] = GaussianOp_Slow.LogAverageFactor(xB, meanPrior, precPrior);
            v[2] = -xF.GetLogAverageOf(xB);
            double logZ = v.Sum();
            double Z = logZ;
            if (Z > maxZ)
            {
                maxZ = Z;
                maxxF = xF;
                maxxB = xB;
            }
            if (Z < minZ)
            {
                minZ = Z;
                minxF = xF;
                minxB = xB;
            }
            result[i, j] = Z;
            result2[i, j] = IsPositiveOp.LogAverageFactor(true, xF) + xF0.GetLogAverageOf(xB) - xF.GetLogAverageOf(xB);
            //Gaussian xF3 = GaussianOp.SampleAverageConditional_slower(xB, meanPrior, precPrior);
            //result[i, j] = Math.Pow(xF3.Precision - xF.Precision, 2);
            //result2[i, j] = Math.Pow((xF2*xB).Precision - (xF*xB).Precision, 2);
            //result2[i, j] = -xF.GetLogAverageOf(xB);
            //Gaussian xF2 = GaussianOp.SampleAverageConditional_slow(xB, Gaussian.PointMass(0), precPrior);
            Gaussian xMarginal = xF * xB;
            //Console.WriteLine("xF = {0} Z = {1} x = {2}", xF, Z.ToString("g4"), xMarginal);
        }
        double delta = v[1] - v[2];
        //Console.WriteLine("xF = {0} xB = {1} maxZ = {2} x = {3}", maxxF, maxxB, maxZ.ToString("g4"), maxxF*maxxB);
        //Console.WriteLine("xF = {0} maxZ = {1} delta = {2}", maxxF, maxZ.ToString("g4"), delta.ToString("g4"));
        Console.WriteLine("xF = {0} xB = {1} minZ = {2} x = {3}", minxF, minxB, minZ.ToString("g4"), minxF * minxB);
    }
    //TODO: change path for cross platform using
    using (var writer = new MatlabWriter(@"..\..\..\Tests\student.mat"))
    {
        writer.Write("z", result);
        writer.Write("z2", result2);
        writer.Write("precs", precs);
        writer.Write("ms", ms);
    }
}