/// <summary>
/// EP message to 'selector'.
/// </summary>
/// <param name="sample">Incoming message from 'sample'.</param>
/// <param name="probs">Constant value for 'probs'.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the integral of the factor times incoming messages, over all arguments except 'selector'.
/// The formula is <c>int f(selector,x) q(x) dx</c> where <c>x = (sample,probs)</c>.
/// </para></remarks>
public static Discrete SelectorAverageConditional(Discrete sample, Matrix probs, Discrete result)
{
    // Matrix-vector product: message[selector] = sum_sample probs[selector, sample] * p(sample).
    Vector workspace = result.GetWorkspace();
    Vector sampleProbs = sample.GetProbs();
    workspace.SetToProduct(probs, sampleProbs);
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// Evidence message for EP.
/// </summary>
/// <param name="sample">Incoming message from 'sample'.</param>
/// <param name="selector">Incoming message from 'selector'.</param>
/// <param name="probs">Constant value for 'probs'.</param>
/// <returns>Logarithm of the evidence ratio: log Z minus the log-average of the outgoing 'sample' message with the incoming one.</returns>
// A [Skip] implementation returning 0 would be valid only when the rows of 'probs' are normalized;
// the active implementation below handles the general (unnormalized) case.
//[Skip]
//public static double LogEvidenceRatio(Discrete sample, Discrete selector, Matrix probs) { return 0.0; }
public static double LogEvidenceRatio(Discrete sample, Discrete selector, Matrix probs)
{
    // use this if the rows are not normalized
    // Recompute the outgoing message to 'sample' and divide it out of the evidence.
    Discrete toSample = SampleAverageConditional(selector, probs, Discrete.Uniform(sample.Dimension, sample.Sparsity));
    return LogAverageFactor(sample, selector, probs) - toSample.GetLogAverageOf(sample);
}
/// <summary>
/// EP message to 'sample'.
/// </summary>
/// <param name="selector">Incoming message from 'selector'.</param>
/// <param name="probs">Constant value for 'probs'.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the integral of the factor times incoming messages, over all arguments except 'sample'.
/// The formula is <c>int f(sample,x) q(x) dx</c> where <c>x = (selector,probs)</c>.
/// </para></remarks>
public static Discrete SampleAverageConditional(Discrete selector, Matrix probs, Discrete result)
{
    // Vector-matrix product: message[sample] = sum_selector p(selector) * probs[selector, sample].
    Vector workspace = result.GetWorkspace();
    Vector selectorProbs = selector.GetProbs();
    workspace.SetToProduct(selectorProbs, probs);
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// Initialise the form that displays details for a single task.
/// </summary>
/// <param name="taskId">Identifier of the task to display.</param>
/// <param name="relatedExperimentItem">The experiment model this task belongs to.</param>
public FormTaskDetails(string taskId, ExperimentModel relatedExperimentItem)
{
    InitializeComponent();
    this.taskId = taskId;
    //Display the workerId in the form title
    this.Text = "[Task]" + taskId;
    this.relatedExperimentItem = relatedExperimentItem;
    this.labelForTaskId.Text = taskId;
    this.labelModelDetail.Text = relatedExperimentItem.ToString();
    //load data
    labelConfusionMatrix.Text = probabilityText;
    // Look up the experiment model's index, then fetch this task's label probabilities.
    indexOfExperimentItem = MainPage.mainPageForm.currentExperimentSetting.GetExperimenModelIndex(relatedExperimentItem);
    probabilitiesArray = MainPage.mainPageForm.currentExperimentSetting.GetTaskTrueLabel(indexOfExperimentItem, taskId);
    labelHeader = "";
    int labelCount = 0;
    //Initialise the probabilitiesArray
    if (probabilitiesArray != null)
    {
        labelCount = probabilitiesArray.Dimension;
        // Build the "Label1 Label2 ..." column header string.
        Enumerable.Range(1, labelCount).ToList().ForEach(i => labelHeader += "Label" + i + "     ");
        // labelForDataHeader.Text = labelHeader;
        SetUpChart();
        //Only sync the background thread if it is activeLearning
        if (MainPage.mainPageForm.currentExperimentSetting.experimentType == ExperimentType.ActiveLearning)
        {
            backgroundTaskValues.RunWorkerAsync();
        }
    }// End if the probabilitiesArray is not null
} // End Constructor
/// <summary>
/// Builds and runs a single-server queueing simulation
/// (Create -> Queue -> Resource -> Dispose) and returns its statistics.
/// </summary>
public override Statistics run()
{
    Environment env = new Environment();
    //dist
    // NOTE(review): 'distribution' looks like a cumulative distribution over 'valueList'
    // (it is nondecreasing and ends at 1.0) — confirm against the Discrete constructor's contract.
    List<double> valueList = new List<double>() { 1, 2, 3, 4, 5 };
    List<double> distribution = new List<double>() { 0.5, 0.6, 0.7, 0.8, 1.0 };
    Discrete d = new Discrete(valueList, distribution);
    //dist1
    Uniform n = new Uniform(1, 3);
    Distribution dist = new Distribution(d);
    Distribution dist1 = new Distribution(n);
    // Create 20 entities at node 0; dispose at node 1; queue (capacity 1) feeds resource node 2.
    Create c = new Create(env, 0, 20, dist);
    Dispose di = new Dispose(env, 1);
    Queue q = new Queue(env, 3);
    q.Capacity = 1;
    Resource r = new Resource(env, 2, 1, dist1, q);
    // Routing: Create -> Resource -> Dispose (by agent id).
    c.Next_AID.Add("First", 2);
    r.Next_AID.Add("First", 1);
    env.System_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Start_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Setup_Simulation();
    return env.Simulate();
}
/// <summary>
/// Fits a mixture of multivariate Gaussians to <paramref name="observedData"/> using
/// variational message passing and returns the inferred clusters together with the model evidence.
/// </summary>
/// <param name="observedData">Observed vectors; all assumed to have the same dimensionality as the first.</param>
/// <param name="clusters">Number of mixture components.</param>
public InferenceResult<Cluster[]> Infer(Vector[] observedData, int clusters)
{
    var dimensions = observedData.First().Count;
    // Evidence (model-selection) block wraps the whole model.
    var evidence = Variable.Bernoulli(0.5).Named("evidence");
    var evidenceBlock = Variable.If(evidence);
    var clustersRange = new Range(clusters).Named("clustersRange");
    // Broad Gaussian priors on the component means.
    var meansPrior = Variable.Array<Vector>(clustersRange).Named("meansPrior");
    meansPrior[clustersRange] = Variable
        .VectorGaussianFromMeanAndPrecision(
            Vector.Zero(dimensions),
            PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
        .ForEach(clustersRange);
    // Wishart priors on the component precisions.
    var precisionsPrior = Variable.Array<PositiveDefiniteMatrix>(clustersRange).Named("precisionsPrior");
    precisionsPrior[clustersRange] =
        Variable.WishartFromShapeAndRate(100, PositiveDefiniteMatrix.IdentityScaledBy(dimensions, 0.01))
            .ForEach(clustersRange);
    // Symmetric Dirichlet(1,...,1) prior on the mixture weights.
    var initialWeights = Enumerable.Range(0, clusters).Select(_ => 1.0).ToArray();
    var mixtureWeightsPrior = Variable.Dirichlet(clustersRange, initialWeights).Named("mixtureWeightsPrior");
    var dataRange = new Range(observedData.Length).Named("dataRange");
    var data = Variable.Array<Vector>(dataRange).Named("data");
    var latentIndex = Variable.Array<int>(dataRange).Named("latentIndex");
    // Each data point draws a component index, then a vector from that component.
    using (Variable.ForEach(dataRange))
    {
        latentIndex[dataRange] = Variable.Discrete(mixtureWeightsPrior);
        using (Variable.Switch(latentIndex[dataRange]))
        {
            data[dataRange] = Variable.VectorGaussianFromMeanAndPrecision(meansPrior[latentIndex[dataRange]], precisionsPrior[latentIndex[dataRange]]);
        }
    }
    // Random initialisation of the latent assignments to break symmetry between components.
    var zinit = new Discrete[dataRange.SizeAsInt];
    for (int i = 0; i < zinit.Length; i++)
        zinit[i] = Discrete.PointMass(Rand.Int(clustersRange.SizeAsInt), clustersRange.SizeAsInt);
    latentIndex.InitialiseTo(Distribution<int>.Array(zinit));
    evidenceBlock.CloseBlock();
    data.ObservedValue = observedData;
    var ie = new InferenceEngine(new VariationalMessagePassing());
    ie.ShowProgress = false;
    // NOTE(review): mixtureWeightsPosterior is computed but never used below.
    var mixtureWeightsPosterior = ie.Infer(mixtureWeightsPrior);
    var meansPosterior = ie.Infer<VectorGaussian[]>(meansPrior);
    var precisionsPosterior = ie.Infer<Wishart[]>(precisionsPrior);
    var bEvidence = ie.Infer<Bernoulli>(evidence);
    var result = new List<Cluster>();
    // Report each cluster by its mean and covariance (inverse of the mean precision).
    for (var i = 0; i < clusters; i++)
    {
        result.Add(new Cluster(meansPosterior[i].GetMean(), precisionsPosterior[i].GetMean().Inverse()));
    }
    return new InferenceResult<Cluster[]>(bEvidence, result.ToArray());
}
/// <summary>
/// Configures constant values that will not change during the lifetime of the class.
/// </summary>
/// <remarks>
/// This method should be called once only after the class is instantiated. In future, it will likely become
/// the class constructor.
/// </remarks>
public void Reset()
{
    // Create array for 'vint0_uses' backwards messages.
    this.vint0_uses_B = new Discrete[0];
    // Uniform 17-state forward message (each probability is 1/17).
    this.vint0_F = ArrayHelper.MakeUniform<Discrete>(new Discrete(0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647, 0.0588235294117647));
    // Prior placing equal mass on states 0 and 1 only.
    this.vDiscrete0 = new Discrete(0.5, 0.5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0);
    // Message to 'vint0' from Random factor
    this.vint0_F = UnaryOp<int>.RandomAverageConditional<Discrete>(this.vDiscrete0);
}
/// <summary>
/// Evidence message for EP
/// </summary>
/// <param name="sample">Constant value for 'sample'.</param>
/// <param name="p">Incoming message from 'p'.</param>
/// <param name="trialCount">Incoming message from 'trialCount'.</param>
/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
/// <remarks><para>
/// The formula for the result is <c>log(sum_(p,trialCount) p(p,trialCount) factor(sample,trialCount,p))</c>.
/// </para></remarks>
public static double LogAverageFactor(int sample, Beta p, Discrete trialCount)
{
    // Marginalize over the trial count in log space:
    // logZ = log sum_n p(n) Z(sample, p, n), accumulated stably via LogSumExp.
    double logSum = Double.NegativeInfinity;
    for (int count = 0; count < trialCount.Dimension; count++)
    {
        double term = trialCount.GetLogProb(count) + LogAverageFactor(sample, p, count);
        logSum = MMath.LogSumExp(logSum, term);
    }
    return logSum;
}
/// <summary>
/// Evidence message for EP
/// </summary>
/// <param name="sample">Constant value for 'sample'.</param>
/// <param name="size">Incoming message from 'size'.</param>
/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
/// <remarks><para>
/// The formula for the result is <c>log(sum_(size) p(size) factor(sample,size))</c>.
/// </para></remarks>
public static double LogAverageFactor(int sample, Discrete size)
{
    // A uniform draw from 'n' items produces 'sample' with probability 1/n,
    // and only sizes strictly greater than 'sample' can produce it at all.
    double total = 0.0;
    for (int n = sample + 1; n < size.Dimension; n++)
    {
        total += size[n] / n;
    }
    return Math.Log(total);
}
/// <summary>
/// Evidence message for EP
/// </summary>
/// <param name="sum">Constant value for 'Sum'.</param>
/// <param name="a">Incoming message from 'A'.</param>
/// <param name="b">Incoming message from 'B'.</param>
/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
/// <remarks><para>
/// The formula for the result is <c>log(sum_(A,B) p(A,B) factor(Sum,A,B))</c>.
/// </para></remarks>
public static double LogAverageFactor(int sum, Discrete a, Discrete b)
{
    // Delegate to the point-mass overload when 'a' is deterministic.
    if (a.IsPointMass)
    {
        return LogAverageFactor(sum, a.Point, b);
    }
    // Convolution at the observed total: Z = sum_i p(a=i) p(b=sum-i),
    // over indices that are in range for both distributions.
    double total = 0.0;
    for (int i = 0; (i < a.Dimension) && (sum - i < b.Dimension); i++)
    {
        total += a[i] * b[sum - i];
    }
    return Math.Log(total);
}
/// <summary>
/// Evidence message for EP.
/// </summary>
/// <param name="sample">Constant value for 'sample'.</param>
/// <param name="index">Incoming message from 'index'.</param>
/// <param name="probTrue">Constant value for 'probTrue'.</param>
/// <returns><c>log(int f(x) qnotf(x) dx)</c></returns>
/// <remarks><para>
/// The formula for the result is <c>log(int f(x) qnotf(x) dx)</c>
/// where <c>x = (sample,index,probTrue)</c>.
/// </para></remarks>
public static double LogAverageFactor(bool sample, Discrete index, double[] probTrue)
{
    // Marginal probability of a true outcome: sum_i p(index=i) * probTrue[i].
    double probSampleTrue = 0;
    for (int i = 0; i < index.Dimension; i++)
    {
        probSampleTrue += probTrue[i] * index[i];
    }
    // Take the complement when the observed sample is false.
    double prob = sample ? probSampleTrue : 1 - probSampleTrue;
    return Math.Log(prob);
}
/// <summary>
/// Runs a 20-day inventory simulation with state-dependent demand
/// and returns the total profit over the run.
/// </summary>
public double run()
{
    Environment env = new Environment();
    //dist for Create
    Constant Const = new Constant(1);
    Distribution CreateDist = new Distribution(Const);
    //distributions for Inventory
    // NOTE(review): the *_Prob lists are nondecreasing and end at 1.00, so they appear to be
    // cumulative probabilities — confirm against the Discrete constructor's contract.
    List<double> States = new List<double>() { 1, 2, 3 }; //1 for good, 2 for fair, 3 for poor
    List<double> States_Prob = new List<double>() { 0.35, 0.80, 1.00 };
    Discrete StatesDisc = new Discrete(States, States_Prob);
    // Demand distribution when the day's state is "good".
    List<double> State1 = new List<double>() { 40, 50, 60, 70, 80, 90, 100 };
    List<double> State1_Prob = new List<double>() { 0.03, 0.08, 0.23, 0.43, 0.78, 0.93, 1.00 };
    Discrete State1Disc = new Discrete(State1, State1_Prob);
    // Demand distribution when the day's state is "fair".
    List<double> State2 = new List<double>() { 40, 50, 60, 70, 80, 90, 100 };
    List<double> State2_Prob = new List<double>() { 0.10, 0.28, 0.68, 0.88, 0.96, 1.00, 1.00 };
    Discrete State2Disc = new Discrete(State2, State2_Prob);
    // Demand distribution when the day's state is "poor".
    List<double> State3 = new List<double>() { 40, 50, 60, 70, 80, 90, 100 };
    List<double> State3_Prob = new List<double>() { 0.44, 0.66, 0.82, 0.94, 1.00, 1.00, 1.00 };
    Discrete State3Disc = new Discrete(State3, State3_Prob);
    // Map each state to its demand distribution.
    Dictionary<double, Discrete> Demand = new Dictionary<double, Discrete>();
    Demand.Add(1, State1Disc);
    Demand.Add(2, State2Disc);
    Demand.Add(3, State3Disc);
    // Order quantity of 70 units for each of the 20 periods.
    List<Int64> Amount = new List<Int64>();
    for (int i = 0; i < 20; i++)
        Amount.Add(70);
    Create create = new Create(env, 0, 20, CreateDist, Amount);
    Dispose dispose = new Dispose(env, 2);
    Inventory inv = new Inventory(env, 1, new TimeSpan(1), 70, StatesDisc, Demand, 0.33, 0.50, true, 0.05);
    // Routing: Create -> Inventory -> Dispose (by agent id).
    create.Next_AID.Add("First", 1);
    inv.Next_AID.Add("First", 2);
    env.System_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Start_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Setup_Simulation();
    env.Simulate();
    // Accumulate the per-period profit statistic (index 5 — presumably "profit"; verify
    // against the Inventory statistics layout) over all 20 periods.
    double sumOfProfit = 0;
    for (int i = 1; i <= 20; i++)
        sumOfProfit += (double)inv.Statistics.OtherStatistics[i][5].StatisticValue;
    return sumOfProfit;
}
/// <summary>
/// Fits a two-component mixture of bivariate Gaussians to generated data and prints
/// the posterior over the weights, means and precisions (non-EP algorithms only).
/// </summary>
public void Run()
{
    // Define a range for the number of mixture components
    Range k = new Range(2).Named("k");
    // Mixture component means
    VariableArray<Vector> means = Variable.Array<Vector>(k).Named("means");
    means[k] = Variable.VectorGaussianFromMeanAndPrecision(
        Vector.FromArray(0.0,0.0),
        PositiveDefiniteMatrix.IdentityScaledBy(2,0.01)).ForEach(k);
    // Mixture component precisions
    VariableArray<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(k).Named("precs");
    precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2,0.01)).ForEach(k);
    // Mixture weights
    Variable<Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");
    // Create a variable array which will hold the data
    Range n = new Range(300).Named("n");
    VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
    // Create latent indicator variable for each data point
    VariableArray<int> z = Variable.Array<int>(n).Named("z");
    // The mixture of Gaussians model
    using (Variable.ForEach(n))
    {
        z[n] = Variable.Discrete(weights);
        using (Variable.Switch(z[n]))
        {
            data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
        }
    }
    // Attach some generated data
    data.ObservedValue = GenerateData(n.SizeAsInt);
    // Initialise messages randomly so as to break symmetry
    Discrete[] zinit = new Discrete[n.SizeAsInt];
    for (int i = 0; i < zinit.Length; i++)
        zinit[i] = Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt);
    z.InitialiseTo(Distribution<int>.Array(zinit));
    // The inference
    InferenceEngine ie = new InferenceEngine();
    if (!(ie.Algorithm is ExpectationPropagation))
    {
        Console.WriteLine("Dist over pi=" + ie.Infer(weights));
        Console.WriteLine("Dist over means=\n" + ie.Infer(means));
        Console.WriteLine("Dist over precs=\n" + ie.Infer(precs));
    }
    else
        Console.WriteLine("This example is not supported by Expectation Propagation");
}
/// <summary>
/// Runs a four-resource tandem queueing simulation for 300 seconds of simulated time.
/// Note the routing loops back: resource 4 feeds resource 2's queue.
/// </summary>
public void run()
{
    Environment env = new Environment();
    //
    // NOTE(review): 'distribution' looks like a cumulative distribution over 'valueList'
    // (nondecreasing, ends at 1.0); the trailing 0 argument is presumably a seed — confirm.
    List<double> valueList = new List<double>() { 1, 2, 3, 4, 5 };
    List<double> distribution = new List<double>() { 0.5, 0.6, 0.7, 0.8, 1.0 };
    Discrete discrete = new Discrete(valueList, distribution, 0);
    //
    Uniform uniform = new Uniform(1, 3, 0);
    Distribution CreateDist = new Distribution(discrete);
    Distribution ResDist = new Distribution(uniform);
    // Arrivals at node 0 (4 entities), disposal at node 10.
    Create c = new Create(env, 0, 4, CreateDist);
    Dispose dispose = new Dispose(env, 10);
    // Four single-capacity resources, each with a capacity-10 queue.
    Queue q = new Queue(env, 5);
    q.Capacity = 10;
    Resource r = new Resource(env, 1, 1, ResDist, q);
    Queue q2 = new Queue(env, 6);
    q2.Capacity = 10;
    Resource r2 = new Resource(env, 2, 1, ResDist, q2);
    Queue q3 = new Queue(env, 7);
    q3.Capacity = 10;
    Resource r3 = new Resource(env, 3, 1, ResDist, q3);
    Queue q4 = new Queue(env, 8);
    q4.Capacity = 10;
    Resource r4 = new Resource(env, 4, 1, ResDist, q4);
    // Routing chain 1 -> 2 -> 3 -> 4, then back to 2 (cycle).
    c.Next_AID.Add("First", 1);
    r.Next_AID.Add("First", 2);
    r2.Next_AID.Add("First", 3);
    r3.Next_AID.Add("First", 4);
    r4.Next_AID.Add("First", 2);
    env.System_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Start_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    // Bounded run: stop after 300 simulated seconds.
    env.Setup_Simulation(TimeSpan.FromSeconds(300));
    env.Simulate();
}
/// <summary>
/// Runs a simulation where 10000 created entities are probabilistically routed
/// across three parallel single-capacity resources, all draining to one Dispose node.
/// </summary>
public void run()
{
    Environment env = new Environment();
    //dist
    // NOTE(review): 'distribution' looks like a cumulative distribution over 'valueList'
    // (nondecreasing, ends at 1.0); the trailing 0 argument is presumably a seed — confirm.
    List<double> valueList = new List<double>() { 1, 2, 3, 4, 5 };
    List<double> distribution = new List<double>() { 0.5, 0.6, 0.7, 0.8, 1.0 };
    Discrete d = new Discrete(valueList, distribution, 0);
    //dist1
    Uniform n = new Uniform(1, 3, 0);
    Distribution dist = new Distribution(d);
    Distribution dist1 = new Distribution(n);
    // 10000 arrivals; the {0.3, 0.8, 1.0} list appears to be cumulative branch
    // probabilities for the three routes registered below — confirm.
    Create c = new Create(env, 0, 10000, dist,null,new List<double>(){0.3,0.8,1.0});
    Dispose di = new Dispose(env, 1);
    Queue q = new Queue(env, 3);
    q.Capacity = 1;
    Resource r = new Resource(env, 2, 1, dist1, q);
    Queue q2 = new Queue(env,5);
    q2.Capacity = 1;
    Resource r2 = new Resource(env, 6, 1, dist1, q2);
    Queue q3 = new Queue(env, 10);
    q3.Capacity = 1;
    Resource r3 = new Resource(env, 7, 1, dist1, q3);
    // Three outgoing branches from Create; each resource routes to the Dispose node.
    c.Next_AID.Add("First", 2);
    c.Next_AID.Add("Second", 6);
    c.Next_AID.Add("Third", 7);
    r.Next_AID.Add("First", 1);
    r2.Next_AID.Add("First", 1);
    r3.Next_AID.Add("First", 1);
    env.System_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Start_Time = new DateTime(1970, 1, 1, 0, 0, 0);
    env.Setup_Simulation();
    env.Simulate();
}
} // End Constructor

/// <summary>
/// Paint the label Posterior Probability graph
/// </summary>
/// <param name="probabilityMatrix">Distribution whose dimension gives the number of labels to plot.</param>
/// <param name="isFirstTimeToAdd">True to create and configure the chart series; false to reuse the existing one.</param>
private void PaintLabelPosteriorProbabilityGraph(Discrete probabilityMatrix , bool isFirstTimeToAdd = false)
{
    int labelCount = probabilityMatrix.Dimension;
    Series currentSeries;
    //initial the graph at the first time
    if (isFirstTimeToAdd)
    {
        currentSeries = new Series();
        currentSeries.ChartArea = "Default";
        currentSeries.ChartType = SeriesChartType.Column;
        currentSeries.IsVisibleInLegend = false;
        currentSeries.IsXValueIndexed = true;
        this.workerConfusionMatrixGraph.ChartAreas[0].AxisX.Interval = 1;
        this.workerConfusionMatrixGraph.Series.Add(currentSeries);
    }
    else //obtain the current series
    {
        currentSeries = workerConfusionMatrixGraph.Series[0];
    }
    probabilityText = "";
    //for each label, add the corresponding datapoint
    // NOTE(review): values are read from the field 'probabilitiesArray', not from the
    // 'probabilityMatrix' parameter (which is only used for its Dimension) — confirm this
    // is intentional and the two always agree in dimension.
    for (int currentLabelPos = 0; currentLabelPos < labelCount; currentLabelPos++)
    {
        double pointValue = probabilitiesArray[currentLabelPos];
        DataPoint currentDataPoint = new DataPoint(currentLabelPos, pointValue);
        currentDataPoint.AxisLabel = "Label " + (currentLabelPos + 1) ;
        currentSeries.Points.Add(currentDataPoint);
        probabilityText += String.Format("{0:0.0000}    ", pointValue);
    }//end for goldLabel
    textBoxTaskValue.Text = labelHeader + Environment.NewLine + probabilityText;
}
/// <summary>
/// Initial EP message to 'sample': uniform over the support {0, ..., trialCount}.
/// </summary>
/// <param name="trialCount">Maximum number of trials; the sample can take trialCount + 1 values.</param>
/// <returns>A uniform <see cref="Discrete"/> over trialCount + 1 states.</returns>
public static Discrete SampleAverageConditionalInit(int trialCount)
    => Discrete.Uniform(trialCount + 1);
private static void discrete_cdf_test()
//****************************************************************************80
//
//  Purpose:
//
//    DISCRETE_CDF_TEST tests DISCRETE_CDF.
//
//  Licensing:
//
//    This code is distributed under the GNU LGPL license.
//
//  Modified:
//
//    24 January 2007
//
//  Author:
//
//    John Burkardt
//
{
    // A = number of outcomes; b = (unnormalized) weights of each outcome.
    const int A = 6;
    double[] b = { 1.0, 2.0, 6.0, 2.0, 4.0, 1.0 };
    int i;
    int seed = 123456789;
    Console.WriteLine("");
    Console.WriteLine("DISCRETE_CDF_TEST");
    Console.WriteLine("  DISCRETE_CDF evaluates the Discrete CDF;");
    Console.WriteLine("  DISCRETE_CDF_INV inverts the Discrete CDF.");
    Console.WriteLine("  DISCRETE_PDF evaluates the Discrete PDF;");
    Console.WriteLine("");
    Console.WriteLine("  PDF parameter A = " + A + "");
    typeMethods.r8vec_print(A, b, "  PDF parameter B:");
    // Abort if the parameters fail the library's validity check.
    if (!Discrete.discrete_check(A, b))
    {
        Console.WriteLine("");
        Console.WriteLine("DISCRETE_CDF_TEST - Fatal error!");
        Console.WriteLine("  The parameters are not legal.");
        return;
    }
    Console.WriteLine("");
    Console.WriteLine("       X            PDF           CDF            CDF_INV");
    Console.WriteLine("");
    // Round-trip check: sample x, evaluate PDF and CDF, then invert the CDF back to x2.
    for (i = 1; i <= 10; i++)
    {
        int x = Discrete.discrete_sample(A, b, ref seed);
        double pdf = Discrete.discrete_pdf(x, A, b);
        double cdf = Discrete.discrete_cdf(x, A, b);
        int x2 = Discrete.discrete_cdf_inv(cdf, A, b);
        Console.WriteLine("  "
                          + x.ToString(CultureInfo.InvariantCulture).PadLeft(12) + "  "
                          + pdf.ToString(CultureInfo.InvariantCulture).PadLeft(12) + "  "
                          + cdf.ToString(CultureInfo.InvariantCulture).PadLeft(12) + "  "
                          + x2.ToString(CultureInfo.InvariantCulture).PadLeft(12) + "");
    }
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/message_doc[@name="SampleAverageConditional(double, Discrete, Discrete)"]/*'/>
public static Discrete SampleAverageConditional(double p, Discrete trialCount, Discrete result)
{
    // Delegate to the point-mass overload when the trial count is deterministic.
    if (trialCount.IsPointMass)
    {
        return SampleAverageConditional(p, trialCount.Point, result);
    }
    // result must range from 0 to nMax
    if (result.Dimension < trialCount.Dimension)
    {
        throw new ArgumentException("result.Dimension (" + result.Dimension + ") < n.Dimension (" + trialCount.Dimension + ")");
    }
    Vector workspace = result.GetWorkspace();
    double logP = Math.Log(p);
    double logQ = Math.Log(1 - p);
    // p(sample=k) = sum_(n>=k) p(n) nchoosek(n,k) p^k (1-p)^(n-k)
    //             = (p/(1-p))^k 1/k! sum_(n>=k) p(n) n!/(n-k)! (1-p)^n
    for (int k = 0; k < result.Dimension; k++)
    {
        double inner = 0.0;
        for (int n = k; n < trialCount.Dimension; n++)
        {
            inner += trialCount[n] * Math.Exp(MMath.ChooseLn(n, k) + n * logQ);
        }
        workspace[k] = Math.Exp(k * (logP - logQ)) * inner;
    }
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// Tests VMP inference on a two-component mixture of bivariate Gaussians, checking that
/// the inferred mixture weight is close to the weight used to generate the data.
/// </summary>
public void MixtureOfMultivariateGaussians()
{
    // Define a range for the number of mixture components
    Range k = new Range(2).Named("k");
    // Mixture component means
    VariableArray<Vector> means = Variable.Array<Vector>(k).Named("means");
    means[k] = Variable.VectorGaussianFromMeanAndPrecision(Vector.Zero(2), PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);
    // Mixture component precisions
    VariableArray<PositiveDefiniteMatrix> precs = Variable.Array<PositiveDefiniteMatrix>(k).Named("precs");
    precs[k] = Variable.WishartFromShapeAndScale(100.0, PositiveDefiniteMatrix.IdentityScaledBy(2, 0.01)).ForEach(k);
    // Mixture weights
    Variable<Vector> weights = Variable.Dirichlet(k, new double[] { 1, 1 }).Named("weights");
    // Create a variable array which will hold the data
    Range n = new Range(300).Named("n");
    VariableArray<Vector> data = Variable.Array<Vector>(n).Named("x");
    // Create latent indicator variable for each data point
    VariableArray<int> z = Variable.Array<int>(n).Named("z");
    // The mixture of Gaussians model
    using (Variable.ForEach(n))
    {
        z[n] = Variable.Discrete(weights);
        using (Variable.Switch(z[n]))
        {
            data[n] = Variable.VectorGaussianFromMeanAndPrecision(means[z[n]], precs[z[n]]);
        }
    }
    // Attach some generated data
    double truePi = 0.6;
    data.ObservedValue = GenerateData(n.SizeAsInt, truePi);
    // Initialise messages randomly to break symmetry
    VariableArray<Discrete> zInit = Variable.Array<Discrete>(n).Named("zInit");
    bool useObservedValue = true;
    if (useObservedValue)
    {
        zInit.ObservedValue = Util.ArrayInit(n.SizeAsInt, i => Discrete.PointMass(Rand.Int(k.SizeAsInt), k.SizeAsInt));
    }
    else
    {
        // This approach doesn't work, because Infer.NET notices that Rand.Int is stochastic and thinks that it should perform message-passing here.
        using (Variable.ForEach(n))
        {
            var randk = Variable<int>.Factor(new Func<int, int>(Rand.Int), (Variable<int>)k.Size);
            randk.SetValueRange(k);
            zInit[n] = Variable<Discrete>.Factor(Discrete.PointMass, randk, (Variable<int>)k.Size);
        }
    }
    z[n].InitialiseTo(zInit[n]);
    // The inference
    InferenceEngine ie = new InferenceEngine();
    ie.Algorithm = new VariationalMessagePassing();
    //ie.Compiler.GenerateInMemory = false;
    //ie.NumberOfIterations = 200;
    Dirichlet wDist = (Dirichlet)ie.Infer(weights);
    Vector wEstMean = wDist.GetMean();
    object meansActual = ie.Infer(means);
    Console.WriteLine("means = ");
    Console.WriteLine(meansActual);
    var precsActual = ie.Infer<IList<Wishart>>(precs);
    Console.WriteLine("precs = ");
    Console.WriteLine(precsActual);
    Console.WriteLine("w = {0} should be {1}", wEstMean, Vector.FromArray(truePi, 1 - truePi));
    //Console.WriteLine(StringUtil.JoinColumns("z = ", ie.Infer(z)));
    // Components may come out in either order, so accept either weight matching truePi.
    Assert.True(
        MMath.AbsDiff(wEstMean[0], truePi) < 0.05 ||
        MMath.AbsDiff(wEstMean[1], truePi) < 0.05);
}
/// <summary>
/// Tests a two-component mixture of Bernoulli vectors on a perfectly separated dataset,
/// and checks that re-running inference after resetting the iteration count reproduces
/// the same posterior (engine-reset regression test).
/// </summary>
public void BernoulliMixtureTest()
{
    // N data points, D binary features, K mixture components.
    int N = 10, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    // Per-component, per-feature Bernoulli parameters with Beta(1,1) priors.
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.Beta(1, 1).ForEach(k, d);
    VariableArray2D<bool> x = Variable.Array<bool>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            x[n, d] = Variable.Bernoulli(p[c[n], d]);
        }
    }
    InferenceEngine engine = new InferenceEngine();
    // First half of the data is (true,false), second half is (false,true).
    bool[,] data = new bool[N, D];
    int N1 = N / 2;
    int i = 0;
    for (; i < N1; i++)
    {
        data[i, 0] = true;
        data[i, 1] = false;
    }
    for (; i < N; i++)
    {
        data[i, 0] = false;
        data[i, 1] = true;
    }
    x.ObservedValue = data;
    // Random initialisation of the assignments to break symmetry.
    Discrete[] cInit = new Discrete[N];
    for (int j = 0; j < N; j++)
    {
        double r = Rand.Double();
        cInit[j] = new Discrete(r, 1 - r);
    }
    c.InitialiseTo(Distribution<int>.Array(cInit));
    // Capture the posterior after exactly one iteration for the reset check below.
    engine.NumberOfIterations = 1;
    var pExpected = engine.Infer(p);
    engine.NumberOfIterations = engine.Algorithm.DefaultNumberOfIterations;
    DistributionArray<Discrete> cPost = engine.Infer<DistributionArray<Discrete>>(c);
    Console.WriteLine(cPost);
    DistributionArray2D<Beta> pPost = engine.Infer<DistributionArray2D<Beta>>(p);
    Console.WriteLine(pPost);
    // test resetting inference
    engine.NumberOfIterations = 1;
    var pActual = engine.Infer<Diffable>(p);
    Assert.True(pActual.MaxDiff(pExpected) < 1e-10);
}
/// <summary>Evidence message for EP.</summary>
/// <param name="sample">Incoming message from <c>sample</c>.</param>
/// <param name="selector">Incoming message from <c>selector</c>.</param>
/// <param name="probs">Constant value for <c>probs</c>.</param>
/// <returns>Logarithm of the factor's average value across the given argument distributions.</returns>
/// <remarks>
/// <para>The formula for the result is <c>log(sum_(sample,selector) p(sample,selector) factor(sample,selector,probs))</c>.</para>
/// </remarks>
public static double LogAverageFactor(Discrete sample, Discrete selector, Matrix probs)
    // Z = selectorProbs' * probs * sampleProbs, computed as a quadratic form.
    => Math.Log(probs.QuadraticForm(selector.GetProbs(), sample.GetProbs()));
/// <summary>
/// Tests inference of the number of balls in an urn from noisy colour observations
/// (each observation's colour is flipped with probability 0.2), comparing the posterior
/// over the count against precomputed expected values.
/// </summary>
public void BallCountingNoisy4()
{
    // Variables describing the population
    int maxBalls = 8;
    Range ball = new Range(maxBalls).Named("ball");
    Variable<int> numBalls = Variable.DiscreteUniform(maxBalls + 1).Named("numBalls");
    VariableArray<bool> isBlue = Variable.Array<bool>(ball).Named("isBlue");
    isBlue[ball] = Variable.Bernoulli(0.5).ForEach(ball);
    // Variables describing the observations
    Range draw = new Range(10).Named("draw");
    VariableArray<bool> observedBlue = Variable.Array<bool>(draw).Named("observedBlue");
    using (Variable.ForEach(draw))
    {
        // Draw a ball uniformly among the first numBalls balls.
        Variable<int> ballIndex = Variable.DiscreteUniform(ball, numBalls).Named("ballIndex");
        if (true)
        {
            using (Variable.Switch(ballIndex))
            {
                // With probability 0.2 the observed colour is the opposite of the true colour.
                Variable<bool> switchedColor = Variable.Bernoulli(0.2).Named("switchedColor");
                using (Variable.If(switchedColor))
                {
                    observedBlue[draw] = !isBlue[ballIndex];
                }
                using (Variable.IfNot(switchedColor))
                {
                    observedBlue[draw] = isBlue[ballIndex];
                }
            }
        }
        else
        {
            // an equivalent model that gives the same results
            Variable<bool> switchedColor = Variable.Bernoulli(0.2).Named("switchedColor");
            using (Variable.If(switchedColor))
            {
                using (Variable.Switch(ballIndex))
                    observedBlue[draw] = !isBlue[ballIndex];
            }
            using (Variable.IfNot(switchedColor))
            {
                using (Variable.Switch(ballIndex))
                    observedBlue[draw] = isBlue[ballIndex];
            }
        }
    }
    // All ten draws observed blue.
    bool[] data = { true, true, true, true, true, true, true, true, true, true };
    observedBlue.ObservedValue = data;
    // Inference queries about the program
    // -----------------------------------
    InferenceEngine engine = new InferenceEngine();
    Discrete numUsersActual = engine.Infer<Discrete>(numBalls);
    Console.WriteLine("numUsers = {0}", numUsersActual);
    // Reference posterior over the ball count (count 0 is impossible given observations).
    Discrete numUsersExpected = new Discrete(0, 0.463, 0.2354, 0.1137, 0.06589, 0.04392, 0.0322, 0.02521, 0.02068);
    Assert.True(numUsersExpected.MaxDiff(numUsersActual) < 1e-4);
}
/// <summary>
/// Evidence message for VMP.
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'.</param>
/// <param name="a">Incoming message from 'a'.</param>
/// <param name="b">Constant value for 'b'.</param>
/// <returns>Zero, because a deterministic factor contributes no evidence under VMP.</returns>
public static double AverageLogFactor(Bernoulli isGreaterThan, Discrete a, int b)
    => 0.0;
/// <summary>
/// VMP message to 'a'
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'. Must be a proper distribution. If uniform, the result will be uniform.</param>
/// <param name="b">Incoming message from 'b'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'a'.
/// Because the factor is deterministic, 'isGreaterThan' is integrated out before taking the logarithm.
/// The formula is <c>exp(sum_(b) p(b) log(sum_isGreaterThan p(isGreaterThan) factor(isGreaterThan,a,b)))</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="isGreaterThan"/> is not a proper distribution</exception>
public static Discrete AAverageLogarithm([SkipIfUniform] Bernoulli isGreaterThan, Discrete b, Discrete result)
{
    // Delegate to the point-mass overloads when either input is deterministic.
    if (b.IsPointMass)
        return AAverageLogarithm(isGreaterThan, b.Point, result);
    if (isGreaterThan.IsPointMass)
        return AAverageLogarithm(isGreaterThan.Point, b, result);
    // f(a,b) = p(c=1) I(a > b) + p(c=0) I(a <= b)
    // message to a = exp(sum_b q(b) log f(a,b))
    Vector aProbs = result.GetWorkspace();
    double logProbTrue = isGreaterThan.GetLogProbTrue();
    double logProbFalse = isGreaterThan.GetLogProbFalse();
    for (int i = 0; i < aProbs.Count; i++)
    {
        // Split the sum over b at b = i: for b < i the factor is true (a > b),
        // for b >= i it is false (a <= b).
        double sum = 0.0;
        int j = 0;
        for (; (j < i) && (j < b.Dimension); j++)
        {
            sum += logProbTrue*b[j];
        }
        for (; j < b.Dimension; j++)
        {
            sum += logProbFalse*b[j];
        }
        aProbs[i] = Math.Exp(sum);
    }
    result.SetProbs(aProbs);
    return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/message_doc[@name="LogEvidenceRatio(int, Beta, Discrete)"]/*'/>
public static double LogEvidenceRatio(int sample, Beta p, Discrete trialCount)
    // With an observed sample, the evidence ratio equals the average factor value.
    => LogAverageFactor(sample, p, trialCount);
/// <summary>
/// EP message to 'b'
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'. Must be a proper distribution. If uniform, the result will be uniform.</param>
/// <param name="a">Constant value for 'a'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'b' as the random arguments are varied.
/// The formula is <c>proj[p(b) sum_(isGreaterThan) p(isGreaterThan) factor(isGreaterThan,a,b)]/p(b)</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="isGreaterThan"/> is not a proper distribution</exception>
public static Discrete BAverageConditional([SkipIfUniform] Bernoulli isGreaterThan, int a, Discrete result)
{
    Vector workspace = result.GetWorkspace();
    double probTrue = isGreaterThan.GetProbTrue();
    double probFalse = 1 - probTrue;
    // Values of b below 'a' make the factor true; the rest make it false.
    for (int b = 0; b < workspace.Count; b++)
    {
        workspace[b] = (a > b) ? probTrue : probFalse;
    }
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// Evidence message for EP.
/// </summary>
/// <param name="sample">Incoming message from 'sample'.</param>
/// <returns>Zero: this message contributes nothing to the log-evidence.</returns>
public static double LogEvidenceRatio(Discrete sample)
    => 0.0;
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/message_doc[@name="TrialCountAverageLogarithm(int, double, Discrete)"]/*'/>
public static Discrete TrialCountAverageLogarithm(int sample, double p, Discrete result)
    // VMP message coincides with the EP message for this case.
    => TrialCountAverageConditional(sample, p, result);
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/message_doc[@name="TrialCountAverageConditional(Discrete, double, Discrete)"]/*'/>
public static Discrete TrialCountAverageConditional(Discrete sample, double p, Discrete result)
{
    // Delegate to the point-mass overload when the sample is deterministic.
    if (sample.IsPointMass)
    {
        return TrialCountAverageConditional(sample.Point, p, result);
    }
    // n must range from 0 to sampleMax
    if (result.Dimension < sample.Dimension)
    {
        throw new ArgumentException("result.Dimension (" + result.Dimension + ") < sample.Dimension (" + sample.Dimension + ")");
    }
    Vector workspace = result.GetWorkspace();
    double logP = Math.Log(p);
    double logQ = Math.Log(1 - p);
    // p(n) = sum_(k<=n) p(k) nchoosek(n,k) p^k (1-p)^(n-k)
    for (int n = 0; n < result.Dimension; n++)
    {
        double inner = 0.0;
        for (int k = 0; k <= n; k++)
        {
            inner += sample[k] * Math.Exp(MMath.ChooseLn(n, k) + k * (logP - logQ));
        }
        workspace[n] = Math.Exp(n * logQ) * inner;
    }
    result.SetProbs(workspace);
    return result;
}
/// <summary>EP message to <c>selector</c>.</summary>
/// <param name="sample">Incoming message from <c>sample</c>.</param>
/// <param name="probs">Constant value for <c>probs</c>.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns>
/// <paramref name="result" />
/// </returns>
/// <remarks>
/// <para>The outgoing message is a distribution matching the moments of <c>selector</c> as the random arguments are varied. The formula is <c>proj[p(selector) sum_(sample) p(sample) factor(sample,selector,probs)]/p(selector)</c>.</para>
/// </remarks>
public static Discrete SelectorAverageConditional(Discrete sample, Matrix probs, Discrete result)
{
    // Matrix-vector product: message[selector] = sum_sample probs[selector, sample] * p(sample).
    Vector workspace = result.GetWorkspace();
    workspace.SetToProduct(probs, sample.GetProbs());
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// VMP message to 'b'
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'. Must be a proper distribution. If uniform, the result will be uniform.</param>
/// <param name="a">Constant value for 'a'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'b' with 'isGreaterThan' integrated out.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="isGreaterThan"/> is not a proper distribution</exception>
public static Discrete BAverageLogarithm([SkipIfUniform] Bernoulli isGreaterThan, int a, Discrete result)
{
    // VMP and EP messages coincide for this argument pattern, so delegate.
    return BAverageConditional(isGreaterThan, a, result);
}
/// <summary>EP message to <c>sample</c>.</summary>
/// <param name="selector">Incoming message from <c>selector</c>.</param>
/// <param name="probs">Constant value for <c>probs</c>.</param>
/// <param name="result">Modified to contain the outgoing message.</param>
/// <returns>
/// <paramref name="result" />
/// </returns>
/// <remarks>
/// <para>The outgoing message is a distribution matching the moments of <c>sample</c> as the random arguments are varied. The formula is <c>proj[p(sample) sum_(selector) p(selector) factor(sample,selector,probs)]/p(sample)</c>.</para>
/// </remarks>
public static Discrete SampleAverageConditional(Discrete selector, Matrix probs, Discrete result)
{
    // Left-multiply the selector probabilities by the probability matrix to get the sample message.
    Vector workspace = result.GetWorkspace();
    workspace.SetToProduct(selector.GetProbs(), probs);
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// Evidence message for VMP
/// </summary>
/// <param name="isGreaterThan">Constant value for 'isGreaterThan'.</param>
/// <param name="a">Incoming message from 'a'.</param>
/// <param name="b">Constant value for 'b'.</param>
/// <returns>The evidence contribution.</returns>
/// <remarks><para>
/// Delegates to the EP evidence computation for this argument pattern.
/// </para></remarks>
public static double AverageLogFactor(bool isGreaterThan, Discrete a, int b)
{
    return LogAverageFactor(isGreaterThan, a, b);
}
// Infers string templates and object names jointly from formatted text observations.
// Builds a model where each text is StringFormat(template, name) with a latent choice
// of template and object, then checks the posteriors by printing them.
public void StringFormatTest4()
{
    // number of templates
    var T = new Range(2).Named("T");
    var templates = Variable.Array<string>(T).Named("templates");
    templates[T] = Variable.Random(StringDistribution.Any()).ForEach(T);
    // number of objects
    var J = new Range(2).Named("J");
    var names = Variable.Array<string>(J).Named("names");
    names[J] = Variable.Random(WordString()).ForEach(J);
    // number of strings
    var N = new Range(3).Named("N");
    var objectNumber = Variable.Array<int>(N).Named("objectNumber");
    var templateNumber = Variable.Array<int>(N).Named("templateNumber");
    var texts = Variable.Array<string>(N).Named("b");
    using (Variable.ForEach(N))
    {
        // Each text picks an object uniformly and copies its name via a Switch gate.
        objectNumber[N] = Variable.DiscreteUniform(J);
        var name = Variable.New<string>().Named("name");
        using (Variable.Switch(objectNumber[N]))
        {
            name.SetTo(Variable.Copy(names[objectNumber[N]]));
        }
        // Each text also picks a template uniformly and formats the name into it.
        templateNumber[N] = Variable.DiscreteUniform(T);
        using (Variable.Switch(templateNumber[N]))
        {
            texts[N] = Variable.StringFormat(templates[templateNumber[N]], name);
        }
    }
    // Anchor one name and one template assignment to remove label-switching ambiguity.
    Variable.ConstrainEqual(names[0], "John");
    Variable.ConstrainEqual(templateNumber[0], 0); // break symmetry
    // Initialise templateNumber to break symmetry
    Rand.Restart(0);
    var tempNumInit = new Discrete[N.SizeAsInt];
    for (var i = 0; i < tempNumInit.Length; i++)
    {
        tempNumInit[i] = Discrete.PointMass(Rand.Int(T.SizeAsInt), T.SizeAsInt);
    }
    templateNumber.InitialiseTo(Distribution<int>.Array(tempNumInit));
    var engine = new InferenceEngine();
    engine.NumberOfIterations = 15;
    engine.Compiler.RecommendedQuality = QualityBand.Experimental;
    engine.Compiler.GivePriorityTo(typeof(ReplicateOp_NoDivide));
    engine.Compiler.UnrollLoops = true;
    texts.ObservedValue = new string[] { "My name is John", "I'm John", "I'm Tom" };
    Console.WriteLine("templateNumber: \n" + engine.Infer(templateNumber));
    Console.WriteLine("objectNumber: \n" + engine.Infer(objectNumber));
    Console.WriteLine("templates: \n" + engine.Infer(templates));
    Console.WriteLine("names: \n" + engine.Infer(names));
}
/// <summary>
/// VMP message to 'a'
/// </summary>
/// <param name="isGreaterThan">Constant value for 'isGreaterThan'.</param>
/// <param name="b">Constant value for 'b'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'a' conditioned on the given values.
/// </para></remarks>
public static Discrete AAverageLogarithm(bool isGreaterThan, int b, Discrete result)
{
    // VMP and EP messages coincide for this argument pattern, so delegate.
    return AAverageConditional(isGreaterThan, b, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="UnaryOp{DomainType}"]/message_doc[@name="RandomMaxConditional(Discrete)"]/*'/>
public static UnnormalizedDiscrete RandomMaxConditional([SkipIfUniform] Discrete dist)
{
    // Wrap the normalized distribution in its unnormalized representation.
    return UnnormalizedDiscrete.FromDiscrete(dist);
}
/// <summary>
/// Evidence message for EP
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'.</param>
/// <param name="a">Constant value for 'a'.</param>
/// <param name="b">Incoming message from 'b'.</param>
/// <param name="to_isGreaterThan">Outgoing message to 'isGreaterThan'.</param>
/// <returns>Logarithm of the factor's average value across the given argument distributions</returns>
/// <remarks><para>
/// The formula for the result is <c>log(sum_(isGreaterThan,b) p(isGreaterThan,b) factor(isGreaterThan,a,b))</c>.
/// </para></remarks>
public static double LogAverageFactor(Bernoulli isGreaterThan, int a, Discrete b, [Fresh] Bernoulli to_isGreaterThan)
{
    // The evidence is the inner product of the incoming and outgoing messages on isGreaterThan.
    return to_isGreaterThan.GetLogAverageOf(isGreaterThan);
}
// Fits a 2-component, 2-dimensional Poisson mixture to synthetic data and checks that
// the inferred rate posteriors recover the generating means. Also verifies that resetting
// NumberOfIterations back to 1 reproduces the 1-iteration result (inference reset).
public void PoissonMixtureTest()
{
    Rand.Restart(1);
    int N = 40, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    // Per-component, per-dimension Poisson rates with a broad Gamma prior.
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.GammaFromMeanAndVariance(10, 100).ForEach(k, d);
    VariableArray2D<int> x = Variable.Array<int>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        // Uniform mixture assignment, then Poisson emission gated on the assignment.
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            x[n, d] = Variable.Poisson(p[c[n], d]);
        }
    }
    //n.AddAttribute(new Sequential());
    //c.AddAttribute(new DivideMessages(false));
    InferenceEngine engine = new InferenceEngine();
    //engine.Algorithm = new VariationalMessagePassing();
    int[,] data = new int[N, D];
    int N1 = N / 2;
    double[,] mean = new double[K, D];
    for (int i = 0; i < K; i++)
    {
        for (int j = 0; j < D; j++)
        {
            //mean[i, j] = i+j;
            // Well-separated generating means: 10/20 for one component, 20/30 for the other.
            mean[i, j] = (i + j + 1) * 10;
        }
    }
    // Generate data alternating between clusters, and initialise c to the true labels
    // to break mixture-label symmetry.
    Discrete[] cInit = new Discrete[N];
    for (int i = 0; i < N; i++)
    {
        int cluster = i % 2;
        for (int j = 0; j < D; j++)
        {
            data[i, j] = Rand.Poisson(mean[cluster, j]);
        }
        double r = cluster;
        cInit[i] = new Discrete(1 - r, r);
    }
    x.ObservedValue = data;
    c.InitialiseTo(Distribution<int>.Array(cInit));
    // Snapshot the 1-iteration posterior for the reset check below.
    engine.NumberOfIterations = 1;
    var pPost1 = engine.Infer(p);
    engine.NumberOfIterations = 200;
    Gamma[,] pPost = engine.Infer<Gamma[,]>(p);
    for (int i = 0; i < pPost.GetLength(0); i++)
    {
        for (int j = 0; j < pPost.GetLength(1); j++)
        {
            double mActual = pPost[i, j].GetMean();
            double mExpected = mean[i, j];
            Console.WriteLine(String.Format("pPost[{0}][{1}] = {2} should be {3}", i, j, mActual, mExpected));
            Assert.True(MMath.AbsDiff(mExpected, mActual, 1e-6) < 0.3);
        }
    }
    // test resetting inference
    engine.NumberOfIterations = 1;
    var pPost2 = engine.Infer<Diffable>(p);
    Assert.True(pPost2.MaxDiff(pPost1) < 1e-10);
}
/// <summary>EP initializer for the message to <c>sample</c>: uniform over the same dimension as <paramref name="trialCount"/>.</summary>
/// <param name="trialCount">Incoming message from <c>trialCount</c>; only its dimension is used.</param>
/// <returns>A uniform Discrete of dimension <c>trialCount.Dimension</c>.</returns>
public static Discrete SampleAverageConditionalInit([IgnoreDependency] Discrete trialCount)
{
    return Discrete.Uniform(trialCount.Dimension);
}
// Fits a 2-component Bernoulli mixture where each component's per-dimension probability
// is induced by thresholding a Gaussian, then checks that resetting NumberOfIterations
// to 1 reproduces the 1-iteration posterior. Restores GateEnterOp.ForceProper afterwards.
public void BernoulliMixtureGaussianTest()
{
    int N = 10, D = 2, K = 2;
    Range n = new Range(N).Named("n");
    Range k = new Range(K).Named("k");
    Range d = new Range(D).Named("d");
    VariableArray2D<double> p = Variable.Array<double>(k, d).Named("p");
    p[k, d] = Variable.GaussianFromMeanAndVariance(0, 1).ForEach(k, d);
    VariableArray2D<bool> x = Variable.Array<bool>(n, d).Named("x");
    VariableArray<int> c = Variable.Array<int>(n).Named("c");
    using (Variable.ForEach(n))
    {
        c[n] = Variable.Discrete(k, 0.5, 0.5);
        using (Variable.Switch(c[n]))
        {
            // Bernoulli emission via a thresholded noisy Gaussian around the component mean.
            x[n, d] = (Variable.GaussianFromMeanAndVariance(p[c[n], d], 1.0) > 0);
        }
    }
    // Save and force the ForceProper flag so gate messages stay proper during this test.
    bool geForceProper = GateEnterOp<double>.ForceProper;
    try
    {
        GateEnterOp<double>.ForceProper = true;
        InferenceEngine engine = new InferenceEngine(); //new VariationalMessagePassing());
        engine.Compiler.GivePriorityTo(typeof(IsPositiveOp_Proper)); // needed to avoid improper messages in EP
        // Two clusters with complementary binary patterns: (true,false) and (false,true).
        bool[,] data = new bool[N, D];
        int N1 = N / 2;
        int i = 0;
        for (; i < N1; i++)
        {
            data[i, 0] = true;
            data[i, 1] = false;
        }
        for (; i < N; i++)
        {
            data[i, 0] = false;
            data[i, 1] = true;
        }
        x.ObservedValue = data;
        // Random initialisation of the assignments to break mixture-label symmetry.
        Discrete[] cInit = new Discrete[N];
        for (int j = 0; j < N; j++)
        {
            double r = Rand.Double();
            cInit[j] = new Discrete(r, 1 - r);
        }
        c.InitialiseTo(Distribution<int>.Array(cInit));
        // Snapshot the 1-iteration posterior for the reset check below.
        engine.NumberOfIterations = 1;
        var pExpected = engine.Infer(p);
        engine.NumberOfIterations = engine.Algorithm.DefaultNumberOfIterations;
        DistributionArray<Discrete> cPost = engine.Infer<DistributionArray<Discrete>>(c);
        Console.WriteLine(cPost);
        DistributionArray2D<Gaussian> pPost = engine.Infer<DistributionArray2D<Gaussian>>(p);
        Console.WriteLine(pPost);
        // test resetting inference
        engine.NumberOfIterations = 1;
        var pActual = engine.Infer<Diffable>(p);
        Assert.True(pActual.MaxDiff(pExpected) < 1e-10);
    }
    finally
    {
        // Restore the global flag so other tests are unaffected.
        GateEnterOp<double>.ForceProper = geForceProper;
    }
}
private static void discrete_sample_test()
//****************************************************************************80
//
//  Purpose:
//
//    DISCRETE_SAMPLE_TEST tests DISCRETE_SAMPLE.
//
//  Licensing:
//
//    This code is distributed under the GNU LGPL license.
//
//  Modified:
//
//    24 January 2007
//
//  Author:
//
//    John Burkardt
//
{
    // A = number of outcomes; B = unnormalized outcome weights.
    const int A = 6;
    const int SAMPLE_NUM = 1000;
    double[] b = { 1.0, 2.0, 6.0, 2.0, 4.0, 1.0 };
    int i;
    int seed = 123456789;
    int[] x = new int[SAMPLE_NUM];
    Console.WriteLine("");
    Console.WriteLine("DISCRETE_SAMPLE_TEST");
    Console.WriteLine("  DISCRETE_MEAN computes the Discrete mean;");
    Console.WriteLine("  DISCRETE_SAMPLE samples the Discrete distribution;");
    Console.WriteLine("  DISCRETE_VARIANCE computes the Discrete variance;");
    Console.WriteLine("");
    Console.WriteLine("  PDF parameter A = " + A + "");
    typeMethods.r8vec_print(A, b, "  PDF parameter B:");
    // Validate the parameters before sampling.
    if (!Discrete.discrete_check(A, b))
    {
        Console.WriteLine("");
        Console.WriteLine("DISCRETE_SAMPLE_TEST - Fatal error!");
        Console.WriteLine("  The parameters are not legal.");
        return;
    }
    // Analytic mean/variance for comparison against the sample statistics below.
    double mean = Discrete.discrete_mean(A, b);
    double variance = Discrete.discrete_variance(A, b);
    Console.WriteLine("");
    Console.WriteLine("  PDF mean =     " + mean + "");
    Console.WriteLine("  PDF variance = " + variance + "");
    // Draw SAMPLE_NUM samples, advancing the seed in place.
    for (i = 0; i < SAMPLE_NUM; i++)
    {
        x[i] = Discrete.discrete_sample(A, b, ref seed);
    }
    mean = typeMethods.i4vec_mean(SAMPLE_NUM, x);
    variance = typeMethods.i4vec_variance(SAMPLE_NUM, x);
    int xmax = typeMethods.i4vec_max(SAMPLE_NUM, x);
    int xmin = typeMethods.i4vec_min(SAMPLE_NUM, x);
    Console.WriteLine("");
    Console.WriteLine("  Sample size =     " + SAMPLE_NUM + "");
    Console.WriteLine("  Sample mean =     " + mean + "");
    Console.WriteLine("  Sample variance = " + variance + "");
    Console.WriteLine("  Sample maximum =  " + xmax + "");
    Console.WriteLine("  Sample minimum =  " + xmin + "");
}
// Compares the quadratic and fast (linear) IndexOfMaximum message operators against
// a Monte Carlo estimate of p(argmax) for Gaussian inputs; both must agree within 1e-2.
public void IndexOfMaximumFastTest()
{
    int n = 5;
    Range item = new Range(n).Named("item");
    var priors = Variable<Gaussian>.Array(item);
    // Increasing mean and variance per index so the argmax distribution is non-trivial.
    priors.ObservedValue = Util.ArrayInit(n, i => Gaussian.FromMeanAndVariance(i * 0.5, i));
    var x = Variable.Array<double>(item).Named("x");
    x[item] = Variable<double>.Random(priors[item]);
    var y = Variable<int>.Factor(MMath.IndexOfMaximumDouble, x);
    InferenceEngine engine = new InferenceEngine();
    engine.ShowProgress = false;
    string format = "f4";
    // Default (quadratic) operator.
    var yActual = engine.Infer<Discrete>(y);
    Console.WriteLine("Quadratic: {0}", yActual.ToString(format));
    // Monte Carlo estimate
    Rand.Restart(0);
    DiscreteEstimator est = new DiscreteEstimator(n);
    for (int iter = 0; iter < 100000; iter++)
    {
        double[] samples = Util.ArrayInit(n, i => priors.ObservedValue[i].Sample());
        int argmax = MMath.IndexOfMaximumDouble(samples);
        est.Add(argmax);
    }
    var yExpected = est.GetDistribution(Discrete.Uniform(n));
    Console.WriteLine("Sampling:  {0}", yExpected.ToString(format));
    Assert.True(yExpected.MaxDiff(yActual) < 1e-2);
    // Re-run with the fast (linear-time) operator and require the same accuracy.
    engine.Compiler.GivePriorityTo(typeof(IndexOfMaximumOp_Fast));
    yActual = engine.Infer<Discrete>(y);
    Console.WriteLine("Linear:    {0}", yActual.ToString(format));
    Assert.True(yExpected.MaxDiff(yActual) < 1e-2);
    // Optional manual comparison of the two fast approximations (off by default).
    bool compareApproximation = false;
    if (compareApproximation)
    {
        var yPost2 = IndexOfMaximumOp_Fast.IndexOfMaximumDoubleAverageConditional(priors.ObservedValue, Discrete.Uniform(n));
        Console.WriteLine(yPost2);
        var yPost3 = IndexOfMaximumOp_Fast.IndexOfMaximumDoubleAverageConditional2(priors.ObservedValue, Discrete.Uniform(n));
        Console.WriteLine(yPost3);
    }
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BinomialOp"]/message_doc[@name="LogAverageFactor(Discrete, Discrete)"]/*'/>
public static double LogAverageFactor(Discrete sample, [Fresh] Discrete to_sample)
{
    // The evidence is the inner product of the incoming and outgoing messages on sample.
    return to_sample.GetLogAverageOf(sample);
}
/// <summary>
/// EP message to 'b'
/// </summary>
/// <param name="isGreaterThan">Constant value for 'isGreaterThan'.</param>
/// <param name="a">Constant value for 'a'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'b' conditioned on the given values.
/// </para></remarks>
public static Discrete BAverageConditional(bool isGreaterThan, int a, Discrete result)
{
    // Lift the constant to a point-mass Bernoulli and reuse the distributional overload.
    return BAverageConditional(Bernoulli.PointMass(isGreaterThan), a, result);
}
public DiscreteTests()
{
    // Construct the Discrete instance under test.
    // NOTE(review): Name and DefaultUnit are presumably shared constants/fields on the
    // (base) test class — confirm where they are declared.
    _discrete = new Discrete<int>(Name, DefaultUnit);
}
/// <summary>
/// VMP message to 'isGreaterThan'
/// </summary>
/// <param name="a">Incoming message from 'a'.</param>
/// <param name="b">Incoming message from 'b'.</param>
/// <returns>The outgoing VMP message to the 'isGreaterThan' argument</returns>
/// <remarks><para>
/// The outgoing message is a distribution matching the moments of 'isGreaterThan' as the random arguments are varied.
/// </para></remarks>
public static Bernoulli IsGreaterThanAverageLogarithm(Discrete a, Discrete b)
{
    // VMP and EP messages coincide for this argument pattern, so delegate.
    return IsGreaterThanAverageConditional(a, b);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BernoulliFromDiscreteOp"]/message_doc[@name="IndexAverageConditional(Bernoulli, double[], Discrete)"]/*'/>
public static Discrete IndexAverageConditional([SkipIfUniform] Bernoulli sample, double[] ProbTrue, Discrete result)
{
    if (result == default(Discrete))
    {
        result = Discrete.Uniform(ProbTrue.Length);
    }
    // p(Y) = ProbTrue[Y]*p(X=true) + (1-ProbTrue[Y])*p(X=false)
    //      = ProbTrue[Y]*(2p-1) + (1-p), computed via scalar product and sum below.
    Vector workspace = result.GetWorkspace();
    double probOfTrue = sample.GetProbTrue();
    workspace.SetTo(ProbTrue);
    workspace.SetToProduct(workspace, 2.0 * probOfTrue - 1.0);
    workspace.SetToSum(workspace, 1.0 - probOfTrue);
    result.SetProbs(workspace);
    return result;
}
/// <summary>
/// VMP message to 'b'
/// </summary>
/// <param name="isGreaterThan">Incoming message from 'isGreaterThan'. Must be a proper distribution. If uniform, the result will be uniform.</param>
/// <param name="a">Incoming message from 'a'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the exponential of the average log-factor value, where the average is over all arguments except 'b'.
/// Because the factor is deterministic, 'isGreaterThan' is integrated out before taking the logarithm.
/// The formula is <c>exp(sum_(a) p(a) log(sum_isGreaterThan p(isGreaterThan) factor(isGreaterThan,a,b)))</c>.
/// </para></remarks>
/// <exception cref="ImproperMessageException"><paramref name="isGreaterThan"/> is not a proper distribution</exception>
public static Discrete BAverageLogarithm([SkipIfUniform] Bernoulli isGreaterThan, Discrete a, Discrete result)
{
    // Point-mass arguments reduce to the simpler overloads.
    if (a.IsPointMass) return BAverageLogarithm(isGreaterThan, a.Point, result);
    if (isGreaterThan.IsPointMass) return BAverageLogarithm(isGreaterThan.Point, a, result);
    // f(a,b) = p(c=1) I(a > b) + p(c=0) I(a <= b)
    // message to b = exp(sum_a q(a) log f(a,b))
    Vector bProbs = result.GetWorkspace();
    double logProbTrue = isGreaterThan.GetLogProbTrue();
    double logProbFalse = isGreaterThan.GetLogProbFalse();
    for (int j = 0; j < bProbs.Count; j++)
    {
        double sum = 0.0;
        int i = 0;
        // a <= j contributes log p(c=0); a > j contributes log p(c=1).
        for (; (i <= j) && (i < a.Dimension); i++)
        {
            sum += logProbFalse*a[i];
        }
        for (; i < a.Dimension; i++)
        {
            sum += logProbTrue*a[i];
        }
        bProbs[j] = Math.Exp(sum);
    }
    result.SetProbs(bProbs);
    return result;
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BernoulliFromDiscreteOp"]/message_doc[@name="IndexAverageLogarithm(Bernoulli, double[], Discrete)"]/*'/>
public static Discrete IndexAverageLogarithm(Bernoulli sample, double[] ProbTrue, Discrete result)
{
    if (result == default(Discrete))
    {
        result = Discrete.Uniform(ProbTrue.Length);
    }
    // E[sum_k I(Y=k) (X*log(ProbTrue[k]) + (1-X)*log(1-ProbTrue[k]))]
    // = sum_k I(Y=k) (p(X=true)*log(ProbTrue[k]) + p(X=false)*log(1-ProbTrue[k]))
    // p(Y=k) =propto ProbTrue[k]^p(X=true) (1-ProbTrue[k])^p(X=false)
    Vector workspace = result.GetWorkspace();
    double probOfTrue = sample.GetProbTrue();
    workspace.SetTo(ProbTrue);
    workspace.SetToFunction(workspace, q => Math.Pow(q, probOfTrue) * Math.Pow(1.0 - q, 1.0 - probOfTrue));
    result.SetProbs(workspace);
    return result;
}
// Evidence message for VMP with both 'a' and 'b' random: not supported for this factor;
// NotSupportedMessage (declared on the class) explains the restriction.
public static double AverageLogFactor(bool isGreaterThan, Discrete a, Discrete b)
{
    throw new NotSupportedException(NotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BernoulliFromDiscreteOp"]/message_doc[@name="LogEvidenceRatio(bool, Discrete, double[])"]/*'/>
public static double LogEvidenceRatio(bool sample, Discrete index, double[] probTrue)
{
    // With an observed sample, the evidence ratio equals the average factor value.
    return LogAverageFactor(sample, index, probTrue);
}
// VMP message to 'b' with observed 'isGreaterThan' and random 'a': not supported for this
// factor; NotSupportedMessage (declared on the class) explains the restriction.
public static Discrete BAverageLogarithm(bool isGreaterThan, Discrete a, Discrete result)
{
    throw new NotSupportedException(NotSupportedMessage);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="BernoulliFromDiscreteOp"]/message_doc[@name="IndexAverageLogarithm(bool, double[], Discrete)"]/*'/>
public static Discrete IndexAverageLogarithm(bool sample, double[] ProbTrue, Discrete result)
{
    // With an observed sample the VMP message reduces to exact conditioning.
    return IndexConditional(sample, ProbTrue, result);
}
/// <summary>
/// VMP message to 'b'
/// </summary>
/// <param name="isGreaterThan">Constant value for 'isGreaterThan'.</param>
/// <param name="a">Constant value for 'a'.</param>
/// <param name="result">Modified to contain the outgoing message</param>
/// <returns><paramref name="result"/></returns>
/// <remarks><para>
/// The outgoing message is the factor viewed as a function of 'b' conditioned on the given values.
/// </para></remarks>
public static Discrete BAverageLogarithm(bool isGreaterThan, int a, Discrete result)
{
    // VMP and EP messages coincide for this argument pattern, so delegate.
    return BAverageConditional(isGreaterThan, a, result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IsGreaterThanOp"]/message_doc[@name="LogEvidenceRatio(bool, Discrete, int)"]/*'/>
public static double LogEvidenceRatio(bool isGreaterThan, Discrete a, int b)
{
    // With an observed isGreaterThan, the evidence ratio equals the average factor value.
    return LogAverageFactor(isGreaterThan, a, b);
}
/// <summary>
/// Background Thread for updating the worker confusion matrix.
/// Polls the main page every 500ms while an experiment is running, refreshing
/// probabilitiesArray and reporting progress so the UI redraws the graph.
/// </summary>
/// <param name="sender">The BackgroundWorker that raised this DoWork event.</param>
/// <param name="e">Event arguments (unused).</param>
private void backgroundWorkerUpdateConfusionMatrix_DoWork(object sender, DoWorkEventArgs e)
{
    //Get the Backgroundworker
    System.ComponentModel.BackgroundWorker worker;
    worker = (System.ComponentModel.BackgroundWorker)sender;
    //listen the main page while the experiment is running
    // NOTE(review): outer loop spins without sleeping while the experiment is paused
    // (isExperimentRunning false but not complete) — confirm this busy-wait is intended.
    while (!MainPage.mainPageForm.isExperimentComplete)
    {
        while (MainPage.mainPageForm.isExperimentRunning)
        {
            probabilitiesArray = MainPage.mainPageForm.currentExperimentSetting.GetTaskTrueLabel(indexOfExperimentItem, taskId);
            try
            {
                //notify the graph to change
                worker.ReportProgress(0, null);
            }
            catch (Exception)
            {
                // ReportProgress throws once the worker is cancelled/completed;
                // exit the inner loop instead of crashing the background thread.
                break;
            }
            //Check update after a period of time
            System.Threading.Thread.Sleep(500);
        } //End while Experiment is running
    } //End While Experiment is completed
} //End BackgroundUpdatingThread Method
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IsGreaterThanOp"]/message_doc[@name="AAverageConditional(Bernoulli, Discrete, Discrete)"]/*'/>
public static Discrete AAverageConditional([SkipIfUniform] Bernoulli isGreaterThan, Discrete b, Discrete result)
{
    // A point-mass b reduces to the integer-b overload.
    if (b.IsPointMass)
    {
        return(AAverageConditional(isGreaterThan, b.Point, result));
    }
    Vector aProbs = result.GetWorkspace();
    double probTrue = isGreaterThan.GetProbTrue();
    double probFalse = 1 - probTrue;
    // For each candidate value i of a:
    //   p(a=i) = p(c=1)*P(b < i) + p(c=0)*P(b >= i)
    for (int i = 0; i < aProbs.Count; i++)
    {
        // sum1 = P(b < i): mass of b strictly below i.
        double sum1 = 0.0;
        int j = 0;
        for (; (j < i) && (j < b.Dimension); j++)
        {
            sum1 += b[j];
        }
        // sum0 = P(b >= i): remaining mass of b.
        double sum0 = 0.0;
        for (; j < b.Dimension; j++)
        {
            sum0 += b[j];
        }
        aProbs[i] = probTrue * sum1 + probFalse * sum0;
    }
    result.SetProbs(aProbs);
    return(result);
}
/// <include file='FactorDocs.xml' path='factor_docs/message_op_class[@name="IsGreaterThanOp"]/message_doc[@name="AAverageConditional(Bernoulli, int, Discrete)"]/*'/>
public static Discrete AAverageConditional([SkipIfUniform] Bernoulli isGreaterThan, int b, Discrete result)
{
    Vector workspace = result.GetWorkspace();
    double probTrue = isGreaterThan.GetProbTrue();
    double probFalse = 1 - probTrue;
    // Values above b get the "true" probability; values at or below b get the "false" probability.
    for (int index = 0; index < workspace.Count; index++)
    {
        if (index > b)
        {
            workspace[index] = probTrue;
        }
        else
        {
            workspace[index] = probFalse;
        }
    }
    result.SetProbs(workspace);
    return result;
}
/// <summary>VMP initializer for the message to <c>sample</c>: uniform over the same dimension as <paramref name="probs"/>.</summary>
/// <param name="probs">Incoming message from <c>probs</c>; only its dimension is used.</param>
/// <returns>A uniform Discrete of dimension <c>probs.Dimension</c>.</returns>
public static Discrete SampleAverageLogarithmInit([IgnoreDependency] Dirichlet probs)
{
    return Discrete.Uniform(probs.Dimension);
}