/// <summary>
/// Initializes the CBCC model with a number of communities.
/// </summary>
/// <param name="taskCount">The number of tasks.</param>
/// <param name="labelCount">The number of labels.</param>
/// <param name="communityCount">The number of communities.</param>
public virtual void CreateModel(int taskCount, int labelCount, int communityCount)
{
    // Evidence variable and block, used for computing model evidence.
    Evidence = Variable<bool>.Random(this.EvidencePrior);
    var evidenceBlock = Variable.If(Evidence);
    CommunityCount = communityCount;
    CommunityProbPriorObserved = Dirichlet.Symmetric(communityCount, CommunityPseudoCount);
    DefineVariablesAndRanges(taskCount, labelCount);
    DefineGenerativeProcess();
    DefineInferenceEngine();
    evidenceBlock.CloseBlock();

    if (ScoreMeanParameters == null)
    {
        // Default score matrix prior, shared across communities.
        var scoreMatrixPrior = GetScoreMatrixPrior();
        CommunityScoreMatrixPriorObserved = Util.ArrayInit(
            CommunityCount,
            comm => Util.ArrayInit(labelCount, lab => new VectorGaussian(scoreMatrixPrior[lab])));
    }
    else
    {
        // Per-community prior: the mean of the score row for true label 'lab' has Item1 at the
        // diagonal entry and Item2 elsewhere, with an isotropic precision per community.
        CommunityScoreMatrixPriorObserved = Util.ArrayInit(
            CommunityCount,
            comm => Util.ArrayInit(
                labelCount,
                lab => VectorGaussian.FromMeanAndPrecision(
                    Vector.FromArray(
                        Util.ArrayInit(labelCount, lab1 => lab == lab1 ? ScoreMeanParameters[comm].Item1 : ScoreMeanParameters[comm].Item2)),
                    PositiveDefiniteMatrix.IdentityScaledBy(LabelCount, ScorePrecisionParameters[comm]))));
    }
}
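// Hedged, self-contained sketch (not part of the original source): this mirrors only the
// else-branch above, showing how one row prior of the community score matrix is built with
// VectorGaussian.FromMeanAndPrecision. The class name, method name, and the example values
// (labelCount = 3, onDiag = 1.0, offDiag = 0.0, precision = 4.0) are assumptions for
// illustration; the namespaces assume the current Microsoft.ML.Probabilistic release.
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Utilities;

static class ScoreRowPriorSketch
{
    internal static VectorGaussian[] BuildRowPriors(int labelCount, double onDiag, double offDiag, double precision)
    {
        // One VectorGaussian per true label: the mean puts onDiag at the true label's entry and
        // offDiag elsewhere, with an isotropic precision over the labelCount score entries.
        return Util.ArrayInit(labelCount, lab =>
            VectorGaussian.FromMeanAndPrecision(
                Vector.FromArray(Util.ArrayInit(labelCount, lab1 => lab == lab1 ? onDiag : offDiag)),
                PositiveDefiniteMatrix.IdentityScaledBy(labelCount, precision)));
    }

    static void Main()
    {
        VectorGaussian[] rowPriors = BuildRowPriors(3, 1.0, 0.0, 4.0);
        System.Console.WriteLine(rowPriors[0]);   // prior for the score row of true label 0
    }
}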
/// <summary>
/// Generates a data set from a particular true model.
/// </summary>
public Vector[] GenerateData(int nData)
{
    Vector trueM1 = Vector.FromArray(2.0, 3.0);
    Vector trueM2 = Vector.FromArray(7.0, 5.0);
    PositiveDefiniteMatrix trueP1 = new PositiveDefiniteMatrix(
        new double[,] { { 3.0, 0.2 }, { 0.2, 2.0 } });
    PositiveDefiniteMatrix trueP2 = new PositiveDefiniteMatrix(
        new double[,] { { 2.0, 0.4 }, { 0.4, 4.0 } });
    VectorGaussian trueVG1 = VectorGaussian.FromMeanAndPrecision(trueM1, trueP1);
    VectorGaussian trueVG2 = VectorGaussian.FromMeanAndPrecision(trueM2, trueP2);
    double truePi = 0.6;
    Bernoulli trueB = new Bernoulli(truePi);

    // Restart the Infer.NET random number generator
    Rand.Restart(12347);
    Vector[] data = new Vector[nData];
    for (int j = 0; j < nData; j++)
    {
        // Ancestral sampling: pick a component, then sample from that component's Gaussian.
        bool bSamp = trueB.Sample();
        data[j] = bSamp ? trueVG1.Sample() : trueVG2.Sample();
    }

    return data;
}
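// Hedged, self-contained check (not part of the original source): with truePi = 0.6 the marginal
// mean of the mixture above is 0.6*(2,3) + 0.4*(7,5) = (4.0, 3.8). This sketch redraws samples
// with the same parameters and prints the empirical mean for comparison; the namespaces assume
// the current Microsoft.ML.Probabilistic release.
using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;

static class MixtureMeanCheck
{
    static void Main()
    {
        Rand.Restart(12347);
        var vg1 = VectorGaussian.FromMeanAndPrecision(
            Vector.FromArray(2.0, 3.0),
            new PositiveDefiniteMatrix(new double[,] { { 3.0, 0.2 }, { 0.2, 2.0 } }));
        var vg2 = VectorGaussian.FromMeanAndPrecision(
            Vector.FromArray(7.0, 5.0),
            new PositiveDefiniteMatrix(new double[,] { { 2.0, 0.4 }, { 0.4, 4.0 } }));
        var mixing = new Bernoulli(0.6);
        int n = 20000;
        double sum0 = 0, sum1 = 0;
        for (int j = 0; j < n; j++)
        {
            Vector x = mixing.Sample() ? vg1.Sample() : vg2.Sample();
            sum0 += x[0];
            sum1 += x[1];
        }
        Console.WriteLine("Empirical mean: ({0:F2}, {1:F2})", sum0 / n, sum1 / n);   // roughly (4.0, 3.8)
    }
}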
/// <summary>
/// Samples from a zero-mean VectorGaussian whose precision is <paramref name="precision"/> scaled by <paramref name="scaling"/>.
/// </summary>
public static Vector VectorGaussianScaled(double scaling, PositiveDefiniteMatrix precision)
{
    var scaledPrec = new PositiveDefiniteMatrix(precision);
    scaledPrec.Scale(scaling);
    return VectorGaussian.FromMeanAndPrecision(Vector.Zero(precision.Cols), scaledPrec).Sample();
}
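// Hedged, self-contained usage sketch (not part of the original source): the same call pattern
// as VectorGaussianScaled, showing the effect of the scaling — a precision of s*P tightens the
// samples around zero by a factor of sqrt(s) relative to precision P. The namespaces assume the
// current Microsoft.ML.Probabilistic release.
using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;

static class ScaledPrecisionSketch
{
    static void Main()
    {
        var precision = PositiveDefiniteMatrix.Identity(2);
        var scaledPrec = new PositiveDefiniteMatrix(precision);
        scaledPrec.Scale(100.0);                     // precision 100*I, so per-coordinate std dev is 0.1
        Vector x = VectorGaussian.FromMeanAndPrecision(Vector.Zero(precision.Cols), scaledPrec).Sample();
        Console.WriteLine(x);                        // typically within roughly ±0.3 of the origin
    }
}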
private void VectorGaussianModel()
{
    // Model definition in factor form: a VectorGaussian with a random mean and a Wishart-distributed precision.
    Vector mm = Vector.FromArray(0.1, 0.2);
    Vector m = Factor.Random(VectorGaussian.FromMeanAndPrecision(mm, PositiveDefiniteMatrix.Identity(2)));
    PositiveDefiniteMatrix p = Factor.Random(Wishart.FromShapeAndRate(2, 1.0, 1.0));
    Vector c = Factor.VectorGaussian(m, p);
    InferNet.Infer(c, nameof(c));
}
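// Hedged sketch (not part of the original source): the same generative structure written with
// the public modelling API instead of the Factor/InferNet form used above. The choice of
// VariationalMessagePassing and the namespaces (current Microsoft.ML.Probabilistic release)
// are assumptions for illustration.
using System;
using Microsoft.ML.Probabilistic.Algorithms;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Models;

static class VectorGaussianModelSketch
{
    static void Main()
    {
        Vector mm = Vector.FromArray(0.1, 0.2);
        var m = Variable.Random(VectorGaussian.FromMeanAndPrecision(mm, PositiveDefiniteMatrix.Identity(2)));
        var p = Variable.Random(Wishart.FromShapeAndRate(2, 1.0, 1.0));
        var c = Variable.VectorGaussianFromMeanAndPrecision(m, p);
        var engine = new InferenceEngine(new VariationalMessagePassing());
        Console.WriteLine(engine.Infer(c));
    }
}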
private void MatrixVectorProductModel()
{
    // Deterministic matrix-vector product of a fixed matrix with a VectorGaussian-distributed vector.
    Matrix a = new Matrix(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });
    Vector m = Vector.FromArray(0.1, 1.2, 2.3);
    Vector b = Factor.Random(VectorGaussian.FromMeanAndPrecision(m, PositiveDefiniteMatrix.Identity(3)));
    Vector c = Factor.Product(a, b);
    InferNet.Infer(c, nameof(c));
}
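// Hedged, self-contained check (not part of the original source): the deterministic quantity that
// Factor.Product(a, b) represents is the ordinary matrix-vector product, so the prior mean of c
// above is a * m = (1*0.1 + 2*1.2 + 3*2.3, 4*0.1 + 5*1.2 + 6*2.3) = (9.4, 20.2). The namespaces
// assume the current Microsoft.ML.Probabilistic release.
using System;
using Microsoft.ML.Probabilistic.Math;

static class MatrixVectorProductCheck
{
    static void Main()
    {
        Matrix a = new Matrix(new double[,] { { 1, 2, 3 }, { 4, 5, 6 } });
        Vector m = Vector.FromArray(0.1, 1.2, 2.3);
        Vector product = Vector.Zero(2);
        for (int i = 0; i < 2; i++)
        {
            for (int j = 0; j < 3; j++)
            {
                product[i] += a[i, j] * m[j];
            }
        }
        Console.WriteLine(product);   // expect (9.4, 20.2)
    }
}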
// Addition by Guy Templeton: get the log evidence from the learned mixing coefficients.
public double GetLogEvidence()
{
    Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
    Range classes = new Range(numOfClasses);
    IfBlock block = Variable.If(evidence);
    VectorGaussian[] wObserved = trainModel.wPrior.ObservedValue;
    VectorGaussian[] empty = Util.ArrayInit(numOfClasses, c => (c == 0)
        ? VectorGaussian.PointMass(Vector.Zero(numOfFeatures))
        : VectorGaussian.FromMeanAndPrecision(Vector.Zero(numOfFeatures), PositiveDefiniteMatrix.Identity(numOfFeatures)));
    block.CloseBlock();
    InferenceEngine engine = new InferenceEngine();
    return engine.Infer<Bernoulli>(evidence).LogOdds;
}
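// Hedged, self-contained sketch (not part of the original source): the minimal model-evidence
// pattern used above — wrap the model inside Variable.If(evidence), observe data inside the
// block, close it, and read the posterior LogOdds of the evidence variable. The toy model
// (a single Gaussian observation) and the namespaces (current Microsoft.ML.Probabilistic
// release) are assumptions for illustration.
using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Models;

static class EvidenceSketch
{
    static void Main()
    {
        Variable<bool> evidence = Variable.Bernoulli(0.5).Named("evidence");
        IfBlock block = Variable.If(evidence);
        Variable<double> x = Variable.GaussianFromMeanAndVariance(0.0, 1.0).Named("x");
        block.CloseBlock();
        x.ObservedValue = 0.5;   // the observation inside the block is what contributes to the evidence

        InferenceEngine engine = new InferenceEngine();
        double logEvidence = engine.Infer<Bernoulli>(evidence).LogOdds;
        Console.WriteLine("Log evidence: {0:F4}", logEvidence);   // log N(0.5; 0, 1), about -1.0439
    }
}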
private VectorGaussian[] InitializePrior(int numClasses, int numFeatures)
{
    // Class 0 is pinned to a point mass at zero; the remaining classes get broad zero-mean priors.
    return Util.ArrayInit(numClasses, c => (c == 0)
        ? VectorGaussian.PointMass(Vector.Zero(numFeatures))
        : VectorGaussian.FromMeanAndPrecision(Vector.Zero(numFeatures), PositiveDefiniteMatrix.Identity(numFeatures)));
}
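// Hedged, self-contained sketch (not part of the original source): builds the same prior array
// for 3 classes and 2 features and prints which entries are point masses. Pinning class 0 to a
// point mass at zero is a common way to anchor one class in a multiclass model; the namespaces
// assume the current Microsoft.ML.Probabilistic release.
using System;
using Microsoft.ML.Probabilistic.Distributions;
using Microsoft.ML.Probabilistic.Math;
using Microsoft.ML.Probabilistic.Utilities;

static class ClassPriorSketch
{
    static void Main()
    {
        VectorGaussian[] priors = Util.ArrayInit(3, c => (c == 0)
            ? VectorGaussian.PointMass(Vector.Zero(2))
            : VectorGaussian.FromMeanAndPrecision(Vector.Zero(2), PositiveDefiniteMatrix.Identity(2)));
        for (int c = 0; c < priors.Length; c++)
        {
            Console.WriteLine("Class {0}: IsPointMass = {1}", c, priors[c].IsPointMass);
        }
    }
}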