public void Init(LLNAModel model)
{
    // Initialize phi uniformly over the K topics (and its log counterpart).
    var phiVal = 1.0 / model.K;
    var logPhiVal = -Math.Log(model.K);
    Phi.SetValue(phiVal);
    LogPhi.SetValue(logPhiVal);

    Zeta = 10;
    int i;
    for (i = 0; i < model.K - 1; i++)
    {
        Nu[i] = 10;
        Lambda[i] = 0;
    }

    // The last component is pinned to zero (see the note below).
    Nu[i] = 0;
    Lambda[i] = 0;

    NIter = 0;
    LHood = 0;

    // Scratch buffers used by the optimizers.
    for (i = 0; i < 4; i++)
    {
        _temp.Add(Vector<double>.Build.Dense(model.K - 1));
    }
}
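// A note on the K - 1 sizing above: in the logistic-normal parameterization this code
// appears to follow (as in Blei & Lafferty's correlated topic model), the K-th component
// of eta is pinned at zero for identifiability, so only K - 1 free (Lambda, Nu) pairs
// are optimized and the trailing entry stays at zero:
//
//   eta ~ N(mu, Sigma),  eta in R^{K-1},  eta_K := 0
//   theta_i = exp(eta_i) / (1 + sum_{j=1}^{K-1} exp(eta_j))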
public void OptimizePhi(LLNAModel model, CorpusDocument doc)
{
    // Compute the phi proportions in log space, normalizing each word's row
    // with a running log-sum-exp to avoid underflow.
    for (int n = 0; n < doc.NbTerms; n++)
    {
        double logSumN = 0;
        var word = doc.GetWord(n);
        for (int i = 0; i < model.K; i++)
        {
            LogPhi[n, i] = Lambda[i] + model.LogBeta[i, word];
            logSumN = i == 0 ? LogPhi[n, i] : MathHelper.LogSum(logSumN, LogPhi[n, i]);
        }
        for (int i = 0; i < model.K; i++)
        {
            LogPhi[n, i] -= logSumN;
            Phi[n, i] = Math.Exp(LogPhi[n, i]);
        }
    }
}
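// OptimizePhi leans on MathHelper.LogSum for numerical stability; its body is defined
// elsewhere in the repo. A minimal sketch of the standard log-sum-exp identity it
// presumably implements (an assumption, not the repo's actual code): factor out the
// larger argument so Math.Exp never overflows.
public static class MathHelperSketch
{
    // Returns log(exp(logA) + exp(logB)) without overflow for large magnitudes.
    public static double LogSum(double logA, double logB)
    {
        if (logA < logB)
        {
            (logA, logB) = (logB, logA);
        }
        return logA + Math.Log(1 + Math.Exp(logB - logA));
    }
}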
public void OptimizeNu(LLNAModel model, CorpusDocument doc)
{
    // Each nu_i is optimized independently with Newton's method in log space,
    // which keeps the variance positive without an explicit constraint.
    for (int i = 0; i < model.K - 1; i++)
    {
        double initNu = 10;
        double nuI, df, d2f;
        int iter = 0;
        double logNuI = Math.Log(initNu);
        do
        {
            iter++;
            nuI = Math.Exp(logNuI);
            if (double.IsNaN(nuI))
            {
                // The step diverged; restart from a larger initial value.
                initNu *= 2;
                logNuI = Math.Log(initNu);
                nuI = initNu;
            }
            // First and second derivatives of the bound with respect to nu_i.
            df = -(model.InvCovariance[i, i] * 0.5)
                 - (0.5 * (doc.Total / Zeta) * Math.Exp(Lambda[i] + nuI / 2))
                 + (0.5 * (1 / nuI));
            d2f = -(0.25 * (doc.Total / Zeta) * Math.Exp(Lambda[i] + nuI / 2))
                  - (0.5 * (1 / (nuI * nuI)));
            logNuI -= (df * nuI) / (d2f * nuI * nuI + df * nuI);
        } while (Math.Abs(df) > NEWTON_THRESH);
        Nu[i] = Math.Exp(logNuI);
    }
}
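// The derivatives above can be read back into the per-coordinate objective being
// maximized (lambda and zeta held fixed); writing N for doc.Total:
//
//   f(nu_i) = -0.5 * InvSigma[i,i] * nu_i
//             - (N / zeta) * exp(lambda_i + nu_i / 2)
//             + 0.5 * log(nu_i)
//
// df and d2f are its first and second derivatives, and the update line is the Newton
// step for g(x) = f(exp(x)) with x = log(nu_i), since by the chain rule
// g'(x) = df * nu_i and g''(x) = d2f * nu_i^2 + df * nu_i.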
public void OptimizeZeta(LLNAModel model)
{
    // Closed-form update: zeta = sum_i E[exp(eta_i)] under q, where the pinned
    // K-th component contributes exp(0) = 1.
    Zeta = 1;
    for (int i = 0; i < model.K - 1; i++)
    {
        Zeta += Math.Exp(Lambda[i] + 0.5 * Nu[i]);
    }
}
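// Why this is the update: the bound on the log-normalizer of the logistic normal uses
// a first-order Taylor expansion of log around zeta,
//
//   E[log sum_i exp(eta_i)] <= zeta^{-1} * sum_i E[exp(eta_i)] - 1 + log(zeta),
//
// and E[exp(eta_i)] = exp(lambda_i + nu_i / 2) for a Gaussian. Setting the derivative
// in zeta to zero gives zeta = sum_i E[exp(eta_i)]; the leading 1 is the pinned K-th
// component's exp(0). This follows the correlated topic model's derivation.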
public LLNASufficientStatistics(LLNAModel model)
{
    MuSufficientStatistic = new double[model.K - 1];
    CovarianceSufficientStatistic = Matrix<double>.Build.Dense(model.K - 1, model.K - 1, 0);
    BetaSufficientStatistic = Matrix<double>.Build.Dense(model.K, model.LogBeta.ColumnCount, 0);
    NData = 0;
}
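// These accumulators are filled during the E-step, one document at a time. The repo's
// actual update method is not shown here; the following is a hypothetical sketch of
// what the accumulation looks like in the correlated topic model (the method name is
// illustrative):
public void Accumulate(VariationalInferenceParameter q, CorpusDocument doc, LLNAModel model)
{
    for (int i = 0; i < model.K - 1; i++)
    {
        // E[eta] under q is lambda; E[eta eta^T] is diag(nu) + lambda lambda^T.
        MuSufficientStatistic[i] += q.Lambda[i];
        for (int j = 0; j < model.K - 1; j++)
        {
            CovarianceSufficientStatistic[i, j] += q.Lambda[i] * q.Lambda[j];
        }
        CovarianceSufficientStatistic[i, i] += q.Nu[i];
    }
    for (int n = 0; n < doc.NbTerms; n++)
    {
        for (int j = 0; j < model.K; j++)
        {
            // Expected word-topic counts for the beta M-step.
            BetaSufficientStatistic[j, doc.GetWord(n)] += doc.GetCount(n) * q.Phi[n, j];
        }
    }
    NData++;
}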
public Bundle(int k, CorpusDocument document, LLNAModel model, VariationalInferenceParameter inference)
{
    SumPhi = Vector<double>.Build.Dense(k - 1, 0);
    Document = document;
    Model = model;
    VarInference = inference;
    Init(k);
}
public void OptimizeLambda(LLNAModel model, CorpusDocument doc)
{
    int i;
    var bundle = new Bundle(model.K, doc, model, this);
    var x = Vector<double>.Build.Dense(model.K - 1);
    for (i = 0; i < model.K - 1; i++)
    {
        x[i] = Lambda[i];
    }

    // Minimize the negative bound in lambda with MathNet's conjugate gradient
    // solver (this replaced an earlier hand-rolled multidimensional minimizer).
    var obj = ObjectiveFunction.Gradient(
        vector => ComputeFunction(vector, bundle),
        vector => ComputeGradient(vector, bundle));
    var solver = new ConjugateGradientMinimizer(1e-8, 5000);
    var result = solver.FindMinimum(obj, x);

    for (i = 0; i < model.K - 1; i++)
    {
        Lambda[i] = result.MinimizingPoint[i];
    }

    // Keep the last component pinned at zero.
    Lambda[i] = 0;
}
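// ComputeFunction and ComputeGradient are defined elsewhere in the repo; for reference,
// the lambda-dependent terms of the (negated, since we minimize) bound in the correlated
// topic model are, with N = doc.Total and sumPhi = sum_n count_n * phi_n:
//
//   f(lambda) =  0.5 * (lambda - mu)^T InvSigma (lambda - mu)
//              - sumPhi . lambda
//              + (N / zeta) * sum_i exp(lambda_i + nu_i / 2)
//
//   grad f    =  InvSigma (lambda - mu) - sumPhi + (N / zeta) * exp(lambda + nu / 2)
//
// which is presumably what the Bundle's SumPhi exists to cache. Treat this as a sketch
// of the expected shapes, not a transcription of the actual implementation.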
public static LLNAModel Create(int k, Corpus corpus, CorrelatedTopicModelParameters parameters)
{
    var result = new LLNAModel(parameters)
    {
        K = k,
        Mu = Vector<double>.Build.Dense(k - 1),
        LogBeta = Matrix<double>.Build.Dense(k, corpus.VocabularySize, 0),
    };

    // Start from an identity covariance; its inverse and log-determinant are
    // cached because the likelihood bound uses them on every update.
    result.Covariance = Matrix<double>.Build.DenseIdentity(k - 1, k - 1);
    result.InvCovariance = result.Covariance.Inverse();
    result.LogDeterminantInvCovariance = result.InvCovariance.ComputeLNDeterminant();
    result.Init(corpus);
    return result;
}
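// A minimal construction sketch, assuming the corpus has already been loaded and that
// CorrelatedTopicModelParameters has a parameterless constructor (both assumptions;
// the topic count 20 is an arbitrary illustrative choice):
public static LLNAModel BuildExampleModel(Corpus corpus)
{
    var parameters = new CorrelatedTopicModelParameters();
    return Create(20, corpus, parameters);
}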
public void UpdateLikelihoodBound(CorpusDocument doc, LLNAModel model)
{
    TopicScores.SetValue(0);

    // E[log p(eta | mu, Sigma)] + H(q(eta | lambda, nu))  (topic proportions)
    double lHood = 0.5 * model.LogDeterminantInvCovariance + 0.5 * (model.K - 1);
    for (int i = 0; i < model.K - 1; i++)
    {
        double v = -0.5 * Nu[i] * model.InvCovariance[i, i];
        for (int j = 0; j < model.K - 1; j++)
        {
            v -= 0.5 * (Lambda[i] - model.Mu[i]) * model.InvCovariance[i, j] * (Lambda[j] - model.Mu[j]);
        }
        v += 0.5 * Math.Log(Nu[i]);
        lHood += v;
    }

    // E[log p(z_n | eta)] + E[log p(w_n | beta)] + H(q(z_n | phi_n))
    lHood -= CalculateLikelihoodBound() * doc.Total;
    for (int i = 0; i < doc.NbTerms; i++)
    {
        for (int j = 0; j < model.K; j++)
        {
            var phi = Phi[i, j];
            if (phi > 0)
            {
                var logPhi = LogPhi[i, j];
                TopicScores[j] += phi * doc.GetCount(i);
                lHood += doc.GetCount(i) * phi * (Lambda[j] + model.LogBeta[j, doc.GetWord(i)] - logPhi);
            }
        }
    }

    LHood = lHood;
}
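// How the pieces fit together: per document, the four coordinate updates are interleaved
// until the bound stops improving, re-tightening zeta between steps as the reference CTM
// implementation does. A hypothetical driver (the method name, iteration limit, and
// convergence test are illustrative, not the repo's API):
public void RunVariationalInference(LLNAModel model, CorpusDocument doc)
{
    const double convergence = 1e-5;
    const int maxIter = 500;
    Init(model);
    double oldLHood = 0; // first relative-change test yields infinity, so the loop continues
    for (NIter = 0; NIter < maxIter; NIter++)
    {
        OptimizeZeta(model);
        OptimizeLambda(model, doc);
        OptimizeZeta(model);
        OptimizeNu(model, doc);
        OptimizeZeta(model);
        OptimizePhi(model, doc);
        UpdateLikelihoodBound(doc, model);
        if (Math.Abs((LHood - oldLHood) / oldLHood) < convergence)
        {
            break;
        }
        oldLHood = LHood;
    }
}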