// FROM trainer
/// <summary>
/// Builds a quasi-Newton (QN) maxent model from the flat parameter vector produced
/// by the trainer.
/// </summary>
/// <param name="monitor">
/// objective function supplying predicate labels, outcome labels, and the per-predicate
/// outcome patterns used to compress the parameter matrix
/// </param>
/// <param name="parameters">
/// flat parameter vector laid out outcome-major: entry for (pred ci, outcome o) lives at
/// index ci + o * numPreds — so its length is numPreds * numOutcomes
/// </param>
public QNModel(LogLikelihoodFunction monitor, double[] parameters) : base(null, monitor.PredLabels, monitor.OutcomeLabels) {
    int[][] outcomePatterns = monitor.OutcomePatterns;
    int numPreds = monitor.PredLabels.Length;
    Context[] cParameters = new Context[numPreds];
    // BUG FIX: iterate over the per-predicate contexts (cParameters.Length == numPreds),
    // not over the full parameter vector. The old bound `ci < parameters.Length`
    // overran outcomePatterns[ci] / cParameters[ci] whenever there was more than one
    // outcome, since parameters holds numPreds * numOutcomes entries.
    for (int ci = 0; ci < cParameters.Length; ci++) {
        int[] outcomePattern = outcomePatterns[ci];
        double[] alpha = new double[outcomePattern.Length];
        for (int oi = 0; oi < outcomePattern.Length; oi++) {
            // Pull this predicate's weight for each outcome it participates in.
            alpha[oi] = parameters[ci + (outcomePattern[oi] * numPreds)];
        }
        cParameters[ci] = new Context(outcomePattern, alpha);
    }
    this.evalParams = new EvalParameters(cParameters, monitor.OutcomeLabels.Length);
    this.prior = new UniformPrior();
    this.modelType = ModelTypeEnum.MaxentQn;
    // Keep the raw vector as well so the model can be serialized / re-trained from it.
    this.parameters = parameters;
}
/// <summary>
/// Runs quasi-Newton (L-BFGS-style) optimization of the log-likelihood objective
/// built from the given data indexer and returns the trained maxent model.
/// </summary>
/// <param name="indexer">indexed training events used to build the objective function</param>
/// <returns>a QNModel wrapping the parameter vector at convergence</returns>
public virtual QNModel trainModel(DataIndexer indexer) {
    LogLikelihoodFunction objectiveFunction = generateFunction(indexer);
    this.dimension = objectiveFunction.DomainDimension;
    this.updateInfo = new QNInfo(this, this.m, this.dimension);

    // Seed the line-search state with the objective's starting point,
    // its value, and its gradient there.
    double[] startPoint = objectiveFunction.InitialPoint;
    LineSearchResult lsr = LineSearchResult.getInitialObject(
        objectiveFunction.valueAt(startPoint),
        objectiveFunction.gradientAt(startPoint),
        startPoint,
        0);

    int iteration = 0;
    // Iterate: pick a descent direction, line-search along it, fold the step
    // into the quasi-Newton history, and stop once the convergence test passes.
    do {
        if (verbose) {
            Console.Write(iteration++);
        }
        double[] searchDirection = computeDirection(objectiveFunction, lsr);
        lsr = LineSearch.doLineSearch(objectiveFunction, searchDirection, lsr, verbose);
        updateInfo.updateInfo(lsr);
    } while (!isConverged(lsr));

    return new QNModel(objectiveFunction, lsr.NextPoint);
}