Example #1
 public MonitorFunction(LambdaSolve m, CGRunner.LikelihoodFunction lf, string filename)
 {
     this.model    = m;
     this.lf       = lf;
     this.filename = filename;
 }
Example #2
        /// <summary>Solves the problem using OWLQN.</summary>
        /// <remarks>
        /// Solves the problem using OWLQN.  The solution is stored in the
        /// <c>lambda</c> array of <c>prob</c>.  Note that the likelihood
        /// function will be a penalized L2 likelihood function unless you
        /// have turned this off by setting priorSigmaS to 0.0.
        /// </remarks>
        /// <param name="weight">
        /// Controls the sparseness/regularization of the L1 solution.
        /// The bigger the number the sparser the solution.  Weights between
        /// 0.01 and 1.0 typically give good performance.
        /// </param>
        public virtual void SolveL1(double weight)
        {
            CGRunner.LikelihoodFunction df  = new CGRunner.LikelihoodFunction(prob, tol, useGaussianPrior, priorSigmaS, sigmaSquareds);
            IMinimizer<IDiffFunction>   owl = ReflectionLoading.LoadByReflection("edu.stanford.nlp.optimization.OWLQNMinimizer", weight);

            // all parameters are started at 0.0
            prob.lambda = owl.Minimize(df, tol, new double[df.DomainDimension()]);
            PrintOptimizationResults(df, null);
        }
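
The role of the weight parameter is easiest to see in one dimension: an L1 penalty soft-thresholds the unpenalized optimum, and a larger weight drives more coordinates to exactly 0.0, which is why bigger weights give sparser lambda vectors. The sketch below only illustrates that effect; it is not the OWLQNMinimizer implementation, and the names L1Demo and SoftThreshold are made up for the example.

        using System;

        static class L1Demo
        {
            // Minimizing 0.5 * (w - z)^2 + weight * |w| in one dimension gives the
            // soft-threshold of z: shrink toward zero and clamp at exactly 0.0.
            static double SoftThreshold(double z, double weight)
            {
                return Math.Sign(z) * Math.Max(Math.Abs(z) - weight, 0.0);
            }

            static void Main()
            {
                // A bigger weight leaves fewer non-zero coordinates, hence a sparser solution.
                foreach (double weight in new[] { 0.01, 0.1, 1.0 })
                {
                    Console.WriteLine("weight {0}: minimizer {1}", weight, SoftThreshold(0.5, weight));
                }
            }
        }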
Example #3
        /// <summary>Solves the problem using conjugate gradient (CG).</summary>
        /// <remarks>
        /// Solves the problem using conjugate gradient (CG).  The solution
        /// is stored in the <c>lambda</c> array of <c>prob</c>.
        /// </remarks>
        public virtual void SolveCG()
        {
            CGRunner.LikelihoodFunction df      = new CGRunner.LikelihoodFunction(prob, tol, useGaussianPrior, priorSigmaS, sigmaSquareds);
            CGRunner.MonitorFunction    monitor = new CGRunner.MonitorFunction(prob, df, filename);
            IMinimizer<IDiffFunction>   cgm     = new CGMinimizer(monitor);

            // all parameters are started at 0.0
            prob.lambda = cgm.Minimize(df, tol, new double[df.DomainDimension()]);
            PrintOptimizationResults(df, monitor);
        }
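
The comment about starting at 0.0 relies on the fact that a newly allocated double array is zero-initialized in C#, so new double[df.DomainDimension()] really is the all-zero starting point. A minimal standalone check, independent of the CGRunner classes:

        using System;
        using System.Linq;

        static class ZeroInitDemo
        {
            static void Main()
            {
                // Array elements of a value type default to 0.0, so this is the origin.
                double[] initial = new double[5];
                Console.WriteLine(initial.All(x => x == 0.0));   // prints True
            }
        }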
Example #4
        private void PrintOptimizationResults(CGRunner.LikelihoodFunction df, CGRunner.MonitorFunction monitor)
        {
            double negLogLike = df.ValueAt(prob.lambda);

            System.Console.Error.Printf("After optimization neg (penalized) log cond likelihood: %1.2f%n", negLogLike);
            if (monitor != null)
            {
                monitor.ReportMonitoring(negLogLike);
            }
            int numNonZero = 0;

            for (int i = 0; i < prob.lambda.Length; i++)
            {
                if (prob.lambda[i] != 0.0)
                {
                    // 0.0 == -0.0 in IEEE math!
                    numNonZero++;
                }
            }
            System.Console.Error.Printf("Non-zero parameters: %d/%d (%1.2f%%)%n", numNonZero, prob.lambda.Length, (100.0 * numNonZero) / prob.lambda.Length);
        }
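
The IEEE remark matters because an optimizer can leave a parameter at negative zero; the lambda[i] != 0.0 test above still counts that as zero, whereas a bit-level comparison would not. A small standalone check:

        using System;

        static class NegativeZeroDemo
        {
            static void Main()
            {
                double negZero = -0.0;

                // IEEE 754 equality treats +0.0 and -0.0 as equal ...
                Console.WriteLine(negZero == 0.0);                            // True
                // ... even though the two values have different bit patterns.
                Console.WriteLine(BitConverter.DoubleToInt64Bits(negZero)
                                  == BitConverter.DoubleToInt64Bits(0.0));    // False
            }
        }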