Example #1
0
        /// <summary>
        /// Solves the problem using conjugate gradient (CG).  The resulting
        /// parameter vector is stored in the <c>lambda</c> array of
        /// <c>prob</c>.
        /// </summary>
        public virtual void SolveCG()
        {
            // Objective: (penalized) negative log conditional likelihood,
            // plus a monitor that reports progress during minimization.
            var likelihood      = new CGRunner.LikelihoodFunction(prob, tol, useGaussianPrior, priorSigmaS, sigmaSquareds);
            var progressMonitor = new CGRunner.MonitorFunction(prob, likelihood, filename);
            IMinimizer<IDiffFunction> minimizer = new CGMinimizer(progressMonitor);

            // Start the search from the all-zero parameter vector.
            double[] initialGuess = new double[likelihood.DomainDimension()];
            prob.lambda = minimizer.Minimize(likelihood, tol, initialGuess);
            PrintOptimizationResults(likelihood, progressMonitor);
        }
Example #2
0
        /// <summary>
        /// Reports post-optimization statistics to standard error: the
        /// (penalized) negative log conditional likelihood at the solution
        /// and the number/percentage of non-zero weights in <c>prob.lambda</c>.
        /// </summary>
        /// <param name="df">The objective function that was minimized.</param>
        /// <param name="monitor">Progress monitor, or <c>null</c> if none was attached.</param>
        private void PrintOptimizationResults(CGRunner.LikelihoodFunction df, CGRunner.MonitorFunction monitor)
        {
            double objective = df.ValueAt(prob.lambda);
            System.Console.Error.Printf("After optimization neg (penalized) log cond likelihood: %1.2f%n", objective);

            monitor?.ReportMonitoring(objective);

            // Count non-zero weights.  Note that 0.0 == -0.0 under IEEE 754,
            // so both signed zeros are treated as zero here.
            int nonZeroCount = 0;
            foreach (double weight in prob.lambda)
            {
                if (weight != 0.0)
                {
                    nonZeroCount++;
                }
            }
            System.Console.Error.Printf("Non-zero parameters: %d/%d (%1.2f%%)%n", nonZeroCount, prob.lambda.Length, (100.0 * nonZeroCount) / prob.lambda.Length);
        }