Example #1
 /// <summary>
 /// Prepares the ITerminationCriterion for a new round of optimization
 /// </summary>
 public void Reset()
 {
     _termCrit.Reset();
 }
Example #2
        /// <summary>
        /// Minimize a function using the supplied termination criterion
        /// </summary>
        /// <param name="function">The function to minimize</param>
        /// <param name="initial">The initial point</param>
        /// <param name="term">termination criterion to use</param>
        /// <param name="result">The point at the optimum</param>
        /// <param name="optimum">The optimum function value</param>
        /// <exception cref="PrematureConvergenceException">Thrown if successive points are within numeric precision of each other, but termination condition is still unsatisfied.</exception>
        public void Minimize(DifferentiableFunction function, ref VBuffer<Float> initial, ITerminationCriterion term, ref VBuffer<Float> result, out Float optimum)
        {
            const string computationName = "LBFGS Optimizer";

            using (var pch = Env.StartProgressChannel(computationName))
                using (var ch = Env.Start(computationName))
                {
                    ch.Info("Beginning optimization");
                    ch.Info("num vars: {0}", initial.Length);
                    ch.Info("improvement criterion: {0}", term.FriendlyName);

                    OptimizerState state = MakeState(ch, pch, function, ref initial);
                    term.Reset();

                    var header = new ProgressHeader(new[] { "Loss", "Improvement" }, new[] { "iterations", "gradients" });
                    pch.SetHeader(header,
                        (Action<IProgressEntry>)(e =>
                        {
                            e.SetProgress(0, (double)(state.Iter - 1));
                            e.SetProgress(1, state.GradientCalculations);
                        }));

                    bool finished = false;
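                    // Report the starting loss (iteration 0) and compute the initial search direction.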
                    pch.Checkpoint(state.Value, null, 0);
                    state.UpdateDir();
                    while (!finished)
                    {
                        bool success = state.LineSearch(ch, false);
                        if (!success)
                        {
                            // The problem may be numerical errors in the previous gradients;
                            // try to salvage the optimization by discarding the stored vectors
                            // and restarting with a plain gradient descent step.

                            state.DiscardOldVectors();

                            state.UpdateDir();

                            state.LineSearch(ch, true);
                        }

                        string message;
                        finished = term.Terminate(state, out message);

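                        // Terminate encodes its improvement metric as text in the message;
                        // if that text parses as a number, surface it in the progress channel.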
                        double? improvement = null;
                        double x;
                        int end;
                        if (message != null && DoubleParser.TryParse(out x, message, 0, message.Length, out end))
                        {
                            improvement = x;
                        }

                        pch.Checkpoint(state.Value, improvement, state.Iter);

                        if (!finished)
                        {
                            state.Shift();
                            state.UpdateDir();
                        }
                    }

                    state.X.CopyTo(ref result);
                    optimum = state.Value;
                    ch.Done();
                }
        }
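Below is a minimal usage sketch for the Minimize method shown in Example #2. It is not taken from the library source: the DifferentiableFunction delegate shape (value returned, gradient written via ref VBuffer<Float>), the VBuffer<Float> constructor and DenseValues() enumerator, and the Float alias for System.Single are assumptions about the surrounding ML.NET internals, included only to illustrate the calling pattern.

// Toy objective for the sketch: f(x) = sum(x_i^2) with gradient 2*x, written against
// the assumed delegate signature (input and gradient passed as ref VBuffer<Float>).
private static Float SumOfSquares(ref VBuffer<Float> input, ref VBuffer<Float> gradient, IProgressChannelProvider progress)
{
    var gradValues = new Float[input.Length];
    Float value = 0;
    int i = 0;
    foreach (Float xi in input.DenseValues())   // assumed to enumerate every coordinate densely
    {
        value += xi * xi;
        gradValues[i++] = 2 * xi;
    }
    gradient = new VBuffer<Float>(gradValues.Length, gradValues);
    return value;
}

// Drive the optimizer: start from an initial point, iterate until the supplied
// termination criterion is satisfied, then read back the optimum point and value.
private static void RunMinimize(Optimizer optimizer, ITerminationCriterion term)
{
    var initial = new VBuffer<Float>(3, new Float[] { 1, -2, 3 });   // hypothetical starting point
    var result = default(VBuffer<Float>);
    Float optimum;
    optimizer.Minimize(SumOfSquares, ref initial, term, ref result, out optimum);
    // result now holds the minimizer and optimum the objective value at that point.
}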