Example #1
        /// <summary>
        /// Initializes a new instance of the <see cref="GradientCheckingMonitor"/> class.
        /// </summary>
        /// <param name="termCrit">The termination criterion</param>
        /// <param name="gradientCheckInterval">The gradient check interval; must be strictly positive.</param>
        public GradientCheckingMonitor(ITerminationCriterion termCrit, int gradientCheckInterval)
        {
            // Reject a non-positive interval up front, before any state is stored.
            Contracts.CheckParam(gradientCheckInterval > 0, nameof(gradientCheckInterval),
                "gradientCheckInterval must be positive.");

            _gradCheckInterval = gradientCheckInterval;
            _termCrit = termCrit;
        }
Example #2
 /// <summary>
 /// Create an L1Optimizer with the supplied value of M and termination criterion
 /// </summary>
 /// <param name="env">The environment</param>
 /// <param name="biasCount">Number of biases; must be non-negative</param>
 /// <param name="l1weight">Weight of L1 regularizer; must be non-negative</param>
 /// <param name="m">The number of previous iterations to store</param>
 /// <param name="keepDense">Whether the optimizer will keep its internal state dense</param>
 /// <param name="term">Termination criterion</param>
 /// <param name="enforceNonNegativity">The flag enforcing the non-negativity constraint</param>
 public L1Optimizer(IHostEnvironment env, int biasCount, Float l1weight, int m = 20, bool keepDense = false,
                    ITerminationCriterion term = null, bool enforceNonNegativity = false)
     : base(env, m, keepDense, term, enforceNonNegativity)
 {
     // Validate in the same order the base state was built: bias count first,
     // then the regularization weight.
     Env.Check(biasCount >= 0);
     Env.Check(l1weight >= 0);

     _biasCount = biasCount;
     _l1weight = l1weight;
 }
Example #3
        /// <summary>
        /// Overrides the base class's default termination criterion
        /// (MeanRelativeImprovementCriterion) with a MeanImprovementCriterion.
        /// </summary>
        protected override Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory,
                                                         out VBuffer<float> init, out ITerminationCriterion terminationCriterion)
        {
            // Let the base class build the optimizer and initial point; its
            // terminationCriterion is discarded and replaced below.
            Optimizer opt = base.InitializeOptimizer(ch, cursorFactory, out init, out terminationCriterion);

            // MeanImprovementCriterion:
            //   Terminates when the geometrically-weighted average improvement
            //   falls below the tolerance.
            terminationCriterion = new MeanImprovementCriterion(OptTol, 0.25f, MaxIterations);

            return opt;
        }
Example #4
 /// <summary>
 /// Create an optimizer with the supplied value of M and termination criterion
 /// </summary>
 /// <param name="env">The host environment</param>
 /// <param name="m">The number of previous iterations to store</param>
 /// <param name="keepDense">Whether the optimizer will keep its internal state dense</param>
 /// <param name="term">Termination criterion, defaults to MeanRelativeImprovement if null</param>
 /// <param name="enforceNonNegativity">The flag enforcing the non-negativity constraint</param>
 public Optimizer(IHostEnvironment env, int m = 20, bool keepDense = false, ITerminationCriterion term = null,
                  bool enforceNonNegativity   = false)
 {
     Contracts.CheckValue(env, nameof(env));

     Env = env;
     M = m;
     KeepDense = keepDense;
     // Substitute the default criterion when the caller passes none.
     _staticTerm = term ?? new MeanRelativeImprovementCriterion();
     // NOTE(review): -1 presumably means "no memory cap" — confirm against TotalMemoryLimit's consumer.
     TotalMemoryLimit = -1;
     EnforceNonNegativity = enforceNonNegativity;
 }
Example #5
        /// <summary>
        /// Builds the optimizer (L1-regularized when L1Weight is positive) and the
        /// initial weight vector, and supplies the default termination criterion.
        /// </summary>
        protected virtual Optimizer InitializeOptimizer(IChannel ch, FloatLabelCursor.Factory cursorFactory,
                                                        out VBuffer <float> init, out ITerminationCriterion terminationCriterion)
        {
            // MeanRelativeImprovementCriterion:
            //   Stops optimization when the average objective improvement over the last
            //   n iterations, normalized by the function value, is small enough.
            terminationCriterion = new MeanRelativeImprovementCriterion(OptTol, 5, MaxIterations);

            Optimizer opt;
            if (L1Weight > 0)
                opt = new L1Optimizer(Host, BiasCount, L1Weight / NumGoodRows, MemorySize, DenseOptimizer, null, EnforceNonNegativity);
            else
                opt = new Optimizer(Host, MemorySize, DenseOptimizer, null, EnforceNonNegativity);
            opt.Quiet = Quiet;

            // Initial point, in priority order: source predictor, random diameter,
            // SGD warm start, then an empty (all-zero) vector.
            if (_srcPredictor != null)
            {
                init = InitializeWeightsFromPredictor(_srcPredictor);
            }
            else if (InitWtsDiameter > 0)
            {
                // Uniform random weights in [-InitWtsDiameter/2, InitWtsDiameter/2).
                var weights = new float[BiasCount + WeightCount];
                for (int i = 0; i < weights.Length; i++)
                    weights[i] = InitWtsDiameter * (Host.Rand.NextSingle() - 0.5f);
                init = new VBuffer <float>(weights.Length, weights);
            }
            else if (SgdInitializationTolerance > 0)
            {
                init = InitializeWeightsSgd(ch, cursorFactory);
            }
            else
            {
                init = VBufferUtils.CreateEmpty <float>(BiasCount + WeightCount);
            }

            return opt;
        }
Example #6
        /// <summary>
        /// Minimize a function using the supplied termination criterion
        /// </summary>
        /// <param name="function">The function to minimize</param>
        /// <param name="initial">The initial point</param>
        /// <param name="term">termination criterion to use</param>
        /// <param name="result">The point at the optimum</param>
        /// <param name="optimum">The optimum function value</param>
        /// <exception cref="PrematureConvergenceException">Thrown if successive points are within numeric precision of each other, but termination condition is still unsatisfied.</exception>
        public void Minimize(DifferentiableFunction function, ref VBuffer <Float> initial, ITerminationCriterion term, ref VBuffer <Float> result, out Float optimum)
        {
            const string computationName = "LBFGS Optimizer";

            using (var pch = Env.StartProgressChannel(computationName))
                using (var ch = Env.Start(computationName))
                {
                    ch.Info("Beginning optimization");
                    ch.Info("num vars: {0}", initial.Length);
                    ch.Info("improvement criterion: {0}", term.FriendlyName);

                    OptimizerState state = MakeState(ch, pch, function, ref initial);
                    // Clear any state the criterion may carry over from a previous run.
                    term.Reset();

                    // Progress reporting: loss/improvement metrics keyed by iteration
                    // and gradient-evaluation counts.
                    var header = new ProgressHeader(new[] { "Loss", "Improvement" }, new[] { "iterations", "gradients" });
                    pch.SetHeader(header,
                                  (Action <IProgressEntry>)(e =>
                    {
                        e.SetProgress(0, (double)(state.Iter - 1));
                        e.SetProgress(1, state.GradientCalculations);
                    }));

                    bool finished = false;
                    pch.Checkpoint(state.Value, null, 0);
                    // Compute the initial search direction before entering the loop.
                    state.UpdateDir();
                    while (!finished)
                    {
                        bool success = state.LineSearch(ch, false);
                        if (!success)
                        {
                            // problem may be numerical errors in previous gradients
                            // try to save state of optimization by discarding them
                            // and starting over with gradient descent.

                            state.DiscardOldVectors();

                            state.UpdateDir();

                            state.LineSearch(ch, true);
                        }

                        string message;
                        finished = term.Terminate(state, out message);

                        // The criterion's message, when parseable as a double, is taken
                        // as the latest improvement value for progress reporting.
                        double?improvement = null;
                        double x;
                        int    end;
                        if (message != null && DoubleParser.TryParse(out x, message, 0, message.Length, out end))
                        {
                            improvement = x;
                        }

                        pch.Checkpoint(state.Value, improvement, state.Iter);

                        if (!finished)
                        {
                            // Advance the L-BFGS memory window and pick the next direction.
                            state.Shift();
                            state.UpdateDir();
                        }
                    }

                    // Copy the optimum point and value back to the caller.
                    state.X.CopyTo(ref result);
                    optimum = state.Value;
                    ch.Done();
                }
        }