Example #1
0
 /// <summary>
 /// Constructor. Captures the wrapped optimizer's current learning rate as the
 /// initial rate and stores the decay parameters for later scheduling steps.
 /// </summary>
 /// <param name="optimizer">Wrapped optimizer. Must not be null.</param>
 /// <param name="gamma">Multiplicative factor of learning rate decay. Default: 0.1.</param>
 /// <param name="last_epoch">The index of last epoch. Default: -1.</param>
 /// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
 /// <exception cref="ArgumentNullException">Thrown when <paramref name="optimizer"/> is null.</exception>
 public ExponentialLR(ILearningRateController optimizer, double gamma = 0.1, int last_epoch = -1, bool verbose = false)
 {
     // Throw expression keeps the null guard and the assignment on one line;
     // nameof() stays correct if the parameter is ever renamed.
     _optimizer = optimizer ?? throw new ArgumentNullException(nameof(optimizer));
     // Snapshot the starting LR so the scheduler can compute absolute decay from it.
     _initial   = optimizer.LearningRate;
     _gamma     = gamma;
     _last      = last_epoch;
     _verbose   = verbose;
 }
Example #2
0
 /// <summary>
 /// Decays the learning rate of each parameter group by gamma every step_size epochs.
 /// Notice that such decay can happen simultaneously with other changes to the learning rate from outside this scheduler.
 /// When last_epoch=-1, sets initial lr as lr.
 /// </summary>
 /// <param name="optimizer">Wrapped optimizer.</param>
 /// <param name="step_size">Period of learning rate decay.</param>
 /// <param name="gamma">Multiplicative factor of learning rate decay. Default: 0.1.</param>
 /// <param name="last_epoch">The index of last epoch. Default: -1.</param>
 /// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
 /// <returns>A scheduler instance</returns>
 public static LRScheduler StepLR(ILearningRateController optimizer, uint step_size, double gamma = 0.1, int last_epoch = -1, bool verbose = false)
     => new StepLR(optimizer, step_size, gamma, last_epoch, verbose);
Example #3
0
 /// <summary>
 /// Decays the learning rate of each parameter group by gamma every epoch.
 /// Notice that such decay can happen simultaneously with other changes to the learning rate from outside this scheduler.
 /// When last_epoch=-1, sets initial lr as lr.
 /// </summary>
 /// <param name="optimizer">Wrapped optimizer.</param>
 /// <param name="gamma">Multiplicative factor of learning rate decay. Default: 0.1.</param>
 /// <param name="last_epoch">The index of last epoch. Default: -1.</param>
 /// <param name="verbose"> If true, prints a message to stdout for each update. Default: false.</param>
 /// <returns>A scheduler</returns>
 public static LRScheduler ExponentialLR(ILearningRateController optimizer, double gamma = 0.1, int last_epoch = -1, bool verbose = false)
     => new impl.ExponentialLR(optimizer, gamma, last_epoch, verbose);