/// <summary>
/// Initializes a new instance of the <see cref="Adam" /> class.
/// </summary>
/// <param name="learningRate">The learning rate.</param>
/// <param name="momentum">The momentum applied to the gradient estimate.</param>
/// <param name="varianceMomentum">The momentum applied to the variance (second-moment) estimate.</param>
/// <param name="unitGain">if set to <c>true</c>, momentum is treated as unit gain.</param>
/// <param name="epsilon">Small constant used for numerical stability.</param>
/// <param name="regularizer">The regularizer applied to the parameters, or <c>null</c> for none.</param>
public Adam(
    double learningRate = 0.001,
    double momentum = 0.9,
    double varianceMomentum = 0.999,
    bool unitGain = true,
    double epsilon = 1e-08,   // was 1e-08f: a float literal (~9.9999999e-09) narrowed the intended 1e-8 default; double literal matches AdaDelta's epsilon default
    RegularizerBase regularizer = null)
    : base(learningRate, regularizer)
{
    Momentum = momentum;
    VarianceMomentum = varianceMomentum;
    Epsilon = epsilon;
    UnitGain = unitGain;
}
/// <summary>
/// Initializes a new instance of the <see cref="RMSProp" /> class.
/// </summary>
/// <param name="learningRate">The learning rate.</param>
/// <param name="gamma">The gamma (decay/smoothing) factor.</param>
/// <param name="inc">The increment factor, stored in <c>Increment</c>.</param>
/// <param name="dec">The decrement factor, stored in <c>Decrement</c>.</param>
/// <param name="min">The minimum.</param>
/// <param name="max">The maximum.</param>
/// <param name="regularizer">The regularizer applied to the parameters, or <c>null</c> for none.</param>
public RMSProp(
    double learningRate = 0.001,
    double gamma = 0.9,
    double inc = 2,
    double dec = 0.01,
    double min = 0.01,
    double max = 1,
    RegularizerBase regularizer = null)
    : base(learningRate, regularizer)
{
    Gamma = gamma;
    Increment = inc;
    Decrement = dec;
    Min = min;
    Max = max;
}
/// <summary>
/// Creates an <see cref="AdaDelta" /> optimizer with the supplied hyper-parameters.
/// </summary>
/// <param name="learningRate">The learning rate.</param>
/// <param name="rho">The rho decay coefficient.</param>
/// <param name="epsilon">Small constant used for numerical stability.</param>
/// <param name="regularizer">The regularizer applied to the parameters, or <c>null</c> for none.</param>
public AdaDelta(
    double learningRate = 1.0,
    double rho = 0.95,
    double epsilon = 1e-08,
    RegularizerBase regularizer = null)
    : base(learningRate, regularizer)
{
    // Learning rate and regularizer are handled by the base class;
    // only the AdaDelta-specific hyper-parameters are stored here.
    Epsilon = epsilon;
    Rho = rho;
}
/// <summary>
/// Initializes a new instance of the <see cref="OptimizerBase" /> class.
/// </summary>
/// <param name="learningRate">The learning rate shared by all derived optimizers.</param>
/// <param name="regularizer">The regularizer applied to the parameters, or <c>null</c> for none.</param>
protected OptimizerBase(double learningRate, RegularizerBase regularizer)
{
    LearningRate = learningRate;
    Regularizer = regularizer;
}
/// <summary>
/// Initializes a new instance of the <see cref="SGD" /> class.
/// </summary>
/// <param name="learningRate">The learning rate.</param>
/// <param name="regularizer">The regularizer applied to the parameters, or <c>null</c> for none.</param>
public SGD(double learningRate = 0.01, RegularizerBase regularizer = null)
    : base(learningRate, regularizer)
{
}